Searched refs:zdev (Results 1 - 17 of 17) sorted by relevance

/linux-4.4.14/drivers/s390/crypto/
zcrypt_cex4.c
73 struct zcrypt_device *zdev = NULL; zcrypt_cex4_probe() local
80 zdev = zcrypt_device_alloc(CEX4A_MAX_MESSAGE_SIZE); zcrypt_cex4_probe()
81 if (!zdev) zcrypt_cex4_probe()
84 zdev->type_string = "CEX4A"; zcrypt_cex4_probe()
85 zdev->speed_rating = CEX4A_SPEED_RATING; zcrypt_cex4_probe()
87 zdev->type_string = "CEX5A"; zcrypt_cex4_probe()
88 zdev->speed_rating = CEX5A_SPEED_RATING; zcrypt_cex4_probe()
90 zdev->user_space_type = ZCRYPT_CEX3A; zcrypt_cex4_probe()
91 zdev->min_mod_size = CEX4A_MIN_MOD_SIZE; zcrypt_cex4_probe()
94 zdev->max_mod_size = zcrypt_cex4_probe()
96 zdev->max_exp_bit_length = zcrypt_cex4_probe()
99 zdev->max_mod_size = zcrypt_cex4_probe()
101 zdev->max_exp_bit_length = zcrypt_cex4_probe()
104 zdev->short_crt = 1; zcrypt_cex4_probe()
105 zdev->ops = zcrypt_msgtype_request(MSGTYPE50_NAME, zcrypt_cex4_probe()
108 zdev = zcrypt_device_alloc(CEX4C_MAX_MESSAGE_SIZE); zcrypt_cex4_probe()
109 if (!zdev) zcrypt_cex4_probe()
112 zdev->type_string = "CEX4C"; zcrypt_cex4_probe()
113 zdev->speed_rating = CEX4C_SPEED_RATING; zcrypt_cex4_probe()
115 zdev->type_string = "CEX5C"; zcrypt_cex4_probe()
116 zdev->speed_rating = CEX5C_SPEED_RATING; zcrypt_cex4_probe()
118 zdev->user_space_type = ZCRYPT_CEX3C; zcrypt_cex4_probe()
119 zdev->min_mod_size = CEX4C_MIN_MOD_SIZE; zcrypt_cex4_probe()
120 zdev->max_mod_size = CEX4C_MAX_MOD_SIZE; zcrypt_cex4_probe()
121 zdev->max_exp_bit_length = CEX4C_MAX_MOD_SIZE; zcrypt_cex4_probe()
122 zdev->short_crt = 0; zcrypt_cex4_probe()
123 zdev->ops = zcrypt_msgtype_request(MSGTYPE06_NAME, zcrypt_cex4_probe()
126 zdev = zcrypt_device_alloc(CEX4C_MAX_MESSAGE_SIZE); zcrypt_cex4_probe()
127 if (!zdev) zcrypt_cex4_probe()
130 zdev->type_string = "CEX4P"; zcrypt_cex4_probe()
131 zdev->speed_rating = CEX4P_SPEED_RATING; zcrypt_cex4_probe()
133 zdev->type_string = "CEX5P"; zcrypt_cex4_probe()
134 zdev->speed_rating = CEX5P_SPEED_RATING; zcrypt_cex4_probe()
136 zdev->user_space_type = ZCRYPT_CEX4; zcrypt_cex4_probe()
137 zdev->min_mod_size = CEX4C_MIN_MOD_SIZE; zcrypt_cex4_probe()
138 zdev->max_mod_size = CEX4C_MAX_MOD_SIZE; zcrypt_cex4_probe()
139 zdev->max_exp_bit_length = CEX4C_MAX_MOD_SIZE; zcrypt_cex4_probe()
140 zdev->short_crt = 0; zcrypt_cex4_probe()
141 zdev->ops = zcrypt_msgtype_request(MSGTYPE06_NAME, zcrypt_cex4_probe()
146 if (!zdev) zcrypt_cex4_probe()
148 zdev->ap_dev = ap_dev; zcrypt_cex4_probe()
149 zdev->online = 1; zcrypt_cex4_probe()
150 ap_dev->reply = &zdev->reply; zcrypt_cex4_probe()
151 ap_dev->private = zdev; zcrypt_cex4_probe()
152 rc = zcrypt_device_register(zdev); zcrypt_cex4_probe()
154 zcrypt_msgtype_release(zdev->ops); zcrypt_cex4_probe()
156 zcrypt_device_free(zdev); zcrypt_cex4_probe()
167 struct zcrypt_device *zdev = ap_dev->private; zcrypt_cex4_remove() local
170 if (zdev) { zcrypt_cex4_remove()
171 zops = zdev->ops; zcrypt_cex4_remove()
172 zcrypt_device_unregister(zdev); zcrypt_cex4_remove()
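Note: the zcrypt_cex4.c hits above all fall inside one probe routine. For orientation, here is a condensed sketch of that flow, pieced together from the fragments only; the probe signature, the -ENOMEM return and the MSGTYPE50_VARIANT_DEFAULT name are assumptions, and the CEX4C/CEX5C and CEX4P/CEX5P branches (larger message size, MSGTYPE06 ops, short_crt = 0) are collapsed into a comment.

    static int zcrypt_cex4_probe(struct ap_device *ap_dev)   /* signature assumed */
    {
        struct zcrypt_device *zdev = NULL;
        int rc;

        /* Coprocessor (CEX4C/5C) and EP11 (CEX4P/5P) queues are handled by
         * parallel branches that allocate CEX4C_MAX_MESSAGE_SIZE and request
         * the MSGTYPE06 ops instead; only the accelerator branch is shown. */
        zdev = zcrypt_device_alloc(CEX4A_MAX_MESSAGE_SIZE);
        if (!zdev)
            return -ENOMEM;                           /* assumed error code */
        zdev->type_string = "CEX4A";
        zdev->speed_rating = CEX4A_SPEED_RATING;
        zdev->user_space_type = ZCRYPT_CEX3A;
        zdev->min_mod_size = CEX4A_MIN_MOD_SIZE;
        zdev->short_crt = 1;
        zdev->ops = zcrypt_msgtype_request(MSGTYPE50_NAME,
                                           MSGTYPE50_VARIANT_DEFAULT); /* variant assumed */

        /* Wire the zcrypt device to its AP queue and publish it. */
        zdev->ap_dev = ap_dev;
        zdev->online = 1;
        ap_dev->reply = &zdev->reply;
        ap_dev->private = zdev;
        rc = zcrypt_device_register(zdev);
        if (rc) {
            zcrypt_msgtype_release(zdev->ops);
            zcrypt_device_free(zdev);
        }
        return rc;
    }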
zcrypt_cex2a.c
89 struct zcrypt_device *zdev = NULL; zcrypt_cex2a_probe() local
94 zdev = zcrypt_device_alloc(CEX2A_MAX_RESPONSE_SIZE); zcrypt_cex2a_probe()
95 if (!zdev) zcrypt_cex2a_probe()
97 zdev->user_space_type = ZCRYPT_CEX2A; zcrypt_cex2a_probe()
98 zdev->type_string = "CEX2A"; zcrypt_cex2a_probe()
99 zdev->min_mod_size = CEX2A_MIN_MOD_SIZE; zcrypt_cex2a_probe()
100 zdev->max_mod_size = CEX2A_MAX_MOD_SIZE; zcrypt_cex2a_probe()
101 zdev->short_crt = 1; zcrypt_cex2a_probe()
102 zdev->speed_rating = CEX2A_SPEED_RATING; zcrypt_cex2a_probe()
103 zdev->max_exp_bit_length = CEX2A_MAX_MOD_SIZE; zcrypt_cex2a_probe()
106 zdev = zcrypt_device_alloc(CEX3A_MAX_RESPONSE_SIZE); zcrypt_cex2a_probe()
107 if (!zdev) zcrypt_cex2a_probe()
109 zdev->user_space_type = ZCRYPT_CEX3A; zcrypt_cex2a_probe()
110 zdev->type_string = "CEX3A"; zcrypt_cex2a_probe()
111 zdev->min_mod_size = CEX2A_MIN_MOD_SIZE; zcrypt_cex2a_probe()
112 zdev->max_mod_size = CEX2A_MAX_MOD_SIZE; zcrypt_cex2a_probe()
113 zdev->max_exp_bit_length = CEX2A_MAX_MOD_SIZE; zcrypt_cex2a_probe()
116 zdev->max_mod_size = CEX3A_MAX_MOD_SIZE; zcrypt_cex2a_probe()
117 zdev->max_exp_bit_length = CEX3A_MAX_MOD_SIZE; zcrypt_cex2a_probe()
119 zdev->short_crt = 1; zcrypt_cex2a_probe()
120 zdev->speed_rating = CEX3A_SPEED_RATING; zcrypt_cex2a_probe()
123 if (!zdev) zcrypt_cex2a_probe()
125 zdev->ops = zcrypt_msgtype_request(MSGTYPE50_NAME, zcrypt_cex2a_probe()
127 zdev->ap_dev = ap_dev; zcrypt_cex2a_probe()
128 zdev->online = 1; zcrypt_cex2a_probe()
129 ap_dev->reply = &zdev->reply; zcrypt_cex2a_probe()
130 ap_dev->private = zdev; zcrypt_cex2a_probe()
131 rc = zcrypt_device_register(zdev); zcrypt_cex2a_probe()
134 zcrypt_msgtype_release(zdev->ops); zcrypt_cex2a_probe()
135 zcrypt_device_free(zdev); zcrypt_cex2a_probe()
146 struct zcrypt_device *zdev = ap_dev->private; zcrypt_cex2a_remove() local
147 struct zcrypt_ops *zops = zdev->ops; zcrypt_cex2a_remove()
149 zcrypt_device_unregister(zdev); zcrypt_cex2a_remove()
zcrypt_api.c
86 struct zcrypt_device *zdev = to_ap_dev(dev)->private; zcrypt_type_show() local
87 return snprintf(buf, PAGE_SIZE, "%s\n", zdev->type_string); zcrypt_type_show()
95 struct zcrypt_device *zdev = to_ap_dev(dev)->private; zcrypt_online_show() local
96 return snprintf(buf, PAGE_SIZE, "%d\n", zdev->online); zcrypt_online_show()
103 struct zcrypt_device *zdev = to_ap_dev(dev)->private; zcrypt_online_store() local
108 zdev->online = online; zcrypt_online_store()
109 ZCRYPT_DBF_DEV(DBF_INFO, zdev, "dev%04xo%dman", zdev->ap_dev->qid, zcrypt_online_store()
110 zdev->online); zcrypt_online_store()
112 ap_flush_queue(zdev->ap_dev); zcrypt_online_store()
148 * @zdev: Pointer the crypto device
154 static void __zcrypt_increase_preference(struct zcrypt_device *zdev) __zcrypt_increase_preference() argument
159 if (zdev->speed_rating == 0) __zcrypt_increase_preference()
161 for (l = zdev->list.prev; l != &zcrypt_device_list; l = l->prev) { __zcrypt_increase_preference()
164 (zdev->request_count + 1) * zdev->speed_rating && __zcrypt_increase_preference()
168 if (l == zdev->list.prev) __zcrypt_increase_preference()
170 /* Move zdev behind l */ __zcrypt_increase_preference()
171 list_move(&zdev->list, l); __zcrypt_increase_preference()
176 * @zdev: Pointer to a crypto device.
182 static void __zcrypt_decrease_preference(struct zcrypt_device *zdev) __zcrypt_decrease_preference() argument
187 if (zdev->speed_rating == 0) __zcrypt_decrease_preference()
189 for (l = zdev->list.next; l != &zcrypt_device_list; l = l->next) { __zcrypt_decrease_preference()
192 (zdev->request_count + 1) * zdev->speed_rating || __zcrypt_decrease_preference()
196 if (l == zdev->list.next) __zcrypt_decrease_preference()
198 /* Move zdev before l */ __zcrypt_decrease_preference()
199 list_move_tail(&zdev->list, l); __zcrypt_decrease_preference()
204 struct zcrypt_device *zdev = zcrypt_device_release() local
206 zcrypt_device_free(zdev); zcrypt_device_release()
209 void zcrypt_device_get(struct zcrypt_device *zdev) zcrypt_device_get() argument
211 kref_get(&zdev->refcount); zcrypt_device_get()
215 int zcrypt_device_put(struct zcrypt_device *zdev) zcrypt_device_put() argument
217 return kref_put(&zdev->refcount, zcrypt_device_release); zcrypt_device_put()
223 struct zcrypt_device *zdev; zcrypt_device_alloc() local
225 zdev = kzalloc(sizeof(struct zcrypt_device), GFP_KERNEL); zcrypt_device_alloc()
226 if (!zdev) zcrypt_device_alloc()
228 zdev->reply.message = kmalloc(max_response_size, GFP_KERNEL); zcrypt_device_alloc()
229 if (!zdev->reply.message) zcrypt_device_alloc()
231 zdev->reply.length = max_response_size; zcrypt_device_alloc()
232 spin_lock_init(&zdev->lock); zcrypt_device_alloc()
233 INIT_LIST_HEAD(&zdev->list); zcrypt_device_alloc()
234 zdev->dbf_area = zcrypt_dbf_devices; zcrypt_device_alloc()
235 return zdev; zcrypt_device_alloc()
238 kfree(zdev); zcrypt_device_alloc()
243 void zcrypt_device_free(struct zcrypt_device *zdev) zcrypt_device_free() argument
245 kfree(zdev->reply.message); zcrypt_device_free()
246 kfree(zdev); zcrypt_device_free()
252 * @zdev: Pointer to a crypto device
256 int zcrypt_device_register(struct zcrypt_device *zdev) zcrypt_device_register() argument
260 if (!zdev->ops) zcrypt_device_register()
262 rc = sysfs_create_group(&zdev->ap_dev->device.kobj, zcrypt_device_register()
266 get_device(&zdev->ap_dev->device); zcrypt_device_register()
267 kref_init(&zdev->refcount); zcrypt_device_register()
269 zdev->online = 1; /* New devices are online by default. */ zcrypt_device_register()
270 ZCRYPT_DBF_DEV(DBF_INFO, zdev, "dev%04xo%dreg", zdev->ap_dev->qid, zcrypt_device_register()
271 zdev->online); zcrypt_device_register()
272 list_add_tail(&zdev->list, &zcrypt_device_list); zcrypt_device_register()
273 __zcrypt_increase_preference(zdev); zcrypt_device_register()
276 if (zdev->ops->rng) { zcrypt_device_register()
286 list_del_init(&zdev->list); zcrypt_device_register()
288 sysfs_remove_group(&zdev->ap_dev->device.kobj, zcrypt_device_register()
290 put_device(&zdev->ap_dev->device); zcrypt_device_register()
291 zcrypt_device_put(zdev); zcrypt_device_register()
299 * @zdev: Pointer to crypto device
303 void zcrypt_device_unregister(struct zcrypt_device *zdev) zcrypt_device_unregister() argument
305 if (zdev->ops->rng) zcrypt_device_unregister()
309 list_del_init(&zdev->list); zcrypt_device_unregister()
311 sysfs_remove_group(&zdev->ap_dev->device.kobj, zcrypt_device_unregister()
313 put_device(&zdev->ap_dev->device); zcrypt_device_unregister()
314 zcrypt_device_put(zdev); zcrypt_device_unregister()
425 struct zcrypt_device *zdev; zcrypt_rsa_modexpo() local
438 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_rsa_modexpo()
439 if (!zdev->online || zcrypt_rsa_modexpo()
440 !zdev->ops->rsa_modexpo || zcrypt_rsa_modexpo()
441 zdev->min_mod_size > mex->inputdatalength || zcrypt_rsa_modexpo()
442 zdev->max_mod_size < mex->inputdatalength) zcrypt_rsa_modexpo()
444 zcrypt_device_get(zdev); zcrypt_rsa_modexpo()
445 get_device(&zdev->ap_dev->device); zcrypt_rsa_modexpo()
446 zdev->request_count++; zcrypt_rsa_modexpo()
447 __zcrypt_decrease_preference(zdev); zcrypt_rsa_modexpo()
448 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { zcrypt_rsa_modexpo()
450 rc = zdev->ops->rsa_modexpo(zdev, mex); zcrypt_rsa_modexpo()
452 module_put(zdev->ap_dev->drv->driver.owner); zcrypt_rsa_modexpo()
456 zdev->request_count--; zcrypt_rsa_modexpo()
457 __zcrypt_increase_preference(zdev); zcrypt_rsa_modexpo()
458 put_device(&zdev->ap_dev->device); zcrypt_rsa_modexpo()
459 zcrypt_device_put(zdev); zcrypt_rsa_modexpo()
469 struct zcrypt_device *zdev; zcrypt_rsa_crt() local
485 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_rsa_crt()
486 if (!zdev->online || zcrypt_rsa_crt()
487 !zdev->ops->rsa_modexpo_crt || zcrypt_rsa_crt()
488 zdev->min_mod_size > crt->inputdatalength || zcrypt_rsa_crt()
489 zdev->max_mod_size < crt->inputdatalength) zcrypt_rsa_crt()
491 if (zdev->short_crt && crt->inputdatalength > 240) { zcrypt_rsa_crt()
528 zcrypt_device_get(zdev); zcrypt_rsa_crt()
529 get_device(&zdev->ap_dev->device); zcrypt_rsa_crt()
530 zdev->request_count++; zcrypt_rsa_crt()
531 __zcrypt_decrease_preference(zdev); zcrypt_rsa_crt()
532 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { zcrypt_rsa_crt()
534 rc = zdev->ops->rsa_modexpo_crt(zdev, crt); zcrypt_rsa_crt()
536 module_put(zdev->ap_dev->drv->driver.owner); zcrypt_rsa_crt()
540 zdev->request_count--; zcrypt_rsa_crt()
541 __zcrypt_increase_preference(zdev); zcrypt_rsa_crt()
542 put_device(&zdev->ap_dev->device); zcrypt_rsa_crt()
543 zcrypt_device_put(zdev); zcrypt_rsa_crt()
553 struct zcrypt_device *zdev; zcrypt_send_cprb() local
557 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_send_cprb()
558 if (!zdev->online || !zdev->ops->send_cprb || zcrypt_send_cprb()
559 (zdev->ops->variant == MSGTYPE06_VARIANT_EP11) || zcrypt_send_cprb()
561 AP_QID_DEVICE(zdev->ap_dev->qid) != xcRB->user_defined)) zcrypt_send_cprb()
563 zcrypt_device_get(zdev); zcrypt_send_cprb()
564 get_device(&zdev->ap_dev->device); zcrypt_send_cprb()
565 zdev->request_count++; zcrypt_send_cprb()
566 __zcrypt_decrease_preference(zdev); zcrypt_send_cprb()
567 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { zcrypt_send_cprb()
569 rc = zdev->ops->send_cprb(zdev, xcRB); zcrypt_send_cprb()
571 module_put(zdev->ap_dev->drv->driver.owner); zcrypt_send_cprb()
575 zdev->request_count--; zcrypt_send_cprb()
576 __zcrypt_increase_preference(zdev); zcrypt_send_cprb()
577 put_device(&zdev->ap_dev->device); zcrypt_send_cprb()
578 zcrypt_device_put(zdev); zcrypt_send_cprb()
607 struct zcrypt_device *zdev; zcrypt_send_ep11_cprb() local
636 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_send_ep11_cprb()
638 if (!zdev->online || zcrypt_send_ep11_cprb()
639 zdev->ops->variant != MSGTYPE06_VARIANT_EP11) zcrypt_send_ep11_cprb()
643 if (!is_desired_ep11dev(zdev->ap_dev->qid, ep11_dev_list) && zcrypt_send_ep11_cprb()
647 zcrypt_device_get(zdev); zcrypt_send_ep11_cprb()
648 get_device(&zdev->ap_dev->device); zcrypt_send_ep11_cprb()
649 zdev->request_count++; zcrypt_send_ep11_cprb()
650 __zcrypt_decrease_preference(zdev); zcrypt_send_ep11_cprb()
651 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { zcrypt_send_ep11_cprb()
653 rc = zdev->ops->send_ep11_cprb(zdev, xcrb); zcrypt_send_ep11_cprb()
655 module_put(zdev->ap_dev->drv->driver.owner); zcrypt_send_ep11_cprb()
659 zdev->request_count--; zcrypt_send_ep11_cprb()
660 __zcrypt_increase_preference(zdev); zcrypt_send_ep11_cprb()
661 put_device(&zdev->ap_dev->device); zcrypt_send_ep11_cprb()
662 zcrypt_device_put(zdev); zcrypt_send_ep11_cprb()
672 struct zcrypt_device *zdev; zcrypt_rng() local
676 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_rng()
677 if (!zdev->online || !zdev->ops->rng) zcrypt_rng()
679 zcrypt_device_get(zdev); zcrypt_rng()
680 get_device(&zdev->ap_dev->device); zcrypt_rng()
681 zdev->request_count++; zcrypt_rng()
682 __zcrypt_decrease_preference(zdev); zcrypt_rng()
683 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { zcrypt_rng()
685 rc = zdev->ops->rng(zdev, buffer); zcrypt_rng()
687 module_put(zdev->ap_dev->drv->driver.owner); zcrypt_rng()
690 zdev->request_count--; zcrypt_rng()
691 __zcrypt_increase_preference(zdev); zcrypt_rng()
692 put_device(&zdev->ap_dev->device); zcrypt_rng()
693 zcrypt_device_put(zdev); zcrypt_rng()
703 struct zcrypt_device *zdev; zcrypt_status_mask() local
707 list_for_each_entry(zdev, &zcrypt_device_list, list) zcrypt_status_mask()
708 status[AP_QID_DEVICE(zdev->ap_dev->qid)] = zcrypt_status_mask()
709 zdev->online ? zdev->user_space_type : 0x0d; zcrypt_status_mask()
715 struct zcrypt_device *zdev; zcrypt_qdepth_mask() local
719 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_qdepth_mask()
720 spin_lock(&zdev->ap_dev->lock); zcrypt_qdepth_mask()
721 qdepth[AP_QID_DEVICE(zdev->ap_dev->qid)] = zcrypt_qdepth_mask()
722 zdev->ap_dev->pendingq_count + zcrypt_qdepth_mask()
723 zdev->ap_dev->requestq_count; zcrypt_qdepth_mask()
724 spin_unlock(&zdev->ap_dev->lock); zcrypt_qdepth_mask()
731 struct zcrypt_device *zdev; zcrypt_perdev_reqcnt() local
735 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_perdev_reqcnt()
736 spin_lock(&zdev->ap_dev->lock); zcrypt_perdev_reqcnt()
737 reqcnt[AP_QID_DEVICE(zdev->ap_dev->qid)] = zcrypt_perdev_reqcnt()
738 zdev->ap_dev->total_request_count; zcrypt_perdev_reqcnt()
739 spin_unlock(&zdev->ap_dev->lock); zcrypt_perdev_reqcnt()
746 struct zcrypt_device *zdev; zcrypt_pendingq_count() local
750 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_pendingq_count()
751 spin_lock(&zdev->ap_dev->lock); zcrypt_pendingq_count()
752 pendingq_count += zdev->ap_dev->pendingq_count; zcrypt_pendingq_count()
753 spin_unlock(&zdev->ap_dev->lock); zcrypt_pendingq_count()
761 struct zcrypt_device *zdev; zcrypt_requestq_count() local
765 list_for_each_entry(zdev, &zcrypt_device_list, list) { zcrypt_requestq_count()
766 spin_lock(&zdev->ap_dev->lock); zcrypt_requestq_count()
767 requestq_count += zdev->ap_dev->requestq_count; zcrypt_requestq_count()
768 spin_unlock(&zdev->ap_dev->lock); zcrypt_requestq_count()
776 struct zcrypt_device *zdev; zcrypt_count_type() local
780 list_for_each_entry(zdev, &zcrypt_device_list, list) zcrypt_count_type()
781 if (zdev->user_space_type == type) zcrypt_count_type()
1252 struct zcrypt_device *zdev; zcrypt_disable_card() local
1255 list_for_each_entry(zdev, &zcrypt_device_list, list) zcrypt_disable_card()
1256 if (AP_QID_DEVICE(zdev->ap_dev->qid) == index) { zcrypt_disable_card()
1257 zdev->online = 0; zcrypt_disable_card()
1258 ap_flush_queue(zdev->ap_dev); zcrypt_disable_card()
1266 struct zcrypt_device *zdev; zcrypt_enable_card() local
1269 list_for_each_entry(zdev, &zcrypt_device_list, list) zcrypt_enable_card()
1270 if (AP_QID_DEVICE(zdev->ap_dev->qid) == index) { zcrypt_enable_card()
1271 zdev->online = 1; zcrypt_enable_card()
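Note: every request path in the zcrypt_api.c hits (modexpo, CRT, send_cprb, EP11, rng) uses the same selection loop over zcrypt_device_list. A minimal sketch of that pattern as it appears in the zcrypt_rsa_modexpo() fragments; the function signature, the -ENODEV/-EAGAIN fallbacks and the locking around the device list are assumptions or omissions, not taken from the hits.

    static long zcrypt_rsa_modexpo(struct ica_rsa_modexpo *mex)  /* signature assumed */
    {
        struct zcrypt_device *zdev;
        long rc;

        list_for_each_entry(zdev, &zcrypt_device_list, list) {
            /* Skip queues that are offline, lack the op, or cannot
             * handle this modulus length. */
            if (!zdev->online ||
                !zdev->ops->rsa_modexpo ||
                zdev->min_mod_size > mex->inputdatalength ||
                zdev->max_mod_size < mex->inputdatalength)
                continue;
            /* Pin the device while the request is outstanding and push it
             * down the preference list so the next caller prefers a less
             * loaded queue. */
            zcrypt_device_get(zdev);
            get_device(&zdev->ap_dev->device);
            zdev->request_count++;
            __zcrypt_decrease_preference(zdev);
            if (try_module_get(zdev->ap_dev->drv->driver.owner)) {
                rc = zdev->ops->rsa_modexpo(zdev, mex);
                module_put(zdev->ap_dev->drv->driver.owner);
            } else
                rc = -EAGAIN;                        /* assumed fallback */
            /* Undo the accounting and rebalance the list. */
            zdev->request_count--;
            __zcrypt_increase_preference(zdev);
            put_device(&zdev->ap_dev->device);
            zcrypt_device_put(zdev);
            return rc;
        }
        return -ENODEV;                              /* assumed: no usable device */
    }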
zcrypt_error.h
90 static inline int convert_error(struct zcrypt_device *zdev, convert_error() argument
113 zdev->online = 0; convert_error()
115 zdev->ap_dev->qid); convert_error()
116 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%drc%d", convert_error()
117 zdev->ap_dev->qid, zdev->online, ehdr->reply_code); convert_error()
124 zdev->online = 0; convert_error()
126 zdev->ap_dev->qid); convert_error()
127 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%drc%d", convert_error()
128 zdev->ap_dev->qid, zdev->online, ehdr->reply_code); convert_error()
131 zdev->online = 0; convert_error()
133 zdev->ap_dev->qid); convert_error()
134 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%drc%d", convert_error()
135 zdev->ap_dev->qid, zdev->online, ehdr->reply_code); convert_error()
zcrypt_msgtype6.c
155 * @zdev: crypto device pointer
161 static int ICAMEX_msg_to_type6MEX_msgX(struct zcrypt_device *zdev, ICAMEX_msg_to_type6MEX_msgX() argument
207 msg->cprbx.domain = AP_QID_QUEUE(zdev->ap_dev->qid); ICAMEX_msg_to_type6MEX_msgX()
210 msg->fr = (zdev->user_space_type == ZCRYPT_PCIXCC_MCL2) ? ICAMEX_msg_to_type6MEX_msgX()
222 * @zdev: crypto device pointer
228 static int ICACRT_msg_to_type6CRT_msgX(struct zcrypt_device *zdev, ICACRT_msg_to_type6CRT_msgX() argument
275 msg->cprbx.domain = AP_QID_QUEUE(zdev->ap_dev->qid); ICACRT_msg_to_type6CRT_msgX()
279 msg->fr = (zdev->user_space_type == ZCRYPT_PCIXCC_MCL2) ? ICACRT_msg_to_type6CRT_msgX()
289 * @zdev: crypto device pointer
300 static int XCRB_msg_to_type6CPRB_msgX(struct zcrypt_device *zdev, XCRB_msg_to_type6CPRB_msgX() argument
395 static int xcrb_msg_to_type6_ep11cprb_msgx(struct zcrypt_device *zdev, xcrb_msg_to_type6_ep11cprb_msgx() argument
463 AP_QID_QUEUE(zdev->ap_dev->qid); xcrb_msg_to_type6_ep11cprb_msgx()
481 AP_QID_QUEUE(zdev->ap_dev->qid); xcrb_msg_to_type6_ep11cprb_msgx()
489 * @zdev: crypto device pointer
511 static int convert_type86_ica(struct zcrypt_device *zdev, convert_type86_ica() argument
566 zdev->min_mod_size = PCIXCC_MIN_MOD_SIZE_OLD; convert_type86_ica()
573 zdev->online = 0; convert_type86_ica()
575 zdev->ap_dev->qid); convert_type86_ica()
576 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%drc%d", convert_type86_ica()
577 zdev->ap_dev->qid, zdev->online, convert_type86_ica()
614 * @zdev: crypto device pointer
620 static int convert_type86_xcrb(struct zcrypt_device *zdev, convert_type86_xcrb() argument
645 * @zdev: crypto device pointer
651 static int convert_type86_ep11_xcrb(struct zcrypt_device *zdev, convert_type86_ep11_xcrb() argument
669 static int convert_type86_rng(struct zcrypt_device *zdev, convert_type86_rng() argument
686 static int convert_response_ica(struct zcrypt_device *zdev, convert_response_ica() argument
697 return convert_error(zdev, reply); convert_response_ica()
702 if (zdev->max_exp_bit_length <= 17) { convert_response_ica()
703 zdev->max_exp_bit_length = 17; convert_response_ica()
709 return convert_error(zdev, reply); convert_response_ica()
711 return convert_type86_ica(zdev, reply, convert_response_ica()
716 zdev->online = 0; convert_response_ica()
718 zdev->ap_dev->qid); convert_response_ica()
719 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%dfail", convert_response_ica()
720 zdev->ap_dev->qid, zdev->online); convert_response_ica()
725 static int convert_response_xcrb(struct zcrypt_device *zdev, convert_response_xcrb() argument
736 return convert_error(zdev, reply); convert_response_xcrb()
740 return convert_error(zdev, reply); convert_response_xcrb()
743 return convert_type86_xcrb(zdev, reply, xcRB); convert_response_xcrb()
748 zdev->online = 0; convert_response_xcrb()
750 zdev->ap_dev->qid); convert_response_xcrb()
751 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%dfail", convert_response_xcrb()
752 zdev->ap_dev->qid, zdev->online); convert_response_xcrb()
757 static int convert_response_ep11_xcrb(struct zcrypt_device *zdev, convert_response_ep11_xcrb() argument
766 return convert_error(zdev, reply); convert_response_ep11_xcrb()
769 return convert_error(zdev, reply); convert_response_ep11_xcrb()
771 return convert_type86_ep11_xcrb(zdev, reply, xcRB); convert_response_ep11_xcrb()
774 zdev->online = 0; convert_response_ep11_xcrb()
776 zdev->ap_dev->qid); convert_response_ep11_xcrb()
777 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%dfail", convert_response_ep11_xcrb()
778 zdev->ap_dev->qid, zdev->online); convert_response_ep11_xcrb()
783 static int convert_response_rng(struct zcrypt_device *zdev, convert_response_rng() argument
797 return convert_type86_rng(zdev, reply, data); convert_response_rng()
801 zdev->online = 0; convert_response_rng()
803 zdev->ap_dev->qid); convert_response_rng()
804 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%dfail", convert_response_rng()
805 zdev->ap_dev->qid, zdev->online); convert_response_rng()
907 * @zdev: pointer to zcrypt_device structure that identifies the
911 static long zcrypt_msgtype6_modexpo(struct zcrypt_device *zdev, zcrypt_msgtype6_modexpo() argument
928 rc = ICAMEX_msg_to_type6MEX_msgX(zdev, &ap_msg, mex); zcrypt_msgtype6_modexpo()
932 ap_queue_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_modexpo()
937 rc = convert_response_ica(zdev, &ap_msg, zcrypt_msgtype6_modexpo()
942 ap_cancel_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_modexpo()
951 * @zdev: pointer to zcrypt_device structure that identifies the
955 static long zcrypt_msgtype6_modexpo_crt(struct zcrypt_device *zdev, zcrypt_msgtype6_modexpo_crt() argument
972 rc = ICACRT_msg_to_type6CRT_msgX(zdev, &ap_msg, crt); zcrypt_msgtype6_modexpo_crt()
976 ap_queue_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_modexpo_crt()
981 rc = convert_response_ica(zdev, &ap_msg, zcrypt_msgtype6_modexpo_crt()
986 ap_cancel_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_modexpo_crt()
995 * @zdev: pointer to zcrypt_device structure that identifies the
999 static long zcrypt_msgtype6_send_cprb(struct zcrypt_device *zdev, zcrypt_msgtype6_send_cprb() argument
1016 rc = XCRB_msg_to_type6CPRB_msgX(zdev, &ap_msg, xcRB); zcrypt_msgtype6_send_cprb()
1020 ap_queue_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_send_cprb()
1025 rc = convert_response_xcrb(zdev, &ap_msg, xcRB); zcrypt_msgtype6_send_cprb()
1028 ap_cancel_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_send_cprb()
1037 * @zdev: pointer to zcrypt_device structure that identifies the
1041 static long zcrypt_msgtype6_send_ep11_cprb(struct zcrypt_device *zdev, zcrypt_msgtype6_send_ep11_cprb() argument
1058 rc = xcrb_msg_to_type6_ep11cprb_msgx(zdev, &ap_msg, xcrb); zcrypt_msgtype6_send_ep11_cprb()
1062 ap_queue_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_send_ep11_cprb()
1067 rc = convert_response_ep11_xcrb(zdev, &ap_msg, xcrb); zcrypt_msgtype6_send_ep11_cprb()
1070 ap_cancel_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_send_ep11_cprb()
1080 * @zdev: pointer to zcrypt_device structure that identifies the
1085 static long zcrypt_msgtype6_rng(struct zcrypt_device *zdev, zcrypt_msgtype6_rng() argument
1102 rng_type6CPRB_msgX(zdev->ap_dev, &ap_msg, ZCRYPT_RNG_BUFFER_SIZE); zcrypt_msgtype6_rng()
1104 ap_queue_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_rng()
1109 rc = convert_response_rng(zdev, &ap_msg, buffer); zcrypt_msgtype6_rng()
1112 ap_cancel_message(zdev->ap_dev, &ap_msg); zcrypt_msgtype6_rng()
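Note: the zcrypt_msgtype6.c hits share one request shape: translate the user request into a type6 CPRB, queue it on the AP device, wait for the reply, then convert the type86 response back (marking the device offline on malformed replies). A compressed sketch of the modexpo variant built from those fragments; the ap_message setup, the wait mechanism and the trailing convert_response_ica() arguments are not visible in the hits and are flagged below.

    static long zcrypt_msgtype6_modexpo(struct zcrypt_device *zdev,
                                        struct ica_rsa_modexpo *mex)  /* 2nd param assumed */
    {
        struct ap_message ap_msg;
        int rc;

        /* ...allocate ap_msg.message and set up the response/completion
         * bookkeeping (not shown in the hits)... */

        rc = ICAMEX_msg_to_type6MEX_msgX(zdev, &ap_msg, mex);
        if (rc)
            goto out_free;
        ap_queue_message(zdev->ap_dev, &ap_msg);
        /* ...sleep until the reply arrives; on an interrupted wait the
         * request is cancelled with ap_cancel_message(zdev->ap_dev, &ap_msg)
         * instead of being converted... */
        rc = convert_response_ica(zdev, &ap_msg,
                                  mex->outputdata,        /* args assumed from the  */
                                  mex->outputdatalength); /* msgtype50 fragments    */
    out_free:
        /* ...free ap_msg.message... */
        return rc;
    }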
zcrypt_pcixcc.c
269 struct zcrypt_device *zdev; zcrypt_pcixcc_probe() local
272 zdev = zcrypt_device_alloc(PCIXCC_MAX_XCRB_MESSAGE_SIZE); zcrypt_pcixcc_probe()
273 if (!zdev) zcrypt_pcixcc_probe()
275 zdev->ap_dev = ap_dev; zcrypt_pcixcc_probe()
276 zdev->online = 1; zcrypt_pcixcc_probe()
281 zcrypt_device_free(zdev); zcrypt_pcixcc_probe()
284 zdev->user_space_type = rc; zcrypt_pcixcc_probe()
286 zdev->type_string = "PCIXCC_MCL2"; zcrypt_pcixcc_probe()
287 zdev->speed_rating = PCIXCC_MCL2_SPEED_RATING; zcrypt_pcixcc_probe()
288 zdev->min_mod_size = PCIXCC_MIN_MOD_SIZE_OLD; zcrypt_pcixcc_probe()
289 zdev->max_mod_size = PCIXCC_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
290 zdev->max_exp_bit_length = PCIXCC_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
292 zdev->type_string = "PCIXCC_MCL3"; zcrypt_pcixcc_probe()
293 zdev->speed_rating = PCIXCC_MCL3_SPEED_RATING; zcrypt_pcixcc_probe()
294 zdev->min_mod_size = PCIXCC_MIN_MOD_SIZE; zcrypt_pcixcc_probe()
295 zdev->max_mod_size = PCIXCC_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
296 zdev->max_exp_bit_length = PCIXCC_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
300 zdev->user_space_type = ZCRYPT_CEX2C; zcrypt_pcixcc_probe()
301 zdev->type_string = "CEX2C"; zcrypt_pcixcc_probe()
302 zdev->speed_rating = CEX2C_SPEED_RATING; zcrypt_pcixcc_probe()
303 zdev->min_mod_size = PCIXCC_MIN_MOD_SIZE; zcrypt_pcixcc_probe()
304 zdev->max_mod_size = PCIXCC_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
305 zdev->max_exp_bit_length = PCIXCC_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
308 zdev->user_space_type = ZCRYPT_CEX3C; zcrypt_pcixcc_probe()
309 zdev->type_string = "CEX3C"; zcrypt_pcixcc_probe()
310 zdev->speed_rating = CEX3C_SPEED_RATING; zcrypt_pcixcc_probe()
311 zdev->min_mod_size = CEX3C_MIN_MOD_SIZE; zcrypt_pcixcc_probe()
312 zdev->max_mod_size = CEX3C_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
313 zdev->max_exp_bit_length = CEX3C_MAX_MOD_SIZE; zcrypt_pcixcc_probe()
321 zcrypt_device_free(zdev); zcrypt_pcixcc_probe()
325 zdev->ops = zcrypt_msgtype_request(MSGTYPE06_NAME, zcrypt_pcixcc_probe()
328 zdev->ops = zcrypt_msgtype_request(MSGTYPE06_NAME, zcrypt_pcixcc_probe()
330 ap_dev->reply = &zdev->reply; zcrypt_pcixcc_probe()
331 ap_dev->private = zdev; zcrypt_pcixcc_probe()
332 rc = zcrypt_device_register(zdev); zcrypt_pcixcc_probe()
339 zcrypt_msgtype_release(zdev->ops); zcrypt_pcixcc_probe()
340 zcrypt_device_free(zdev); zcrypt_pcixcc_probe()
350 struct zcrypt_device *zdev = ap_dev->private; zcrypt_pcixcc_remove() local
351 struct zcrypt_ops *zops = zdev->ops; zcrypt_pcixcc_remove()
353 zcrypt_device_unregister(zdev); zcrypt_pcixcc_remove()
zcrypt_msgtype50.c
179 * @zdev: crypto device pointer
185 static int ICAMEX_msg_to_type50MEX_msg(struct zcrypt_device *zdev, ICAMEX_msg_to_type50MEX_msg() argument
237 * @zdev: crypto device pointer
243 static int ICACRT_msg_to_type50CRT_msg(struct zcrypt_device *zdev, ICACRT_msg_to_type50CRT_msg() argument
286 (zdev->max_mod_size == CEX3A_MAX_MOD_SIZE)) { /* >= CEX3A */ ICACRT_msg_to_type50CRT_msg()
320 * @zdev: crypto device pointer
327 static int convert_type80(struct zcrypt_device *zdev, convert_type80() argument
337 zdev->online = 0; convert_type80()
339 zdev->ap_dev->qid); convert_type80()
340 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%drc%d", convert_type80()
341 zdev->ap_dev->qid, zdev->online, t80h->code); convert_type80()
345 if (zdev->user_space_type == ZCRYPT_CEX2A) convert_type80()
355 static int convert_response(struct zcrypt_device *zdev, convert_response() argument
364 return convert_error(zdev, reply); convert_response()
366 return convert_type80(zdev, reply, convert_response()
369 zdev->online = 0; convert_response()
371 zdev->ap_dev->qid); convert_response()
372 ZCRYPT_DBF_DEV(DBF_ERR, zdev, "dev%04xo%dfail", convert_response()
373 zdev->ap_dev->qid, zdev->online); convert_response()
420 * @zdev: pointer to zcrypt_device structure that identifies the
424 static long zcrypt_cex2a_modexpo(struct zcrypt_device *zdev, zcrypt_cex2a_modexpo() argument
432 if (zdev->user_space_type == ZCRYPT_CEX2A) zcrypt_cex2a_modexpo()
444 rc = ICAMEX_msg_to_type50MEX_msg(zdev, &ap_msg, mex); zcrypt_cex2a_modexpo()
448 ap_queue_message(zdev->ap_dev, &ap_msg); zcrypt_cex2a_modexpo()
453 rc = convert_response(zdev, &ap_msg, mex->outputdata, zcrypt_cex2a_modexpo()
457 ap_cancel_message(zdev->ap_dev, &ap_msg); zcrypt_cex2a_modexpo()
466 * @zdev: pointer to zcrypt_device structure that identifies the
470 static long zcrypt_cex2a_modexpo_crt(struct zcrypt_device *zdev, zcrypt_cex2a_modexpo_crt() argument
478 if (zdev->user_space_type == ZCRYPT_CEX2A) zcrypt_cex2a_modexpo_crt()
490 rc = ICACRT_msg_to_type50CRT_msg(zdev, &ap_msg, crt); zcrypt_cex2a_modexpo_crt()
494 ap_queue_message(zdev->ap_dev, &ap_msg); zcrypt_cex2a_modexpo_crt()
499 rc = convert_response(zdev, &ap_msg, crt->outputdata, zcrypt_cex2a_modexpo_crt()
503 ap_cancel_message(zdev->ap_dev, &ap_msg); zcrypt_cex2a_modexpo_crt()
/linux-4.4.14/arch/s390/pci/
pci_debug.c
44 struct zpci_dev *zdev = m->private; pci_sw_counter_show() local
45 atomic64_t *counter = &zdev->allocated_pages; pci_sw_counter_show()
55 struct zpci_dev *zdev = m->private; pci_perf_show() local
59 if (!zdev) pci_perf_show()
62 mutex_lock(&zdev->lock); pci_perf_show()
63 if (!zdev->fmb) { pci_perf_show()
64 mutex_unlock(&zdev->lock); pci_perf_show()
70 seq_printf(m, "FMB @ %p\n", zdev->fmb); pci_perf_show()
71 seq_printf(m, "Update interval: %u ms\n", zdev->fmb_update); pci_perf_show()
72 seq_printf(m, "Samples: %u\n", zdev->fmb->samples); pci_perf_show()
73 seq_printf(m, "Last update TOD: %Lx\n", zdev->fmb->last_update); pci_perf_show()
76 stat = (u64 *) &zdev->fmb->ld_ops; pci_perf_show()
80 if (zdev->fmb->dma_valid) pci_perf_show()
86 mutex_unlock(&zdev->lock); pci_perf_show()
93 struct zpci_dev *zdev = ((struct seq_file *) file->private_data)->private; pci_perf_seq_write() local
97 if (!zdev) pci_perf_seq_write()
104 mutex_lock(&zdev->lock); pci_perf_seq_write()
107 rc = zpci_fmb_disable_device(zdev); pci_perf_seq_write()
110 rc = zpci_fmb_enable_device(zdev); pci_perf_seq_write()
113 mutex_unlock(&zdev->lock); pci_perf_seq_write()
131 void zpci_debug_init_device(struct zpci_dev *zdev) zpci_debug_init_device() argument
133 zdev->debugfs_dev = debugfs_create_dir(dev_name(&zdev->pdev->dev), zpci_debug_init_device()
135 if (IS_ERR(zdev->debugfs_dev)) zpci_debug_init_device()
136 zdev->debugfs_dev = NULL; zpci_debug_init_device()
138 zdev->debugfs_perf = debugfs_create_file("statistics", zpci_debug_init_device()
140 zdev->debugfs_dev, zdev, zpci_debug_init_device()
142 if (IS_ERR(zdev->debugfs_perf)) zpci_debug_init_device()
143 zdev->debugfs_perf = NULL; zpci_debug_init_device()
146 void zpci_debug_exit_device(struct zpci_dev *zdev) zpci_debug_exit_device() argument
148 debugfs_remove(zdev->debugfs_perf); zpci_debug_exit_device()
149 debugfs_remove(zdev->debugfs_dev); zpci_debug_exit_device()
pci_clp.c
59 static void clp_store_query_pci_fngrp(struct zpci_dev *zdev, clp_store_query_pci_fngrp() argument
62 zdev->tlb_refresh = response->refresh; clp_store_query_pci_fngrp()
63 zdev->dma_mask = response->dasm; clp_store_query_pci_fngrp()
64 zdev->msi_addr = response->msia; clp_store_query_pci_fngrp()
65 zdev->max_msi = response->noi; clp_store_query_pci_fngrp()
66 zdev->fmb_update = response->mui; clp_store_query_pci_fngrp()
70 zdev->max_bus_speed = PCIE_SPEED_5_0GT; clp_store_query_pci_fngrp()
73 zdev->max_bus_speed = PCI_SPEED_UNKNOWN; clp_store_query_pci_fngrp()
78 static int clp_query_pci_fngrp(struct zpci_dev *zdev, u8 pfgid) clp_query_pci_fngrp() argument
95 clp_store_query_pci_fngrp(zdev, &rrb->response); clp_query_pci_fngrp()
105 static int clp_store_query_pci_fn(struct zpci_dev *zdev, clp_store_query_pci_fn() argument
111 zdev->bars[i].val = le32_to_cpu(response->bar[i]); clp_store_query_pci_fn()
112 zdev->bars[i].size = response->bar_size[i]; clp_store_query_pci_fn()
114 zdev->start_dma = response->sdma; clp_store_query_pci_fn()
115 zdev->end_dma = response->edma; clp_store_query_pci_fn()
116 zdev->pchid = response->pchid; clp_store_query_pci_fn()
117 zdev->pfgid = response->pfgid; clp_store_query_pci_fn()
118 zdev->pft = response->pft; clp_store_query_pci_fn()
119 zdev->vfn = response->vfn; clp_store_query_pci_fn()
120 zdev->uid = response->uid; clp_store_query_pci_fn()
122 memcpy(zdev->pfip, response->pfip, sizeof(zdev->pfip)); clp_store_query_pci_fn()
124 memcpy(zdev->util_str, response->util_str, clp_store_query_pci_fn()
125 sizeof(zdev->util_str)); clp_store_query_pci_fn()
131 static int clp_query_pci_fn(struct zpci_dev *zdev, u32 fh) clp_query_pci_fn() argument
148 rc = clp_store_query_pci_fn(zdev, &rrb->response); clp_query_pci_fn()
152 rc = clp_query_pci_fngrp(zdev, rrb->response.pfgid); clp_query_pci_fn()
165 struct zpci_dev *zdev; clp_add_pci_device() local
169 zdev = kzalloc(sizeof(*zdev), GFP_KERNEL); clp_add_pci_device()
170 if (!zdev) clp_add_pci_device()
173 zdev->fh = fh; clp_add_pci_device()
174 zdev->fid = fid; clp_add_pci_device()
176 /* Query function properties and update zdev */ clp_add_pci_device()
177 rc = clp_query_pci_fn(zdev, fh); clp_add_pci_device()
182 zdev->state = ZPCI_FN_STATE_CONFIGURED; clp_add_pci_device()
184 zdev->state = ZPCI_FN_STATE_STANDBY; clp_add_pci_device()
186 rc = zpci_create_device(zdev); clp_add_pci_device()
192 kfree(zdev); clp_add_pci_device()
237 int clp_enable_fh(struct zpci_dev *zdev, u8 nr_dma_as) clp_enable_fh() argument
239 u32 fh = zdev->fh; clp_enable_fh()
244 /* Success -> store enabled handle in zdev */ clp_enable_fh()
245 zdev->fh = fh; clp_enable_fh()
247 zpci_dbg(3, "ena fid:%x, fh:%x, rc:%d\n", zdev->fid, zdev->fh, rc); clp_enable_fh()
251 int clp_disable_fh(struct zpci_dev *zdev) clp_disable_fh() argument
253 u32 fh = zdev->fh; clp_disable_fh()
256 if (!zdev_enabled(zdev)) clp_disable_fh()
261 /* Success -> store disabled handle in zdev */ clp_disable_fh()
262 zdev->fh = fh; clp_disable_fh()
264 zpci_dbg(3, "dis fid:%x, fh:%x, rc:%d\n", zdev->fid, zdev->fh, rc); clp_disable_fh()
315 struct zpci_dev *zdev; __clp_rescan() local
320 zdev = get_zdev_by_fid(entry->fid); __clp_rescan()
321 if (!zdev) { __clp_rescan()
332 zpci_stop_device(zdev); __clp_rescan()
338 struct zpci_dev *zdev; __clp_update() local
343 zdev = get_zdev_by_fid(entry->fid); __clp_update()
344 if (!zdev) __clp_update()
347 zdev->fh = entry->fh; __clp_update()
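Note: the pci_clp.c hits outline how a function reported by the CLP list/query instructions becomes a zpci_dev. A condensed sketch of clp_add_pci_device() as suggested by those fragments; the configured parameter, the -ENOMEM return and the goto label are assumptions.

    static int clp_add_pci_device(u32 fid, u32 fh, int configured)  /* params assumed */
    {
        struct zpci_dev *zdev;
        int rc;

        zdev = kzalloc(sizeof(*zdev), GFP_KERNEL);
        if (!zdev)
            return -ENOMEM;                          /* assumed error code */

        zdev->fh = fh;
        zdev->fid = fid;

        /* Query function properties and update zdev */
        rc = clp_query_pci_fn(zdev, fh);
        if (rc)
            goto error;

        zdev->state = configured ? ZPCI_FN_STATE_CONFIGURED
                                 : ZPCI_FN_STATE_STANDBY;

        rc = zpci_create_device(zdev);
        if (rc)
            goto error;
        return 0;

    error:
        kfree(zdev);
        return rc;
    }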
pci.c
81 struct zpci_dev *tmp, *zdev = NULL; get_zdev_by_fid() local
86 zdev = tmp; get_zdev_by_fid()
91 return zdev; get_zdev_by_fid()
112 static int zpci_set_airq(struct zpci_dev *zdev) zpci_set_airq() argument
114 u64 req = ZPCI_CREATE_REQ(zdev->fh, 0, ZPCI_MOD_FC_REG_INT); zpci_set_airq()
119 fib.noi = airq_iv_end(zdev->aibv); zpci_set_airq()
120 fib.aibv = (unsigned long) zdev->aibv->vector; zpci_set_airq()
121 fib.aibvo = 0; /* each zdev has its own interrupt vector */ zpci_set_airq()
122 fib.aisb = (unsigned long) zpci_aisb_iv->vector + (zdev->aisb/64)*8; zpci_set_airq()
123 fib.aisbo = zdev->aisb & 63; zpci_set_airq()
135 static int mod_pci(struct zpci_dev *zdev, int fn, u8 dmaas, struct mod_pci_args *args) mod_pci() argument
137 u64 req = ZPCI_CREATE_REQ(zdev->fh, dmaas, fn); mod_pci()
149 int zpci_register_ioat(struct zpci_dev *zdev, u8 dmaas, zpci_register_ioat() argument
156 return mod_pci(zdev, ZPCI_MOD_FC_REG_IOAT, dmaas, &args); zpci_register_ioat()
160 int zpci_unregister_ioat(struct zpci_dev *zdev, u8 dmaas) zpci_unregister_ioat() argument
164 return mod_pci(zdev, ZPCI_MOD_FC_DEREG_IOAT, dmaas, &args); zpci_unregister_ioat()
168 static int zpci_clear_airq(struct zpci_dev *zdev) zpci_clear_airq() argument
172 return mod_pci(zdev, ZPCI_MOD_FC_DEREG_INT, 0, &args); zpci_clear_airq()
176 int zpci_fmb_enable_device(struct zpci_dev *zdev) zpci_fmb_enable_device() argument
180 if (zdev->fmb) zpci_fmb_enable_device()
183 zdev->fmb = kmem_cache_zalloc(zdev_fmb_cache, GFP_KERNEL); zpci_fmb_enable_device()
184 if (!zdev->fmb) zpci_fmb_enable_device()
186 WARN_ON((u64) zdev->fmb & 0xf); zpci_fmb_enable_device()
189 atomic64_set(&zdev->allocated_pages, 0); zpci_fmb_enable_device()
190 atomic64_set(&zdev->mapped_pages, 0); zpci_fmb_enable_device()
191 atomic64_set(&zdev->unmapped_pages, 0); zpci_fmb_enable_device()
193 args.fmb_addr = virt_to_phys(zdev->fmb); zpci_fmb_enable_device()
194 return mod_pci(zdev, ZPCI_MOD_FC_SET_MEASURE, 0, &args); zpci_fmb_enable_device()
198 int zpci_fmb_disable_device(struct zpci_dev *zdev) zpci_fmb_disable_device() argument
203 if (!zdev->fmb) zpci_fmb_disable_device()
207 rc = mod_pci(zdev, ZPCI_MOD_FC_SET_MEASURE, 0, &args); zpci_fmb_disable_device()
209 kmem_cache_free(zdev_fmb_cache, zdev->fmb); zpci_fmb_disable_device()
210 zdev->fmb = NULL; zpci_fmb_disable_device()
216 static int zpci_cfg_load(struct zpci_dev *zdev, int offset, u32 *val, u8 len) zpci_cfg_load() argument
218 u64 req = ZPCI_CREATE_REQ(zdev->fh, ZPCI_PCIAS_CFGSPC, len); zpci_cfg_load()
232 static int zpci_cfg_store(struct zpci_dev *zdev, int offset, u32 val, u8 len) zpci_cfg_store() argument
234 u64 req = ZPCI_CREATE_REQ(zdev->fh, ZPCI_PCIAS_CFGSPC, len); zpci_cfg_store()
267 struct zpci_dev *zdev = to_zpci(pdev); pci_iomap_range() local
274 idx = zdev->bars[bar].map_idx; pci_iomap_range()
277 BUG_ON(zpci_iomap_start[idx].fh != zdev->fh || pci_iomap_range()
280 zpci_iomap_start[idx].fh = zdev->fh; pci_iomap_range()
317 struct zpci_dev *zdev = get_zdev_by_bus(bus); pci_read() local
320 if (!zdev || devfn != ZPCI_DEVFN) pci_read()
323 ret = zpci_cfg_load(zdev, where, val, size); pci_read()
331 struct zpci_dev *zdev = get_zdev_by_bus(bus); pci_write() local
334 if (!zdev || devfn != ZPCI_DEVFN) pci_write()
337 ret = zpci_cfg_store(zdev, where, val, size); pci_write()
383 struct zpci_dev *zdev = to_zpci(pdev); arch_setup_msi_irqs() local
392 msi_vecs = min_t(unsigned int, nvec, zdev->max_msi); arch_setup_msi_irqs()
399 zdev->aisb = aisb; arch_setup_msi_irqs()
403 zdev->aibv = airq_iv_create(msi_vecs, AIRQ_IV_DATA | AIRQ_IV_BITLOCK); arch_setup_msi_irqs()
404 if (!zdev->aibv) arch_setup_msi_irqs()
408 zpci_aibv[aisb] = zdev->aibv; arch_setup_msi_irqs()
423 msg.address_lo = zdev->msi_addr & 0xffffffff; for_each_pci_msi_entry()
424 msg.address_hi = zdev->msi_addr >> 32; for_each_pci_msi_entry()
426 airq_iv_set_data(zdev->aibv, hwirq, irq); for_each_pci_msi_entry()
431 rc = zpci_set_airq(zdev);
449 airq_iv_release(zdev->aibv);
458 struct zpci_dev *zdev = to_zpci(pdev); arch_teardown_msi_irqs() local
463 rc = zpci_clear_airq(zdev); arch_teardown_msi_irqs()
481 zpci_aibv[zdev->aisb] = NULL;
482 airq_iv_release(zdev->aibv);
483 airq_iv_free_bit(zpci_aisb_iv, zdev->aisb);
545 static int zpci_alloc_iomap(struct zpci_dev *zdev) zpci_alloc_iomap() argument
560 static void zpci_free_iomap(struct zpci_dev *zdev, int entry) zpci_free_iomap() argument
568 static struct resource *__alloc_res(struct zpci_dev *zdev, unsigned long start, __alloc_res() argument
580 r->name = zdev->res_name; __alloc_res()
589 static int zpci_setup_bus_resources(struct zpci_dev *zdev, zpci_setup_bus_resources() argument
596 snprintf(zdev->res_name, sizeof(zdev->res_name), zpci_setup_bus_resources()
597 "PCI Bus %04x:%02x", zdev->domain, ZPCI_BUS_NR); zpci_setup_bus_resources()
600 if (!zdev->bars[i].size) zpci_setup_bus_resources()
602 entry = zpci_alloc_iomap(zdev); zpci_setup_bus_resources()
605 zdev->bars[i].map_idx = entry; zpci_setup_bus_resources()
609 if (zdev->bars[i].val & 8) zpci_setup_bus_resources()
611 if (zdev->bars[i].val & 4) zpci_setup_bus_resources()
616 size = 1UL << zdev->bars[i].size; zpci_setup_bus_resources()
618 res = __alloc_res(zdev, addr, size, flags); zpci_setup_bus_resources()
620 zpci_free_iomap(zdev, entry); zpci_setup_bus_resources()
623 zdev->bars[i].res = res; zpci_setup_bus_resources()
630 static void zpci_cleanup_bus_resources(struct zpci_dev *zdev) zpci_cleanup_bus_resources() argument
635 if (!zdev->bars[i].size || !zdev->bars[i].res) zpci_cleanup_bus_resources()
638 zpci_free_iomap(zdev, zdev->bars[i].map_idx); zpci_cleanup_bus_resources()
639 release_resource(zdev->bars[i].res); zpci_cleanup_bus_resources()
640 kfree(zdev->bars[i].res); zpci_cleanup_bus_resources()
646 struct zpci_dev *zdev = to_zpci(pdev); pcibios_add_device() local
650 zdev->pdev = pdev; pcibios_add_device()
671 struct zpci_dev *zdev = to_zpci(pdev); pcibios_enable_device() local
673 zdev->pdev = pdev; pcibios_enable_device()
674 zpci_debug_init_device(zdev); pcibios_enable_device()
675 zpci_fmb_enable_device(zdev); pcibios_enable_device()
682 struct zpci_dev *zdev = to_zpci(pdev); pcibios_disable_device() local
684 zpci_fmb_disable_device(zdev); pcibios_disable_device()
685 zpci_debug_exit_device(zdev); pcibios_disable_device()
686 zdev->pdev = NULL; pcibios_disable_device()
693 struct zpci_dev *zdev = to_zpci(pdev); zpci_restore() local
696 if (zdev->state != ZPCI_FN_STATE_ONLINE) zpci_restore()
699 ret = clp_enable_fh(zdev, ZPCI_NR_DMA_SPACES); zpci_restore()
704 zpci_register_ioat(zdev, 0, zdev->start_dma + PAGE_OFFSET, zpci_restore()
705 zdev->start_dma + zdev->iommu_size - 1, zpci_restore()
706 (u64) zdev->dma_table); zpci_restore()
715 struct zpci_dev *zdev = to_zpci(pdev); zpci_freeze() local
717 if (zdev->state != ZPCI_FN_STATE_ONLINE) zpci_freeze()
720 zpci_unregister_ioat(zdev, 0); zpci_freeze()
722 return clp_disable_fh(zdev); zpci_freeze()
733 static int zpci_alloc_domain(struct zpci_dev *zdev) zpci_alloc_domain() argument
736 zdev->domain = find_first_zero_bit(zpci_domain, ZPCI_NR_DEVICES); zpci_alloc_domain()
737 if (zdev->domain == ZPCI_NR_DEVICES) { zpci_alloc_domain()
741 set_bit(zdev->domain, zpci_domain); zpci_alloc_domain()
746 static void zpci_free_domain(struct zpci_dev *zdev) zpci_free_domain() argument
749 clear_bit(zdev->domain, zpci_domain); zpci_free_domain()
755 struct zpci_dev *zdev = get_zdev_by_bus(bus); pcibios_remove_bus() local
757 zpci_exit_slot(zdev); pcibios_remove_bus()
758 zpci_cleanup_bus_resources(zdev); pcibios_remove_bus()
759 zpci_free_domain(zdev); pcibios_remove_bus()
762 list_del(&zdev->entry); pcibios_remove_bus()
765 kfree(zdev); pcibios_remove_bus()
768 static int zpci_scan_bus(struct zpci_dev *zdev) zpci_scan_bus() argument
773 ret = zpci_setup_bus_resources(zdev, &resources); zpci_scan_bus()
777 zdev->bus = pci_scan_root_bus(NULL, ZPCI_BUS_NR, &pci_root_ops, zpci_scan_bus()
778 zdev, &resources); zpci_scan_bus()
779 if (!zdev->bus) { zpci_scan_bus()
783 zdev->bus->max_bus_speed = zdev->max_bus_speed; zpci_scan_bus()
784 pci_bus_add_devices(zdev->bus); zpci_scan_bus()
788 zpci_cleanup_bus_resources(zdev); zpci_scan_bus()
793 int zpci_enable_device(struct zpci_dev *zdev) zpci_enable_device() argument
797 rc = clp_enable_fh(zdev, ZPCI_NR_DMA_SPACES); zpci_enable_device()
801 rc = zpci_dma_init_device(zdev); zpci_enable_device()
805 zdev->state = ZPCI_FN_STATE_ONLINE; zpci_enable_device()
809 clp_disable_fh(zdev); zpci_enable_device()
815 int zpci_disable_device(struct zpci_dev *zdev) zpci_disable_device() argument
817 zpci_dma_exit_device(zdev); zpci_disable_device()
818 return clp_disable_fh(zdev); zpci_disable_device()
822 int zpci_create_device(struct zpci_dev *zdev) zpci_create_device() argument
826 rc = zpci_alloc_domain(zdev); zpci_create_device()
830 mutex_init(&zdev->lock); zpci_create_device()
831 if (zdev->state == ZPCI_FN_STATE_CONFIGURED) { zpci_create_device()
832 rc = zpci_enable_device(zdev); zpci_create_device()
836 rc = zpci_scan_bus(zdev); zpci_create_device()
841 list_add_tail(&zdev->entry, &zpci_list); zpci_create_device()
844 zpci_init_slot(zdev); zpci_create_device()
849 if (zdev->state == ZPCI_FN_STATE_ONLINE) zpci_create_device()
850 zpci_disable_device(zdev); zpci_create_device()
852 zpci_free_domain(zdev); zpci_create_device()
857 void zpci_stop_device(struct zpci_dev *zdev) zpci_stop_device() argument
859 zpci_dma_exit_device(zdev); zpci_stop_device()
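Note: the pci.c hits around zpci_enable_device() show the enable sequence used both at device creation and on resume: CLP-enable the function handle, initialise the DMA translation, then mark the function online, with CLP-disable as the unwind step. A minimal sketch from those fragments; only the visible calls are taken from the hits, the error handling around them is assumed.

    int zpci_enable_device(struct zpci_dev *zdev)
    {
        int rc;

        rc = clp_enable_fh(zdev, ZPCI_NR_DMA_SPACES);
        if (rc)
            return rc;                               /* assumed */

        rc = zpci_dma_init_device(zdev);
        if (rc) {
            /* unwind: drop the enabled function handle again */
            clp_disable_fh(zdev);
            return rc;
        }

        zdev->state = ZPCI_FN_STATE_ONLINE;
        return 0;
    }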
pci_event.c
48 struct zpci_dev *zdev = get_zdev_by_fid(ccdf->fid); __zpci_event_error() local
49 struct pci_dev *pdev = zdev ? zdev->pdev : NULL; __zpci_event_error()
66 struct zpci_dev *zdev = get_zdev_by_fid(ccdf->fid); __zpci_event_availability() local
67 struct pci_dev *pdev = zdev ? zdev->pdev : NULL; __zpci_event_availability()
77 if (!zdev) { __zpci_event_availability()
81 zdev = get_zdev_by_fid(ccdf->fid); __zpci_event_availability()
83 if (!zdev || zdev->state != ZPCI_FN_STATE_STANDBY) __zpci_event_availability()
85 zdev->state = ZPCI_FN_STATE_CONFIGURED; __zpci_event_availability()
86 zdev->fh = ccdf->fh; __zpci_event_availability()
87 ret = zpci_enable_device(zdev); __zpci_event_availability()
91 pci_rescan_bus(zdev->bus); __zpci_event_availability()
95 if (!zdev) __zpci_event_availability()
102 ret = zpci_disable_device(zdev); __zpci_event_availability()
106 ret = sclp_pci_deconfigure(zdev->fid); __zpci_event_availability()
107 zpci_dbg(3, "deconf fid:%x, rc:%d\n", zdev->fid, ret); __zpci_event_availability()
109 zdev->state = ZPCI_FN_STATE_STANDBY; __zpci_event_availability()
120 zdev->fh = ccdf->fh; __zpci_event_availability()
121 zpci_disable_device(zdev); __zpci_event_availability()
122 zdev->state = ZPCI_FN_STATE_STANDBY; __zpci_event_availability()
128 if (!zdev) __zpci_event_availability()
130 pci_stop_root_bus(zdev->bus); __zpci_event_availability()
131 pci_remove_root_bus(zdev->bus); __zpci_event_availability()
pci_dma.c
21 static int zpci_refresh_global(struct zpci_dev *zdev) zpci_refresh_global() argument
23 return zpci_refresh_trans((u64) zdev->fh << 32, zdev->start_dma, zpci_refresh_global()
24 zdev->iommu_pages * PAGE_SIZE); zpci_refresh_global()
132 static int dma_update_trans(struct zpci_dev *zdev, unsigned long pa, dma_update_trans() argument
145 spin_lock_irqsave(&zdev->dma_table_lock, irq_flags); dma_update_trans()
146 if (!zdev->dma_table) { dma_update_trans()
152 entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr); dma_update_trans()
163 * With zdev->tlb_refresh == 0, rpcit is not required to establish new dma_update_trans()
169 if (!zdev->tlb_refresh && dma_update_trans()
174 rc = zpci_refresh_trans((u64) zdev->fh << 32, start_dma_addr, dma_update_trans()
182 entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr); dma_update_trans()
190 spin_unlock_irqrestore(&zdev->dma_table_lock, irq_flags); dma_update_trans()
220 static unsigned long __dma_alloc_iommu(struct zpci_dev *zdev, __dma_alloc_iommu() argument
225 boundary_size = ALIGN(dma_get_seg_boundary(&zdev->pdev->dev) + 1, __dma_alloc_iommu()
227 return iommu_area_alloc(zdev->iommu_bitmap, zdev->iommu_pages, __dma_alloc_iommu()
231 static unsigned long dma_alloc_iommu(struct zpci_dev *zdev, int size) dma_alloc_iommu() argument
236 spin_lock_irqsave(&zdev->iommu_bitmap_lock, flags); dma_alloc_iommu()
237 offset = __dma_alloc_iommu(zdev, zdev->next_bit, size); dma_alloc_iommu()
240 offset = __dma_alloc_iommu(zdev, 0, size); dma_alloc_iommu()
245 zdev->next_bit = offset + size; dma_alloc_iommu()
246 if (!zdev->tlb_refresh && !s390_iommu_strict && wrap) dma_alloc_iommu()
248 zpci_refresh_global(zdev); dma_alloc_iommu()
250 spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags); dma_alloc_iommu()
254 static void dma_free_iommu(struct zpci_dev *zdev, unsigned long offset, int size) dma_free_iommu() argument
258 spin_lock_irqsave(&zdev->iommu_bitmap_lock, flags); dma_free_iommu()
259 if (!zdev->iommu_bitmap) dma_free_iommu()
261 bitmap_clear(zdev->iommu_bitmap, offset, size); dma_free_iommu()
266 if (!s390_iommu_strict && offset >= zdev->next_bit) dma_free_iommu()
267 zdev->next_bit = offset + size; dma_free_iommu()
269 spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags); dma_free_iommu()
287 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); s390_dma_map_pages() local
296 iommu_page_index = dma_alloc_iommu(zdev, nr_pages); s390_dma_map_pages()
305 dma_addr = zdev->start_dma + iommu_page_index * PAGE_SIZE; s390_dma_map_pages()
306 if (dma_addr + size > zdev->end_dma) { s390_dma_map_pages()
314 ret = dma_update_trans(zdev, pa, dma_addr, size, flags); s390_dma_map_pages()
318 atomic64_add(nr_pages, &zdev->mapped_pages); s390_dma_map_pages()
322 dma_free_iommu(zdev, iommu_page_index, nr_pages); s390_dma_map_pages()
333 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); s390_dma_unmap_pages() local
339 ret = dma_update_trans(zdev, 0, dma_addr, npages * PAGE_SIZE, s390_dma_unmap_pages()
347 atomic64_add(npages, &zdev->unmapped_pages); s390_dma_unmap_pages()
348 iommu_page_index = (dma_addr - zdev->start_dma) >> PAGE_SHIFT; s390_dma_unmap_pages()
349 dma_free_iommu(zdev, iommu_page_index, npages); s390_dma_unmap_pages()
356 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); s390_dma_alloc() local
376 atomic64_add(size / PAGE_SIZE, &zdev->allocated_pages); s390_dma_alloc()
386 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); s390_dma_free() local
389 atomic64_sub(size / PAGE_SIZE, &zdev->allocated_pages); s390_dma_free()
441 int zpci_dma_init_device(struct zpci_dev *zdev) zpci_dma_init_device() argument
450 WARN_ON(zdev->s390_domain); zpci_dma_init_device()
452 spin_lock_init(&zdev->iommu_bitmap_lock); zpci_dma_init_device()
453 spin_lock_init(&zdev->dma_table_lock); zpci_dma_init_device()
455 zdev->dma_table = dma_alloc_cpu_table(); zpci_dma_init_device()
456 if (!zdev->dma_table) { zpci_dma_init_device()
461 zdev->iommu_size = (unsigned long) high_memory - PAGE_OFFSET; zpci_dma_init_device()
462 zdev->iommu_pages = zdev->iommu_size >> PAGE_SHIFT; zpci_dma_init_device()
463 zdev->iommu_bitmap = vzalloc(zdev->iommu_pages / 8); zpci_dma_init_device()
464 if (!zdev->iommu_bitmap) { zpci_dma_init_device()
469 rc = zpci_register_ioat(zdev, zpci_dma_init_device()
471 zdev->start_dma + PAGE_OFFSET, zpci_dma_init_device()
472 zdev->start_dma + zdev->iommu_size - 1, zpci_dma_init_device()
473 (u64) zdev->dma_table); zpci_dma_init_device()
479 dma_free_cpu_table(zdev->dma_table); zpci_dma_init_device()
484 void zpci_dma_exit_device(struct zpci_dev *zdev) zpci_dma_exit_device() argument
491 WARN_ON(zdev->s390_domain); zpci_dma_exit_device()
493 zpci_unregister_ioat(zdev, 0); zpci_dma_exit_device()
494 dma_cleanup_tables(zdev->dma_table); zpci_dma_exit_device()
495 zdev->dma_table = NULL; zpci_dma_exit_device()
496 vfree(zdev->iommu_bitmap); zpci_dma_exit_device()
497 zdev->iommu_bitmap = NULL; zpci_dma_exit_device()
498 zdev->next_bit = 0; zpci_dma_exit_device()
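Note: the pci_dma.c hits show the IOMMU bitmap allocator with its lazy-flush wrap-around: allocation continues from zdev->next_bit, retries from the start of the aperture on wrap, and in non-strict mode a wrap is the only point where the whole translation table is refreshed. A reduced sketch of dma_alloc_iommu() assembled from those fragments; the -1 "no space" convention of __dma_alloc_iommu() is assumed from its iommu_area_alloc() backend.

    static unsigned long dma_alloc_iommu(struct zpci_dev *zdev, int size)
    {
        unsigned long offset, flags;
        int wrap = 0;

        spin_lock_irqsave(&zdev->iommu_bitmap_lock, flags);
        offset = __dma_alloc_iommu(zdev, zdev->next_bit, size);
        if (offset == -1) {
            /* wrap around and retry from the start of the aperture */
            offset = __dma_alloc_iommu(zdev, 0, size);
            wrap = 1;
        }
        if (offset != -1) {
            zdev->next_bit = offset + size;
            if (!zdev->tlb_refresh && !s390_iommu_strict && wrap)
                /* lazy unmap: one global refresh per wrap instead of
                 * one TLB flush per unmapped range */
                zpci_refresh_global(zdev);
        }
        spin_unlock_irqrestore(&zdev->iommu_bitmap_lock, flags);
        return offset;
    }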
pci_sysfs.c
19 struct zpci_dev *zdev = to_zpci(to_pci_dev(dev)); \
21 return sprintf(buf, fmt, zdev->member); \
41 struct zpci_dev *zdev = to_zpci(pdev); recover_store() local
49 ret = zpci_disable_device(zdev); recover_store()
53 ret = zpci_enable_device(zdev); recover_store()
57 pci_rescan_bus(zdev->bus); recover_store()
74 struct zpci_dev *zdev = to_zpci(pdev); util_string_read() local
76 return memory_read_from_buffer(buf, count, &off, zdev->util_str, util_string_read()
77 sizeof(zdev->util_str)); util_string_read()
/linux-4.4.14/drivers/pci/hotplug/
s390_pci_hpc.c
40 struct zpci_dev *zdev; member in struct:slot
45 int ret = sclp_pci_configure(slot->zdev->fid); slot_configure()
47 zpci_dbg(3, "conf fid:%x, rc:%d\n", slot->zdev->fid, ret); slot_configure()
49 slot->zdev->state = ZPCI_FN_STATE_CONFIGURED; slot_configure()
56 int ret = sclp_pci_deconfigure(slot->zdev->fid); slot_deconfigure()
58 zpci_dbg(3, "deconf fid:%x, rc:%d\n", slot->zdev->fid, ret); slot_deconfigure()
60 slot->zdev->state = ZPCI_FN_STATE_STANDBY; slot_deconfigure()
70 if (slot->zdev->state != ZPCI_FN_STATE_STANDBY) enable_slot()
77 rc = zpci_enable_device(slot->zdev); enable_slot()
81 pci_scan_slot(slot->zdev->bus, ZPCI_DEVFN); enable_slot()
83 pci_bus_add_devices(slot->zdev->bus); enable_slot()
98 if (!zpci_fn_configured(slot->zdev->state)) disable_slot()
101 if (slot->zdev->pdev) disable_slot()
102 pci_stop_and_remove_bus_device_locked(slot->zdev->pdev); disable_slot()
104 rc = zpci_disable_device(slot->zdev); disable_slot()
115 switch (slot->zdev->state) { get_power_status()
149 int zpci_init_slot(struct zpci_dev *zdev) zpci_init_slot() argument
157 if (!zdev) zpci_init_slot()
170 slot->zdev = zdev; zpci_init_slot()
183 snprintf(name, SLOT_NAME_SIZE, "%08x", zdev->fid); zpci_init_slot()
184 rc = pci_hp_register(slot->hotplug_slot, zdev->bus, zpci_init_slot()
202 void zpci_exit_slot(struct zpci_dev *zdev) zpci_exit_slot() argument
209 if (slot->zdev != zdev) zpci_exit_slot()
/linux-4.4.14/drivers/iommu/
s390-iommu.c
32 struct zpci_dev *zdev; member in struct:s390_domain_device
88 struct zpci_dev *zdev = to_pci_dev(dev)->sysdata; s390_iommu_attach_device() local
93 if (!zdev) s390_iommu_attach_device()
100 if (zdev->dma_table) s390_iommu_attach_device()
101 zpci_dma_exit_device(zdev); s390_iommu_attach_device()
103 zdev->dma_table = s390_domain->dma_table; s390_iommu_attach_device()
104 rc = zpci_register_ioat(zdev, 0, zdev->start_dma + PAGE_OFFSET, s390_iommu_attach_device()
105 zdev->start_dma + zdev->iommu_size - 1, s390_iommu_attach_device()
106 (u64) zdev->dma_table); s390_iommu_attach_device()
113 domain->geometry.aperture_start = zdev->start_dma; s390_iommu_attach_device()
114 domain->geometry.aperture_end = zdev->end_dma; s390_iommu_attach_device()
117 } else if (domain->geometry.aperture_start != zdev->start_dma || s390_iommu_attach_device()
118 domain->geometry.aperture_end != zdev->end_dma) { s390_iommu_attach_device()
123 domain_device->zdev = zdev; s390_iommu_attach_device()
124 zdev->s390_domain = s390_domain; s390_iommu_attach_device()
131 zpci_dma_init_device(zdev); s390_iommu_attach_device()
141 struct zpci_dev *zdev = to_pci_dev(dev)->sysdata; s390_iommu_detach_device() local
146 if (!zdev) s390_iommu_detach_device()
152 if (domain_device->zdev == zdev) { s390_iommu_detach_device()
162 zdev->s390_domain = NULL; s390_iommu_detach_device()
163 zpci_unregister_ioat(zdev, 0); s390_iommu_detach_device()
164 zpci_dma_init_device(zdev); s390_iommu_detach_device()
188 struct zpci_dev *zdev = to_pci_dev(dev)->sysdata; s390_iommu_remove_device() local
202 if (zdev && zdev->s390_domain) { s390_iommu_remove_device()
244 rc = zpci_refresh_trans((u64) domain_device->zdev->fh << 32, s390_iommu_update_trans()
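Note: the s390-iommu.c hits show what attaching a PCI device to an IOMMU domain means on s390: drop the per-device DMA setup, point the device at the domain's shared DMA table, and re-register the I/O address translation over the device's DMA aperture. A trimmed sketch of that core; the attach_dev signature is the generic IOMMU one, the container_of() member name is assumed, and the domain_device list bookkeeping visible in the hits is reduced to the final assignment.

    static int s390_iommu_attach_device(struct iommu_domain *domain,
                                        struct device *dev)
    {
        struct s390_domain *s390_domain =
                container_of(domain, struct s390_domain, domain); /* member name assumed */
        struct zpci_dev *zdev = to_pci_dev(dev)->sysdata;
        int rc;

        if (!zdev)
            return -ENODEV;                          /* assumed error code */

        /* Tear down the per-device translation before switching tables. */
        if (zdev->dma_table)
            zpci_dma_exit_device(zdev);

        zdev->dma_table = s390_domain->dma_table;
        rc = zpci_register_ioat(zdev, 0, zdev->start_dma + PAGE_OFFSET,
                                zdev->start_dma + zdev->iommu_size - 1,
                                (u64) zdev->dma_table);
        if (rc)
            return rc;                               /* unwind not visible in the hits */

        /* The first device in a domain fixes its aperture geometry;
         * later devices must match zdev->start_dma/end_dma. */
        domain->geometry.aperture_start = zdev->start_dma;
        domain->geometry.aperture_end = zdev->end_dma;

        zdev->s390_domain = s390_domain;
        return 0;
    }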
/linux-4.4.14/arch/s390/include/asm/
pci.h
128 static inline bool zdev_enabled(struct zpci_dev *zdev) zdev_enabled() argument
130 return (zdev->fh & (1UL << 31)) ? true : false; zdev_enabled()
170 static inline int zpci_init_slot(struct zpci_dev *zdev) zpci_init_slot() argument
174 static inline void zpci_exit_slot(struct zpci_dev *zdev) {} zpci_exit_slot() argument
/linux-4.4.14/drivers/video/fbdev/
cirrusfb.c
1945 struct zorro_dev *zdev = to_zorro_dev(info->device); cirrusfb_zorro_unmap() local
1952 zorro_release_device(zdev); cirrusfb_zorro_unmap()

Completed in 560 milliseconds