Lines Matching refs:zdev
82 struct zcrypt_device *zdev = to_ap_dev(dev)->private; in zcrypt_type_show() local
83 return snprintf(buf, PAGE_SIZE, "%s\n", zdev->type_string); in zcrypt_type_show()
91 struct zcrypt_device *zdev = to_ap_dev(dev)->private; in zcrypt_online_show() local
92 return snprintf(buf, PAGE_SIZE, "%d\n", zdev->online); in zcrypt_online_show()
99 struct zcrypt_device *zdev = to_ap_dev(dev)->private; in zcrypt_online_store() local
104 zdev->online = online; in zcrypt_online_store()
105 ZCRYPT_DBF_DEV(DBF_INFO, zdev, "dev%04xo%dman", zdev->ap_dev->qid, in zcrypt_online_store()
106 zdev->online); in zcrypt_online_store()
108 ap_flush_queue(zdev->ap_dev); in zcrypt_online_store()
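The show/store lines above expose the per-device online flag through a device attribute; going offline also flushes the AP queue. A minimal userspace sketch of that parse / validate / toggle / flush flow follows; struct fake_dev and flush_queue() are hypothetical stand-ins for struct zcrypt_device and ap_flush_queue(), not the real kernel helpers.

/* Sketch of the online-store logic: accept only "0" or "1",
 * update the flag, and flush queued work when going offline. */
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>

struct fake_dev {
	int online;
};

static void flush_queue(struct fake_dev *dev)
{
	/* Stand-in for ap_flush_queue(): drop queued requests. */
	printf("flushing queue (online=%d)\n", dev->online);
}

static int online_store(struct fake_dev *dev, const char *buf)
{
	char *end;
	long online = strtol(buf, &end, 10);

	if (end == buf || (online != 0 && online != 1))
		return -EINVAL;
	dev->online = (int)online;
	if (!online)
		flush_queue(dev);	/* going offline discards pending work */
	return 0;
}

int main(void)
{
	struct fake_dev dev = { .online = 1 };

	online_store(&dev, "0");
	printf("online=%d\n", dev.online);
	return 0;
}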
150 static void __zcrypt_increase_preference(struct zcrypt_device *zdev) in __zcrypt_increase_preference() argument
155 if (zdev->speed_rating == 0) in __zcrypt_increase_preference()
157 for (l = zdev->list.prev; l != &zcrypt_device_list; l = l->prev) { in __zcrypt_increase_preference()
160 (zdev->request_count + 1) * zdev->speed_rating && in __zcrypt_increase_preference()
164 if (l == zdev->list.prev) in __zcrypt_increase_preference()
167 list_move(&zdev->list, l); in __zcrypt_increase_preference()
178 static void __zcrypt_decrease_preference(struct zcrypt_device *zdev) in __zcrypt_decrease_preference() argument
183 if (zdev->speed_rating == 0) in __zcrypt_decrease_preference()
185 for (l = zdev->list.next; l != &zcrypt_device_list; l = l->next) { in __zcrypt_decrease_preference()
188 (zdev->request_count + 1) * zdev->speed_rating || in __zcrypt_decrease_preference()
192 if (l == zdev->list.next) in __zcrypt_decrease_preference()
195 list_move_tail(&zdev->list, l); in __zcrypt_decrease_preference()
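__zcrypt_increase_preference() and __zcrypt_decrease_preference() keep zcrypt_device_list ordered by the weight (request_count + 1) * speed_rating, lowest first, so the dispatch loops can take the first eligible entry. Below is a userspace sketch of that key only; the array-plus-qsort reordering and the struct dev type are simplifications of the kernel's list_move()/list_move_tail() bookkeeping, and the special handling of speed_rating == 0 is glossed over.

/* Devices sorted by (request_count + 1) * speed_rating, lowest first. */
#include <stdio.h>
#include <stdlib.h>

struct dev {
	const char *name;
	unsigned long request_count;
	int speed_rating;	/* lower is faster; 0 means unrated in the kernel */
};

static unsigned long pref_key(const struct dev *d)
{
	return (d->request_count + 1) * (unsigned long)d->speed_rating;
}

static int cmp_pref(const void *a, const void *b)
{
	unsigned long ka = pref_key((const struct dev *)a);
	unsigned long kb = pref_key((const struct dev *)b);

	return (ka > kb) - (ka < kb);
}

int main(void)
{
	struct dev devs[] = {
		{ "cex_a", 3, 100 },
		{ "cex_b", 0, 300 },
		{ "cex_c", 1, 100 },
	};
	size_t i, n = sizeof(devs) / sizeof(devs[0]);

	qsort(devs, n, sizeof(devs[0]), cmp_pref);
	for (i = 0; i < n; i++)
		printf("%s key=%lu\n", devs[i].name, pref_key(&devs[i]));
	return 0;
}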
200 struct zcrypt_device *zdev = in zcrypt_device_release() local
202 zcrypt_device_free(zdev); in zcrypt_device_release()
205 void zcrypt_device_get(struct zcrypt_device *zdev) in zcrypt_device_get() argument
207 kref_get(&zdev->refcount); in zcrypt_device_get()
211 int zcrypt_device_put(struct zcrypt_device *zdev) in zcrypt_device_put() argument
213 return kref_put(&zdev->refcount, zcrypt_device_release); in zcrypt_device_put()
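zcrypt_device_get()/zcrypt_device_put() wrap a kref whose final put runs zcrypt_device_release() and frees the object. A hedged userspace sketch of the same get/put-with-release pattern follows; a plain int replaces struct kref, so it is single-threaded only.

/* Last put runs the release callback, as with kref_put(). */
#include <stdio.h>
#include <stdlib.h>

struct obj {
	int refcount;
	void (*release)(struct obj *);
};

static void obj_get(struct obj *o)
{
	o->refcount++;
}

/* Returns 1 if the object was released, 0 otherwise. */
static int obj_put(struct obj *o)
{
	if (--o->refcount == 0) {
		o->release(o);
		return 1;
	}
	return 0;
}

static void obj_release(struct obj *o)
{
	printf("releasing object\n");
	free(o);
}

int main(void)
{
	struct obj *o = malloc(sizeof(*o));

	o->refcount = 1;	/* like kref_init() */
	o->release = obj_release;
	obj_get(o);		/* temporary user */
	obj_put(o);		/* drops to 1, object stays */
	obj_put(o);		/* drops to 0, release runs */
	return 0;
}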
219 struct zcrypt_device *zdev; in zcrypt_device_alloc() local
221 zdev = kzalloc(sizeof(struct zcrypt_device), GFP_KERNEL); in zcrypt_device_alloc()
222 if (!zdev) in zcrypt_device_alloc()
224 zdev->reply.message = kmalloc(max_response_size, GFP_KERNEL); in zcrypt_device_alloc()
225 if (!zdev->reply.message) in zcrypt_device_alloc()
227 zdev->reply.length = max_response_size; in zcrypt_device_alloc()
228 spin_lock_init(&zdev->lock); in zcrypt_device_alloc()
229 INIT_LIST_HEAD(&zdev->list); in zcrypt_device_alloc()
230 zdev->dbf_area = zcrypt_dbf_devices; in zcrypt_device_alloc()
231 return zdev; in zcrypt_device_alloc()
234 kfree(zdev); in zcrypt_device_alloc()
239 void zcrypt_device_free(struct zcrypt_device *zdev) in zcrypt_device_free() argument
241 kfree(zdev->reply.message); in zcrypt_device_free()
242 kfree(zdev); in zcrypt_device_free()
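zcrypt_device_alloc() allocates the device and then its reply buffer, unwinding the first allocation if the second fails; zcrypt_device_free() releases both. A userspace sketch of that two-step allocation with a goto-based error path, using plain calloc/malloc and a simplified stand-in type:

/* Allocate device, then reply buffer; unwind on failure. */
#include <stdlib.h>

struct reply {
	void *message;
	size_t length;
};

struct fake_zdev {
	struct reply reply;
};

static struct fake_zdev *fake_zdev_alloc(size_t max_response_size)
{
	struct fake_zdev *zdev;

	zdev = calloc(1, sizeof(*zdev));	/* like kzalloc() */
	if (!zdev)
		return NULL;
	zdev->reply.message = malloc(max_response_size);
	if (!zdev->reply.message)
		goto out_free;
	zdev->reply.length = max_response_size;
	return zdev;

out_free:
	free(zdev);
	return NULL;
}

static void fake_zdev_free(struct fake_zdev *zdev)
{
	free(zdev->reply.message);
	free(zdev);
}

int main(void)
{
	struct fake_zdev *zdev = fake_zdev_alloc(4096);

	if (zdev)
		fake_zdev_free(zdev);
	return 0;
}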
252 int zcrypt_device_register(struct zcrypt_device *zdev) in zcrypt_device_register() argument
256 if (!zdev->ops) in zcrypt_device_register()
258 rc = sysfs_create_group(&zdev->ap_dev->device.kobj, in zcrypt_device_register()
262 get_device(&zdev->ap_dev->device); in zcrypt_device_register()
263 kref_init(&zdev->refcount); in zcrypt_device_register()
265 zdev->online = 1; /* New devices are online by default. */ in zcrypt_device_register()
266 ZCRYPT_DBF_DEV(DBF_INFO, zdev, "dev%04xo%dreg", zdev->ap_dev->qid, in zcrypt_device_register()
267 zdev->online); in zcrypt_device_register()
268 list_add_tail(&zdev->list, &zcrypt_device_list); in zcrypt_device_register()
269 __zcrypt_increase_preference(zdev); in zcrypt_device_register()
272 if (zdev->ops->rng) { in zcrypt_device_register()
282 list_del_init(&zdev->list); in zcrypt_device_register()
284 sysfs_remove_group(&zdev->ap_dev->device.kobj, in zcrypt_device_register()
286 put_device(&zdev->ap_dev->device); in zcrypt_device_register()
287 zcrypt_device_put(zdev); in zcrypt_device_register()
299 void zcrypt_device_unregister(struct zcrypt_device *zdev) in zcrypt_device_unregister() argument
301 if (zdev->ops->rng) in zcrypt_device_unregister()
305 list_del_init(&zdev->list); in zcrypt_device_unregister()
307 sysfs_remove_group(&zdev->ap_dev->device.kobj, in zcrypt_device_unregister()
309 put_device(&zdev->ap_dev->device); in zcrypt_device_unregister()
310 zcrypt_device_put(zdev); in zcrypt_device_unregister()
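zcrypt_device_register() builds the device up step by step (sysfs group, device reference, kref, list insertion) and the error path tears completed steps down again; zcrypt_device_unregister() mirrors that teardown. The sketch below shows only that construct-in-order / undo-in-reverse shape; the step_*()/undo_*() functions are hypothetical placeholders, not the exact kernel step list.

/* Each setup step has a matching teardown; failures unwind in reverse. */
#include <stdio.h>

static int step_sysfs(void)   { return 0; }	/* sysfs_create_group() */
static void undo_sysfs(void)  { }		/* sysfs_remove_group() */
static int step_getdev(void)  { return 0; }	/* get_device()         */
static void undo_getdev(void) { }		/* put_device()         */
static int step_list(void)    { return 0; }	/* list_add_tail()      */
static void undo_list(void)   { }		/* list_del_init()      */

static int device_register(void)
{
	int rc;

	rc = step_sysfs();
	if (rc)
		goto out;
	rc = step_getdev();
	if (rc)
		goto out_sysfs;
	rc = step_list();
	if (rc)
		goto out_putdev;
	return 0;

out_putdev:
	undo_getdev();
out_sysfs:
	undo_sysfs();
out:
	return rc;
}

static void device_unregister(void)
{
	undo_list();
	undo_sysfs();
	undo_getdev();
}

int main(void)
{
	if (!device_register())
		device_unregister();
	return 0;
}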
423 struct zcrypt_device *zdev; in zcrypt_rsa_modexpo() local
436 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_rsa_modexpo()
437 if (!zdev->online || in zcrypt_rsa_modexpo()
438 !zdev->ops->rsa_modexpo || in zcrypt_rsa_modexpo()
439 zdev->min_mod_size > mex->inputdatalength || in zcrypt_rsa_modexpo()
440 zdev->max_mod_size < mex->inputdatalength) in zcrypt_rsa_modexpo()
442 zcrypt_device_get(zdev); in zcrypt_rsa_modexpo()
443 get_device(&zdev->ap_dev->device); in zcrypt_rsa_modexpo()
444 zdev->request_count++; in zcrypt_rsa_modexpo()
445 __zcrypt_decrease_preference(zdev); in zcrypt_rsa_modexpo()
446 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { in zcrypt_rsa_modexpo()
448 rc = zdev->ops->rsa_modexpo(zdev, mex); in zcrypt_rsa_modexpo()
450 module_put(zdev->ap_dev->drv->driver.owner); in zcrypt_rsa_modexpo()
454 zdev->request_count--; in zcrypt_rsa_modexpo()
455 __zcrypt_increase_preference(zdev); in zcrypt_rsa_modexpo()
456 put_device(&zdev->ap_dev->device); in zcrypt_rsa_modexpo()
457 zcrypt_device_put(zdev); in zcrypt_rsa_modexpo()
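zcrypt_rsa_modexpo() (and the request paths that follow) walks the preference-ordered list, skips devices that are offline or cannot handle the input length, pins the first match (refcount, device reference, module reference), bumps its request_count, calls the backend op, then undoes the bookkeeping. A userspace sketch of that select / pin / call / unpin shape, with simplified types and a fake op standing in for ops->rsa_modexpo:

/* Scan the ordered list, skip ineligible devices, pin the first match,
 * run the op, then unpin.  Pinning is reduced to a counter here. */
#include <stdio.h>

struct fake_dev {
	int online;
	int min_mod_size, max_mod_size;
	unsigned long request_count;
	int (*op)(struct fake_dev *dev, int inputlen);
};

static int fake_op(struct fake_dev *dev, int inputlen)
{
	printf("handled %d-byte request\n", inputlen);
	return 0;
}

static int dispatch(struct fake_dev *devs, int ndev, int inputlen)
{
	int i, rc;

	for (i = 0; i < ndev; i++) {
		struct fake_dev *dev = &devs[i];

		if (!dev->online || !dev->op ||
		    dev->min_mod_size > inputlen ||
		    dev->max_mod_size < inputlen)
			continue;		/* not eligible, keep scanning  */
		dev->request_count++;		/* pin: kernel also takes the    */
						/* kref, device and module refs  */
		rc = dev->op(dev, inputlen);
		dev->request_count--;		/* unpin */
		return rc;
	}
	return -1;				/* no usable device */
}

int main(void)
{
	struct fake_dev devs[] = {
		{ .online = 0, .min_mod_size = 1, .max_mod_size = 512, .op = fake_op },
		{ .online = 1, .min_mod_size = 1, .max_mod_size = 512, .op = fake_op },
	};

	return dispatch(devs, 2, 256) ? 1 : 0;
}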
467 struct zcrypt_device *zdev; in zcrypt_rsa_crt() local
484 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_rsa_crt()
485 if (!zdev->online || in zcrypt_rsa_crt()
486 !zdev->ops->rsa_modexpo_crt || in zcrypt_rsa_crt()
487 zdev->min_mod_size > crt->inputdatalength || in zcrypt_rsa_crt()
488 zdev->max_mod_size < crt->inputdatalength) in zcrypt_rsa_crt()
490 if (zdev->short_crt && crt->inputdatalength > 240) { in zcrypt_rsa_crt()
527 zcrypt_device_get(zdev); in zcrypt_rsa_crt()
528 get_device(&zdev->ap_dev->device); in zcrypt_rsa_crt()
529 zdev->request_count++; in zcrypt_rsa_crt()
530 __zcrypt_decrease_preference(zdev); in zcrypt_rsa_crt()
531 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { in zcrypt_rsa_crt()
533 rc = zdev->ops->rsa_modexpo_crt(zdev, crt); in zcrypt_rsa_crt()
535 module_put(zdev->ap_dev->drv->driver.owner); in zcrypt_rsa_crt()
539 zdev->request_count--; in zcrypt_rsa_crt()
540 __zcrypt_increase_preference(zdev); in zcrypt_rsa_crt()
541 put_device(&zdev->ap_dev->device); in zcrypt_rsa_crt()
542 zcrypt_device_put(zdev); in zcrypt_rsa_crt()
552 struct zcrypt_device *zdev; in zcrypt_send_cprb() local
556 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_send_cprb()
557 if (!zdev->online || !zdev->ops->send_cprb || in zcrypt_send_cprb()
558 (zdev->ops->variant == MSGTYPE06_VARIANT_EP11) || in zcrypt_send_cprb()
560 AP_QID_DEVICE(zdev->ap_dev->qid) != xcRB->user_defined)) in zcrypt_send_cprb()
562 zcrypt_device_get(zdev); in zcrypt_send_cprb()
563 get_device(&zdev->ap_dev->device); in zcrypt_send_cprb()
564 zdev->request_count++; in zcrypt_send_cprb()
565 __zcrypt_decrease_preference(zdev); in zcrypt_send_cprb()
566 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { in zcrypt_send_cprb()
568 rc = zdev->ops->send_cprb(zdev, xcRB); in zcrypt_send_cprb()
570 module_put(zdev->ap_dev->drv->driver.owner); in zcrypt_send_cprb()
574 zdev->request_count--; in zcrypt_send_cprb()
575 __zcrypt_increase_preference(zdev); in zcrypt_send_cprb()
576 put_device(&zdev->ap_dev->device); in zcrypt_send_cprb()
577 zcrypt_device_put(zdev); in zcrypt_send_cprb()
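zcrypt_send_cprb() adds one more filter to the same loop: the request's user_defined field can name a specific card, in which case only the device whose queue id maps to that card number is eligible (the elided line between 558 and 560 handles the auto-select case). A small sketch of that predicate; AUTOSELECT_SENTINEL and card_of() are simplified stand-ins for the kernel's auto-select constant and AP_QID_DEVICE(), and the bit layout is an assumption for illustration only.

/* Either auto-select, or require an exact card-number match. */
#include <stdio.h>

#define AUTOSELECT_SENTINEL 0xFFFFFFFFu

static unsigned int card_of(unsigned int qid)
{
	return (qid >> 8) & 0xff;	/* assumed card-from-qid extraction */
}

static int card_matches(unsigned int qid, unsigned int user_defined)
{
	return user_defined == AUTOSELECT_SENTINEL ||
	       card_of(qid) == user_defined;
}

int main(void)
{
	printf("%d %d\n",
	       card_matches(0x0500, 5),			/* targeted, matches */
	       card_matches(0x0500, AUTOSELECT_SENTINEL));	/* auto-select */
	return 0;
}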
606 struct zcrypt_device *zdev; in zcrypt_send_ep11_cprb() local
635 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_send_ep11_cprb()
637 if (!zdev->online || in zcrypt_send_ep11_cprb()
638 zdev->ops->variant != MSGTYPE06_VARIANT_EP11) in zcrypt_send_ep11_cprb()
642 if (!is_desired_ep11dev(zdev->ap_dev->qid, ep11_dev_list) && in zcrypt_send_ep11_cprb()
646 zcrypt_device_get(zdev); in zcrypt_send_ep11_cprb()
647 get_device(&zdev->ap_dev->device); in zcrypt_send_ep11_cprb()
648 zdev->request_count++; in zcrypt_send_ep11_cprb()
649 __zcrypt_decrease_preference(zdev); in zcrypt_send_ep11_cprb()
650 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { in zcrypt_send_ep11_cprb()
652 rc = zdev->ops->send_ep11_cprb(zdev, xcrb); in zcrypt_send_ep11_cprb()
654 module_put(zdev->ap_dev->drv->driver.owner); in zcrypt_send_ep11_cprb()
658 zdev->request_count--; in zcrypt_send_ep11_cprb()
659 __zcrypt_increase_preference(zdev); in zcrypt_send_ep11_cprb()
660 put_device(&zdev->ap_dev->device); in zcrypt_send_ep11_cprb()
661 zcrypt_device_put(zdev); in zcrypt_send_ep11_cprb()
671 struct zcrypt_device *zdev; in zcrypt_rng() local
675 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_rng()
676 if (!zdev->online || !zdev->ops->rng) in zcrypt_rng()
678 zcrypt_device_get(zdev); in zcrypt_rng()
679 get_device(&zdev->ap_dev->device); in zcrypt_rng()
680 zdev->request_count++; in zcrypt_rng()
681 __zcrypt_decrease_preference(zdev); in zcrypt_rng()
682 if (try_module_get(zdev->ap_dev->drv->driver.owner)) { in zcrypt_rng()
684 rc = zdev->ops->rng(zdev, buffer); in zcrypt_rng()
686 module_put(zdev->ap_dev->drv->driver.owner); in zcrypt_rng()
689 zdev->request_count--; in zcrypt_rng()
690 __zcrypt_increase_preference(zdev); in zcrypt_rng()
691 put_device(&zdev->ap_dev->device); in zcrypt_rng()
692 zcrypt_device_put(zdev); in zcrypt_rng()
702 struct zcrypt_device *zdev; in zcrypt_status_mask() local
706 list_for_each_entry(zdev, &zcrypt_device_list, list) in zcrypt_status_mask()
707 status[AP_QID_DEVICE(zdev->ap_dev->qid)] = in zcrypt_status_mask()
708 zdev->online ? zdev->user_space_type : 0x0d; in zcrypt_status_mask()
714 struct zcrypt_device *zdev; in zcrypt_qdepth_mask() local
718 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_qdepth_mask()
719 spin_lock(&zdev->ap_dev->lock); in zcrypt_qdepth_mask()
720 qdepth[AP_QID_DEVICE(zdev->ap_dev->qid)] = in zcrypt_qdepth_mask()
721 zdev->ap_dev->pendingq_count + in zcrypt_qdepth_mask()
722 zdev->ap_dev->requestq_count; in zcrypt_qdepth_mask()
723 spin_unlock(&zdev->ap_dev->lock); in zcrypt_qdepth_mask()
730 struct zcrypt_device *zdev; in zcrypt_perdev_reqcnt() local
734 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_perdev_reqcnt()
735 spin_lock(&zdev->ap_dev->lock); in zcrypt_perdev_reqcnt()
736 reqcnt[AP_QID_DEVICE(zdev->ap_dev->qid)] = in zcrypt_perdev_reqcnt()
737 zdev->ap_dev->total_request_count; in zcrypt_perdev_reqcnt()
738 spin_unlock(&zdev->ap_dev->lock); in zcrypt_perdev_reqcnt()
745 struct zcrypt_device *zdev; in zcrypt_pendingq_count() local
749 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_pendingq_count()
750 spin_lock(&zdev->ap_dev->lock); in zcrypt_pendingq_count()
751 pendingq_count += zdev->ap_dev->pendingq_count; in zcrypt_pendingq_count()
752 spin_unlock(&zdev->ap_dev->lock); in zcrypt_pendingq_count()
760 struct zcrypt_device *zdev; in zcrypt_requestq_count() local
764 list_for_each_entry(zdev, &zcrypt_device_list, list) { in zcrypt_requestq_count()
765 spin_lock(&zdev->ap_dev->lock); in zcrypt_requestq_count()
766 requestq_count += zdev->ap_dev->requestq_count; in zcrypt_requestq_count()
767 spin_unlock(&zdev->ap_dev->lock); in zcrypt_requestq_count()
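The status, qdepth, per-device request-count, pendingq and requestq helpers all share one shape: walk the device list and, per device, take the AP device lock while reading its counters. A userspace sketch of that per-device locked read; a pthread mutex stands in for the ap_device spinlock (build with -pthread).

/* For each device: lock, read counters, unlock, accumulate. */
#include <pthread.h>
#include <stdio.h>

struct fake_ap_dev {
	pthread_mutex_t lock;
	int pendingq_count;
	int requestq_count;
};

static int total_queued(struct fake_ap_dev *devs, int ndev)
{
	int i, total = 0;

	for (i = 0; i < ndev; i++) {
		pthread_mutex_lock(&devs[i].lock);
		total += devs[i].pendingq_count + devs[i].requestq_count;
		pthread_mutex_unlock(&devs[i].lock);
	}
	return total;
}

int main(void)
{
	struct fake_ap_dev devs[2] = {
		{ PTHREAD_MUTEX_INITIALIZER, 1, 2 },
		{ PTHREAD_MUTEX_INITIALIZER, 0, 3 },
	};

	printf("queued: %d\n", total_queued(devs, 2));
	return 0;
}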
775 struct zcrypt_device *zdev; in zcrypt_count_type() local
779 list_for_each_entry(zdev, &zcrypt_device_list, list) in zcrypt_count_type()
780 if (zdev->user_space_type == type) in zcrypt_count_type()
1259 struct zcrypt_device *zdev; in zcrypt_disable_card() local
1262 list_for_each_entry(zdev, &zcrypt_device_list, list) in zcrypt_disable_card()
1263 if (AP_QID_DEVICE(zdev->ap_dev->qid) == index) { in zcrypt_disable_card()
1264 zdev->online = 0; in zcrypt_disable_card()
1265 ap_flush_queue(zdev->ap_dev); in zcrypt_disable_card()
1273 struct zcrypt_device *zdev; in zcrypt_enable_card() local
1276 list_for_each_entry(zdev, &zcrypt_device_list, list) in zcrypt_enable_card()
1277 if (AP_QID_DEVICE(zdev->ap_dev->qid) == index) { in zcrypt_enable_card()
1278 zdev->online = 1; in zcrypt_enable_card()