Lines Matching refs:cc
54 struct crypt_config *cc; member
77 int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
79 void (*dtr)(struct crypt_config *cc);
80 int (*init)(struct crypt_config *cc);
81 int (*wipe)(struct crypt_config *cc);
82 int (*generator)(struct crypt_config *cc, u8 *iv,
84 int (*post)(struct crypt_config *cc, u8 *iv,
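For context, source lines 77-84 above are the members of dm-crypt's per-cipher IV-generation vtable. A minimal reconstruction of that structure, assuming the mainline name crypt_iv_operations and the const char *opts parameter on ctr (neither appears in the matches above):

struct crypt_iv_operations {
	int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
		   const char *opts);		/* parse ivopts, allocate state */
	void (*dtr)(struct crypt_config *cc);	/* free that state */
	int (*init)(struct crypt_config *cc);	/* derive per-key material */
	int (*wipe)(struct crypt_config *cc);	/* erase per-key material */
	int (*generator)(struct crypt_config *cc, u8 *iv,
			 struct dm_crypt_request *dmreq); /* IV before en/decrypt */
	int (*post)(struct crypt_config *cc, u8 *iv,
		    struct dm_crypt_request *dmreq); /* fixup after crypt (lmk/tcw) */
};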
187 static u8 *iv_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq);
192 static struct crypto_ablkcipher *any_tfm(struct crypt_config *cc) in any_tfm() argument
194 return cc->tfms[0]; in any_tfm()
244 static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain_gen() argument
247 memset(iv, 0, cc->iv_size); in crypt_iv_plain_gen()
253 static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain64_gen() argument
256 memset(iv, 0, cc->iv_size); in crypt_iv_plain64_gen()
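The plain and plain64 generators differ only in how much of the sector number lands in the zeroed IV. A sketch filling in the stores elided from the matches above, assuming the little-endian cpu_to_le32/cpu_to_le64 encoding used by mainline:

static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv,
			      struct dm_crypt_request *dmreq)
{
	memset(iv, 0, cc->iv_size);
	/* low 32 bits of the sector number, little-endian */
	*(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff);
	return 0;
}

static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv,
				struct dm_crypt_request *dmreq)
{
	memset(iv, 0, cc->iv_size);
	/* full 64-bit sector number, little-endian */
	*(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);
	return 0;
}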
263 static int crypt_iv_essiv_init(struct crypt_config *cc) in crypt_iv_essiv_init() argument
265 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_init()
271 sg_init_one(&sg, cc->key, cc->key_size); in crypt_iv_essiv_init()
275 err = crypto_hash_digest(&desc, &sg, cc->key_size, essiv->salt); in crypt_iv_essiv_init()
279 essiv_tfm = cc->iv_private; in crypt_iv_essiv_init()
290 static int crypt_iv_essiv_wipe(struct crypt_config *cc) in crypt_iv_essiv_wipe() argument
292 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_wipe()
299 essiv_tfm = cc->iv_private; in crypt_iv_essiv_wipe()
308 static struct crypto_cipher *setup_essiv_cpu(struct crypt_config *cc, in setup_essiv_cpu() argument
316 essiv_tfm = crypto_alloc_cipher(cc->cipher, 0, CRYPTO_ALG_ASYNC); in setup_essiv_cpu()
323 crypto_ablkcipher_ivsize(any_tfm(cc))) { in setup_essiv_cpu()
340 static void crypt_iv_essiv_dtr(struct crypt_config *cc) in crypt_iv_essiv_dtr() argument
343 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_dtr()
351 essiv_tfm = cc->iv_private; in crypt_iv_essiv_dtr()
356 cc->iv_private = NULL; in crypt_iv_essiv_dtr()
359 static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_essiv_ctr() argument
387 cc->iv_gen_private.essiv.salt = salt; in crypt_iv_essiv_ctr()
388 cc->iv_gen_private.essiv.hash_tfm = hash_tfm; in crypt_iv_essiv_ctr()
390 essiv_tfm = setup_essiv_cpu(cc, ti, salt, in crypt_iv_essiv_ctr()
393 crypt_iv_essiv_dtr(cc); in crypt_iv_essiv_ctr()
396 cc->iv_private = essiv_tfm; in crypt_iv_essiv_ctr()
407 static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_essiv_gen() argument
410 struct crypto_cipher *essiv_tfm = cc->iv_private; in crypt_iv_essiv_gen()
412 memset(iv, 0, cc->iv_size); in crypt_iv_essiv_gen()
419 static int crypt_iv_benbi_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_benbi_ctr() argument
422 unsigned bs = crypto_ablkcipher_blocksize(any_tfm(cc)); in crypt_iv_benbi_ctr()
438 cc->iv_gen_private.benbi.shift = 9 - log; in crypt_iv_benbi_ctr()
443 static void crypt_iv_benbi_dtr(struct crypt_config *cc) in crypt_iv_benbi_dtr() argument
447 static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_benbi_gen() argument
452 memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */ in crypt_iv_benbi_gen()
454 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
455 put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64))); in crypt_iv_benbi_gen()
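The shift stored at source line 438 turns a 512-byte sector number into a 1-based, big-endian count of cipher-wide blocks. A worked example with hypothetical values, assuming a 16-byte cipher block so ilog2(16) = 4 and shift = 9 - 4 = 5:

u64 sector = 10;		/* hypothetical sector number */
int shift  = 9 - 4;		/* 9 - ilog2(cipher block size) */
__be64 val = cpu_to_be64((sector << shift) + 1);	/* (10 << 5) + 1 = 321 */
/* put_unaligned() stores val in the last 8 IV bytes; the memset
 * at source line 452 already zeroed everything before them. */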
460 static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_null_gen() argument
463 memset(iv, 0, cc->iv_size); in crypt_iv_null_gen()
468 static void crypt_iv_lmk_dtr(struct crypt_config *cc) in crypt_iv_lmk_dtr() argument
470 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_dtr()
480 static int crypt_iv_lmk_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_lmk_ctr() argument
483 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_ctr()
492 if (cc->key_parts == cc->tfms_count) { in crypt_iv_lmk_ctr()
499 crypt_iv_lmk_dtr(cc); in crypt_iv_lmk_ctr()
507 static int crypt_iv_lmk_init(struct crypt_config *cc) in crypt_iv_lmk_init() argument
509 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_init()
510 int subkey_size = cc->key_size / cc->key_parts; in crypt_iv_lmk_init()
514 memcpy(lmk->seed, cc->key + (cc->tfms_count * subkey_size), in crypt_iv_lmk_init()
520 static int crypt_iv_lmk_wipe(struct crypt_config *cc) in crypt_iv_lmk_wipe() argument
522 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_wipe()
530 static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_one() argument
534 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_one()
574 memcpy(iv, &md5state.hash, cc->iv_size); in crypt_iv_lmk_one()
579 static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_gen() argument
587 r = crypt_iv_lmk_one(cc, iv, dmreq, src + dmreq->sg_in.offset); in crypt_iv_lmk_gen()
590 memset(iv, 0, cc->iv_size); in crypt_iv_lmk_gen()
595 static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_post() argument
605 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_lmk_post()
609 crypto_xor(dst + dmreq->sg_out.offset, iv, cc->iv_size); in crypt_iv_lmk_post()
615 static void crypt_iv_tcw_dtr(struct crypt_config *cc) in crypt_iv_tcw_dtr() argument
617 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_dtr()
629 static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_tcw_ctr() argument
632 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_ctr()
634 if (cc->key_size <= (cc->iv_size + TCW_WHITENING_SIZE)) { in crypt_iv_tcw_ctr()
645 tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL); in crypt_iv_tcw_ctr()
648 crypt_iv_tcw_dtr(cc); in crypt_iv_tcw_ctr()
656 static int crypt_iv_tcw_init(struct crypt_config *cc) in crypt_iv_tcw_init() argument
658 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_init()
659 int key_offset = cc->key_size - cc->iv_size - TCW_WHITENING_SIZE; in crypt_iv_tcw_init()
661 memcpy(tcw->iv_seed, &cc->key[key_offset], cc->iv_size); in crypt_iv_tcw_init()
662 memcpy(tcw->whitening, &cc->key[key_offset + cc->iv_size], in crypt_iv_tcw_init()
668 static int crypt_iv_tcw_wipe(struct crypt_config *cc) in crypt_iv_tcw_wipe() argument
670 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_wipe()
672 memset(tcw->iv_seed, 0, cc->iv_size); in crypt_iv_tcw_wipe()
678 static int crypt_iv_tcw_whitening(struct crypt_config *cc, in crypt_iv_tcw_whitening() argument
682 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_whitening()
718 static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_gen() argument
721 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_gen()
729 r = crypt_iv_tcw_whitening(cc, dmreq, src + dmreq->sg_in.offset); in crypt_iv_tcw_gen()
734 memcpy(iv, tcw->iv_seed, cc->iv_size); in crypt_iv_tcw_gen()
736 if (cc->iv_size > 8) in crypt_iv_tcw_gen()
737 crypto_xor(&iv[8], (u8 *)&sector, cc->iv_size - 8); in crypt_iv_tcw_gen()
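Between source lines 734 and 737 the search skips the XOR of the first 8 IV bytes (that line contains no "cc" and so does not match). Restored, and with the mangled "§or" read as "&sector", the tail of crypt_iv_tcw_gen() is, per mainline:

memcpy(iv, tcw->iv_seed, cc->iv_size);	/* per-key IV seed */
crypto_xor(iv, (u8 *)&sector, 8);	/* mix in sector, bytes 0-7 */
if (cc->iv_size > 8)			/* wider IVs get the sector again */
	crypto_xor(&iv[8], (u8 *)&sector, cc->iv_size - 8);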
742 static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_post() argument
753 r = crypt_iv_tcw_whitening(cc, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_tcw_post()
803 static void crypt_convert_init(struct crypt_config *cc, in crypt_convert_init() argument
814 ctx->cc_sector = sector + cc->iv_offset; in crypt_convert_init()
818 static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc, in dmreq_of_req() argument
821 return (struct dm_crypt_request *)((char *)req + cc->dmreq_start); in dmreq_of_req()
824 static struct ablkcipher_request *req_of_dmreq(struct crypt_config *cc, in req_of_dmreq() argument
827 return (struct ablkcipher_request *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
830 static u8 *iv_of_dmreq(struct crypt_config *cc, in iv_of_dmreq() argument
834 crypto_ablkcipher_alignmask(any_tfm(cc)) + 1); in iv_of_dmreq()
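Source lines 830-834 locate the IV inside the single allocation that backs each request. A sketch of the whole helper, assuming the ALIGN-based layout mainline uses (request header and tfm context first, then the dm_crypt_request, then the alignment-padded IV):

static u8 *iv_of_dmreq(struct crypt_config *cc,
		       struct dm_crypt_request *dmreq)
{
	/* the IV sits immediately after the dm_crypt_request,
	 * rounded up to the cipher's required alignment */
	return (u8 *)ALIGN((unsigned long)(dmreq + 1),
		crypto_ablkcipher_alignmask(any_tfm(cc)) + 1);
}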
837 static int crypt_convert_block(struct crypt_config *cc, in crypt_convert_block() argument
847 dmreq = dmreq_of_req(cc, req); in crypt_convert_block()
848 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block()
863 if (cc->iv_gen_ops) { in crypt_convert_block()
864 r = cc->iv_gen_ops->generator(cc, iv, dmreq); in crypt_convert_block()
877 if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) in crypt_convert_block()
878 r = cc->iv_gen_ops->post(cc, iv, dmreq); in crypt_convert_block()
886 static void crypt_alloc_req(struct crypt_config *cc, in crypt_alloc_req() argument
889 unsigned key_index = ctx->cc_sector & (cc->tfms_count - 1); in crypt_alloc_req()
892 ctx->req = mempool_alloc(cc->req_pool, GFP_NOIO); in crypt_alloc_req()
894 ablkcipher_request_set_tfm(ctx->req, cc->tfms[key_index]); in crypt_alloc_req()
902 kcryptd_async_done, dmreq_of_req(cc, ctx->req)); in crypt_alloc_req()
905 static void crypt_free_req(struct crypt_config *cc, in crypt_free_req() argument
908 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req()
911 mempool_free(req, cc->req_pool); in crypt_free_req()
917 static int crypt_convert(struct crypt_config *cc, in crypt_convert() argument
926 crypt_alloc_req(cc, ctx); in crypt_convert()
930 r = crypt_convert_block(cc, ctx, ctx->req); in crypt_convert()
968 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone);
989 struct crypt_config *cc = io->cc; in crypt_alloc_buffer() local
999 mutex_lock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
1001 clone = bio_alloc_bioset(GFP_NOIO, nr_iovecs, cc->bs); in crypt_alloc_buffer()
1010 page = mempool_alloc(cc->page_pool, gfp_mask); in crypt_alloc_buffer()
1012 crypt_free_buffer_pages(cc, clone); in crypt_alloc_buffer()
1032 mutex_unlock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
1037 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone) in crypt_free_buffer_pages() argument
1044 mempool_free(bv->bv_page, cc->page_pool); in crypt_free_buffer_pages()
1049 static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc, in crypt_io_init() argument
1052 io->cc = cc; in crypt_io_init()
1071 struct crypt_config *cc = io->cc; in crypt_dec_pending() local
1079 crypt_free_req(cc, io->ctx.req, base_bio); in crypt_dec_pending()
1105 struct crypt_config *cc = io->cc; in crypt_endio() local
1113 crypt_free_buffer_pages(cc, clone); in crypt_endio()
1131 struct crypt_config *cc = io->cc; in clone_init() local
1135 clone->bi_bdev = cc->dev->bdev; in clone_init()
1141 struct crypt_config *cc = io->cc; in kcryptd_io_read() local
1150 clone = bio_clone_fast(io->base_bio, gfp, cc->bs); in kcryptd_io_read()
1157 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_io_read()
1175 struct crypt_config *cc = io->cc; in kcryptd_queue_read() local
1178 queue_work(cc->io_queue, &io->work); in kcryptd_queue_read()
1192 struct crypt_config *cc = data; in dmcrypt_write() local
1201 spin_lock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1204 if (!RB_EMPTY_ROOT(&cc->write_tree)) in dmcrypt_write()
1207 if (unlikely(test_bit(DM_CRYPT_EXIT_THREAD, &cc->flags))) { in dmcrypt_write()
1208 spin_unlock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1213 __add_wait_queue(&cc->write_thread_wait, &wait); in dmcrypt_write()
1215 spin_unlock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1219 spin_lock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1220 __remove_wait_queue(&cc->write_thread_wait, &wait); in dmcrypt_write()
1224 write_tree = cc->write_tree; in dmcrypt_write()
1225 cc->write_tree = RB_ROOT; in dmcrypt_write()
1226 spin_unlock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1248 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_io_submit() local
1254 crypt_free_buffer_pages(cc, clone); in kcryptd_crypt_write_io_submit()
1263 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_crypt_write_io_submit()
1265 if (likely(!async) && test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) { in kcryptd_crypt_write_io_submit()
1270 spin_lock_irqsave(&cc->write_thread_wait.lock, flags); in kcryptd_crypt_write_io_submit()
1271 rbp = &cc->write_tree.rb_node; in kcryptd_crypt_write_io_submit()
1282 rb_insert_color(&io->rb_node, &cc->write_tree); in kcryptd_crypt_write_io_submit()
1284 wake_up_locked(&cc->write_thread_wait); in kcryptd_crypt_write_io_submit()
1285 spin_unlock_irqrestore(&cc->write_thread_wait.lock, flags); in kcryptd_crypt_write_io_submit()
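Source lines 1270-1285 file the finished write into cc->write_tree so dmcrypt_write() can pop and submit bios in sector order. A sketch of the descent elided between lines 1271 and 1282, assuming a crypt_io_from_node() accessor (an rb_entry() mapping back to the dm_crypt_io, as in mainline):

struct rb_node **rbp = &cc->write_tree.rb_node, *parent = NULL;
sector_t sector = io->sector;

while (*rbp) {			/* ordinary sorted rb-tree walk */
	parent = *rbp;
	if (sector < crypt_io_from_node(parent)->sector)
		rbp = &(*rbp)->rb_left;
	else
		rbp = &(*rbp)->rb_right;
}
rb_link_node(&io->rb_node, parent, rbp);
rb_insert_color(&io->rb_node, &cc->write_tree);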
1290 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_convert() local
1300 crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector); in kcryptd_crypt_write_convert()
1314 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_write_convert()
1336 struct crypt_config *cc = io->cc; in kcryptd_crypt_read_convert() local
1341 crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio, in kcryptd_crypt_read_convert()
1344 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_read_convert()
1360 struct crypt_config *cc = io->cc; in kcryptd_async_done() local
1372 if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post) in kcryptd_async_done()
1373 error = cc->iv_gen_ops->post(cc, iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
1378 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
1401 struct crypt_config *cc = io->cc; in kcryptd_queue_crypt() local
1404 queue_work(cc->crypt_queue, &io->work); in kcryptd_queue_crypt()
1431 static void crypt_free_tfms(struct crypt_config *cc) in crypt_free_tfms() argument
1435 if (!cc->tfms) in crypt_free_tfms()
1438 for (i = 0; i < cc->tfms_count; i++) in crypt_free_tfms()
1439 if (cc->tfms[i] && !IS_ERR(cc->tfms[i])) { in crypt_free_tfms()
1440 crypto_free_ablkcipher(cc->tfms[i]); in crypt_free_tfms()
1441 cc->tfms[i] = NULL; in crypt_free_tfms()
1444 kfree(cc->tfms); in crypt_free_tfms()
1445 cc->tfms = NULL; in crypt_free_tfms()
1448 static int crypt_alloc_tfms(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms() argument
1453 cc->tfms = kmalloc(cc->tfms_count * sizeof(struct crypto_ablkcipher *), in crypt_alloc_tfms()
1455 if (!cc->tfms) in crypt_alloc_tfms()
1458 for (i = 0; i < cc->tfms_count; i++) { in crypt_alloc_tfms()
1459 cc->tfms[i] = crypto_alloc_ablkcipher(ciphermode, 0, 0); in crypt_alloc_tfms()
1460 if (IS_ERR(cc->tfms[i])) { in crypt_alloc_tfms()
1461 err = PTR_ERR(cc->tfms[i]); in crypt_alloc_tfms()
1462 crypt_free_tfms(cc); in crypt_alloc_tfms()
1470 static int crypt_setkey_allcpus(struct crypt_config *cc) in crypt_setkey_allcpus() argument
1476 subkey_size = (cc->key_size - cc->key_extra_size) >> ilog2(cc->tfms_count); in crypt_setkey_allcpus()
1478 for (i = 0; i < cc->tfms_count; i++) { in crypt_setkey_allcpus()
1479 r = crypto_ablkcipher_setkey(cc->tfms[i], in crypt_setkey_allcpus()
1480 cc->key + (i * subkey_size), in crypt_setkey_allcpus()
1489 static int crypt_set_key(struct crypt_config *cc, char *key) in crypt_set_key() argument
1495 if (cc->key_size != (key_string_len >> 1)) in crypt_set_key()
1499 if (!cc->key_size && strcmp(key, "-")) in crypt_set_key()
1502 if (cc->key_size && crypt_decode_key(cc->key, key, cc->key_size) < 0) in crypt_set_key()
1505 set_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_key()
1507 r = crypt_setkey_allcpus(cc); in crypt_set_key()
1516 static int crypt_wipe_key(struct crypt_config *cc) in crypt_wipe_key() argument
1518 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_wipe_key()
1519 memset(&cc->key, 0, cc->key_size * sizeof(u8)); in crypt_wipe_key()
1521 return crypt_setkey_allcpus(cc); in crypt_wipe_key()
1526 struct crypt_config *cc = ti->private; in crypt_dtr() local
1530 if (!cc) in crypt_dtr()
1533 if (cc->write_thread) { in crypt_dtr()
1534 spin_lock_irq(&cc->write_thread_wait.lock); in crypt_dtr()
1535 set_bit(DM_CRYPT_EXIT_THREAD, &cc->flags); in crypt_dtr()
1536 wake_up_locked(&cc->write_thread_wait); in crypt_dtr()
1537 spin_unlock_irq(&cc->write_thread_wait.lock); in crypt_dtr()
1538 kthread_stop(cc->write_thread); in crypt_dtr()
1541 if (cc->io_queue) in crypt_dtr()
1542 destroy_workqueue(cc->io_queue); in crypt_dtr()
1543 if (cc->crypt_queue) in crypt_dtr()
1544 destroy_workqueue(cc->crypt_queue); in crypt_dtr()
1546 crypt_free_tfms(cc); in crypt_dtr()
1548 if (cc->bs) in crypt_dtr()
1549 bioset_free(cc->bs); in crypt_dtr()
1551 mempool_destroy(cc->page_pool); in crypt_dtr()
1552 mempool_destroy(cc->req_pool); in crypt_dtr()
1554 if (cc->iv_gen_ops && cc->iv_gen_ops->dtr) in crypt_dtr()
1555 cc->iv_gen_ops->dtr(cc); in crypt_dtr()
1557 if (cc->dev) in crypt_dtr()
1558 dm_put_device(ti, cc->dev); in crypt_dtr()
1560 kzfree(cc->cipher); in crypt_dtr()
1561 kzfree(cc->cipher_string); in crypt_dtr()
1564 kzfree(cc); in crypt_dtr()
1570 struct crypt_config *cc = ti->private; in crypt_ctr_cipher() local
1582 cc->cipher_string = kstrdup(cipher_in, GFP_KERNEL); in crypt_ctr_cipher()
1583 if (!cc->cipher_string) in crypt_ctr_cipher()
1595 cc->tfms_count = 1; in crypt_ctr_cipher()
1596 else if (sscanf(keycount, "%u%c", &cc->tfms_count, &dummy) != 1 || in crypt_ctr_cipher()
1597 !is_power_of_2(cc->tfms_count)) { in crypt_ctr_cipher()
1601 cc->key_parts = cc->tfms_count; in crypt_ctr_cipher()
1602 cc->key_extra_size = 0; in crypt_ctr_cipher()
1604 cc->cipher = kstrdup(cipher, GFP_KERNEL); in crypt_ctr_cipher()
1605 if (!cc->cipher) in crypt_ctr_cipher()
1641 ret = crypt_alloc_tfms(cc, cipher_api); in crypt_ctr_cipher()
1648 cc->iv_size = crypto_ablkcipher_ivsize(any_tfm(cc)); in crypt_ctr_cipher()
1649 if (cc->iv_size) in crypt_ctr_cipher()
1651 cc->iv_size = max(cc->iv_size, in crypt_ctr_cipher()
1660 cc->iv_gen_ops = NULL; in crypt_ctr_cipher()
1662 cc->iv_gen_ops = &crypt_iv_plain_ops; in crypt_ctr_cipher()
1664 cc->iv_gen_ops = &crypt_iv_plain64_ops; in crypt_ctr_cipher()
1666 cc->iv_gen_ops = &crypt_iv_essiv_ops; in crypt_ctr_cipher()
1668 cc->iv_gen_ops = &crypt_iv_benbi_ops; in crypt_ctr_cipher()
1670 cc->iv_gen_ops = &crypt_iv_null_ops; in crypt_ctr_cipher()
1672 cc->iv_gen_ops = &crypt_iv_lmk_ops; in crypt_ctr_cipher()
1679 if (cc->key_size % cc->key_parts) { in crypt_ctr_cipher()
1680 cc->key_parts++; in crypt_ctr_cipher()
1681 cc->key_extra_size = cc->key_size / cc->key_parts; in crypt_ctr_cipher()
1684 cc->iv_gen_ops = &crypt_iv_tcw_ops; in crypt_ctr_cipher()
1685 cc->key_parts += 2; /* IV + whitening */ in crypt_ctr_cipher()
1686 cc->key_extra_size = cc->iv_size + TCW_WHITENING_SIZE; in crypt_ctr_cipher()
1694 ret = crypt_set_key(cc, key); in crypt_ctr_cipher()
1701 if (cc->iv_gen_ops && cc->iv_gen_ops->ctr) { in crypt_ctr_cipher()
1702 ret = cc->iv_gen_ops->ctr(cc, ti, ivopts); in crypt_ctr_cipher()
1710 if (cc->iv_gen_ops && cc->iv_gen_ops->init) { in crypt_ctr_cipher()
1711 ret = cc->iv_gen_ops->init(cc); in crypt_ctr_cipher()
1734 struct crypt_config *cc; in crypt_ctr() local
1754 cc = kzalloc(sizeof(*cc) + key_size * sizeof(u8), GFP_KERNEL); in crypt_ctr()
1755 if (!cc) { in crypt_ctr()
1759 cc->key_size = key_size; in crypt_ctr()
1761 ti->private = cc; in crypt_ctr()
1766 cc->dmreq_start = sizeof(struct ablkcipher_request); in crypt_ctr()
1767 cc->dmreq_start += crypto_ablkcipher_reqsize(any_tfm(cc)); in crypt_ctr()
1768 cc->dmreq_start = ALIGN(cc->dmreq_start, __alignof__(struct dm_crypt_request)); in crypt_ctr()
1770 if (crypto_ablkcipher_alignmask(any_tfm(cc)) < CRYPTO_MINALIGN) { in crypt_ctr()
1772 iv_size_padding = -(cc->dmreq_start + sizeof(struct dm_crypt_request)) in crypt_ctr()
1773 & crypto_ablkcipher_alignmask(any_tfm(cc)); in crypt_ctr()
1780 iv_size_padding = crypto_ablkcipher_alignmask(any_tfm(cc)); in crypt_ctr()
1784 cc->req_pool = mempool_create_kmalloc_pool(MIN_IOS, cc->dmreq_start + in crypt_ctr()
1785 sizeof(struct dm_crypt_request) + iv_size_padding + cc->iv_size); in crypt_ctr()
1786 if (!cc->req_pool) { in crypt_ctr()
1791 cc->per_bio_data_size = ti->per_bio_data_size = in crypt_ctr()
1792 ALIGN(sizeof(struct dm_crypt_io) + cc->dmreq_start + in crypt_ctr()
1793 sizeof(struct dm_crypt_request) + iv_size_padding + cc->iv_size, in crypt_ctr()
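Source lines 1766-1793 size both the request mempool and the per-bio data from the same layout. Spelled out as a comment, this only restates the expressions matched above, not additional mainline code:

/* per-request allocation, front to back:
 *   ablkcipher_request + tfm reqsize  -> cc->dmreq_start (lines 1766-1768,
 *                                        ALIGNed for dm_crypt_request)
 *   struct dm_crypt_request
 *   iv_size_padding                   -> re-aligns the IV for the cipher
 *   cc->iv_size                       -> the IV itself
 */
unsigned per_req = cc->dmreq_start + sizeof(struct dm_crypt_request) +
		   iv_size_padding + cc->iv_size;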
1796 cc->page_pool = mempool_create_page_pool(BIO_MAX_PAGES, 0); in crypt_ctr()
1797 if (!cc->page_pool) { in crypt_ctr()
1802 cc->bs = bioset_create(MIN_IOS, 0); in crypt_ctr()
1803 if (!cc->bs) { in crypt_ctr()
1808 mutex_init(&cc->bio_alloc_lock); in crypt_ctr()
1815 cc->iv_offset = tmpll; in crypt_ctr()
1817 ret = dm_get_device(ti, argv[3], dm_table_get_mode(ti->table), &cc->dev); in crypt_ctr()
1828 cc->start = tmpll; in crypt_ctr()
1854 set_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_ctr()
1857 set_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_ctr()
1867 cc->io_queue = alloc_workqueue("kcryptd_io", WQ_MEM_RECLAIM, 1); in crypt_ctr()
1868 if (!cc->io_queue) { in crypt_ctr()
1873 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) in crypt_ctr()
1874 cc->crypt_queue = alloc_workqueue("kcryptd", WQ_CPU_INTENSIVE | WQ_MEM_RECLAIM, 1); in crypt_ctr()
1876 cc->crypt_queue = alloc_workqueue("kcryptd", WQ_CPU_INTENSIVE | WQ_MEM_RECLAIM | WQ_UNBOUND, in crypt_ctr()
1878 if (!cc->crypt_queue) { in crypt_ctr()
1883 init_waitqueue_head(&cc->write_thread_wait); in crypt_ctr()
1884 cc->write_tree = RB_ROOT; in crypt_ctr()
1886 cc->write_thread = kthread_create(dmcrypt_write, cc, "dmcrypt_write"); in crypt_ctr()
1887 if (IS_ERR(cc->write_thread)) { in crypt_ctr()
1888 ret = PTR_ERR(cc->write_thread); in crypt_ctr()
1889 cc->write_thread = NULL; in crypt_ctr()
1893 wake_up_process(cc->write_thread); in crypt_ctr()
1908 struct crypt_config *cc = ti->private; in crypt_map() local
1916 bio->bi_bdev = cc->dev->bdev; in crypt_map()
1918 bio->bi_iter.bi_sector = cc->start + in crypt_map()
1923 io = dm_per_bio_data(bio, cc->per_bio_data_size); in crypt_map()
1924 crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector)); in crypt_map()
1939 struct crypt_config *cc = ti->private; in crypt_status() local
1949 DMEMIT("%s ", cc->cipher_string); in crypt_status()
1951 if (cc->key_size > 0) in crypt_status()
1952 for (i = 0; i < cc->key_size; i++) in crypt_status()
1953 DMEMIT("%02x", cc->key[i]); in crypt_status()
1957 DMEMIT(" %llu %s %llu", (unsigned long long)cc->iv_offset, in crypt_status()
1958 cc->dev->name, (unsigned long long)cc->start); in crypt_status()
1961 num_feature_args += test_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_status()
1962 num_feature_args += test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_status()
1967 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) in crypt_status()
1969 if (test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) in crypt_status()
1979 struct crypt_config *cc = ti->private; in crypt_postsuspend() local
1981 set_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_postsuspend()
1986 struct crypt_config *cc = ti->private; in crypt_preresume() local
1988 if (!test_bit(DM_CRYPT_KEY_VALID, &cc->flags)) { in crypt_preresume()
1998 struct crypt_config *cc = ti->private; in crypt_resume() local
2000 clear_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_resume()
2009 struct crypt_config *cc = ti->private; in crypt_message() local
2016 if (!test_bit(DM_CRYPT_SUSPENDED, &cc->flags)) { in crypt_message()
2021 ret = crypt_set_key(cc, argv[2]); in crypt_message()
2024 if (cc->iv_gen_ops && cc->iv_gen_ops->init) in crypt_message()
2025 ret = cc->iv_gen_ops->init(cc); in crypt_message()
2029 if (cc->iv_gen_ops && cc->iv_gen_ops->wipe) { in crypt_message()
2030 ret = cc->iv_gen_ops->wipe(cc); in crypt_message()
2034 return crypt_wipe_key(cc); in crypt_message()
2046 struct crypt_config *cc = ti->private; in crypt_iterate_devices() local
2048 return fn(ti, cc->dev, cc->start, ti->len, data); in crypt_iterate_devices()