Lines matching refs:cc

References to cc, the struct crypt_config * that holds a dm-crypt target's state; the code is recognizably an older, ablkcipher-era drivers/md/dm-crypt.c. Each hit below shows the source line number, the matching code, and the enclosing function; the trailing "argument", "local" and "member" tags mark lines where cc is declared.

54 struct crypt_config *cc; member
77 int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
79 void (*dtr)(struct crypt_config *cc);
80 int (*init)(struct crypt_config *cc);
81 int (*wipe)(struct crypt_config *cc);
82 int (*generator)(struct crypt_config *cc, u8 *iv,
84 int (*post)(struct crypt_config *cc, u8 *iv,
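
The function pointers at lines 77-84 are the members of dm-crypt's per-mode IV-generator table, struct crypt_iv_operations; only generator is mandatory, and the other hooks are NULL-checked before use (see lines 862-877 and 1681-1691 below). A minimal userspace sketch of the pattern, with hypothetical stub types standing in for the kernel structs:

```c
#include <stdint.h>
#include <string.h>

/* Stub stand-ins for the kernel types -- the real structs are far larger. */
struct crypt_config { unsigned iv_size; };
struct dm_crypt_request { uint64_t iv_sector; };
struct dm_target;

/* Mirrors the members listed at lines 77-84. */
struct crypt_iv_operations {
	int  (*ctr)(struct crypt_config *cc, struct dm_target *ti,
		    const char *opts);
	void (*dtr)(struct crypt_config *cc);
	int  (*init)(struct crypt_config *cc);
	int  (*wipe)(struct crypt_config *cc);
	int  (*generator)(struct crypt_config *cc, uint8_t *iv,
			  struct dm_crypt_request *dmreq);
	int  (*post)(struct crypt_config *cc, uint8_t *iv,
		     struct dm_crypt_request *dmreq);
};

/* Simplest mode: an all-zero IV, like crypt_iv_null_gen() at line 459. */
static int null_gen(struct crypt_config *cc, uint8_t *iv,
		    struct dm_crypt_request *dmreq)
{
	(void)dmreq;
	memset(iv, 0, cc->iv_size);
	return 0;
}

static const struct crypt_iv_operations crypt_iv_null_ops = {
	.generator = null_gen,	/* unused hooks simply stay NULL */
};
```
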
186 static u8 *iv_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq);
191 static struct crypto_ablkcipher *any_tfm(struct crypt_config *cc) in any_tfm() argument
193 return cc->tfms[0]; in any_tfm()
243 static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain_gen() argument
246 memset(iv, 0, cc->iv_size); in crypt_iv_plain_gen()
252 static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain64_gen() argument
255 memset(iv, 0, cc->iv_size); in crypt_iv_plain64_gen()
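
The listing shows only the memset() calls; in the full source, crypt_iv_plain_gen() goes on to store the sector number little-endian truncated to 32 bits, while crypt_iv_plain64_gen() stores the full 64 bits. An endian-portable sketch of the two variants:

```c
#include <stdint.h>
#include <string.h>

/* Store v little-endian, byte by byte, to stay endian-portable. */
static void put_le(uint8_t *dst, uint64_t v, unsigned nbytes)
{
	for (unsigned i = 0; i < nbytes; i++)
		dst[i] = (uint8_t)(v >> (8 * i));
}

/* plain: zeroed IV, then the low 32 bits of the sector number. */
static void plain_iv(uint8_t *iv, unsigned iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_le(iv, sector & 0xffffffff, 4);
}

/* plain64: same, but all 64 bits -- IVs stay unique past 2 TiB. */
static void plain64_iv(uint8_t *iv, unsigned iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_le(iv, sector, 8);
}
```
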
262 static int crypt_iv_essiv_init(struct crypt_config *cc) in crypt_iv_essiv_init() argument
264 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_init()
270 sg_init_one(&sg, cc->key, cc->key_size); in crypt_iv_essiv_init()
274 err = crypto_hash_digest(&desc, &sg, cc->key_size, essiv->salt); in crypt_iv_essiv_init()
278 essiv_tfm = cc->iv_private; in crypt_iv_essiv_init()
289 static int crypt_iv_essiv_wipe(struct crypt_config *cc) in crypt_iv_essiv_wipe() argument
291 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_wipe()
298 essiv_tfm = cc->iv_private; in crypt_iv_essiv_wipe()
307 static struct crypto_cipher *setup_essiv_cpu(struct crypt_config *cc, in setup_essiv_cpu() argument
315 essiv_tfm = crypto_alloc_cipher(cc->cipher, 0, CRYPTO_ALG_ASYNC); in setup_essiv_cpu()
322 crypto_ablkcipher_ivsize(any_tfm(cc))) { in setup_essiv_cpu()
339 static void crypt_iv_essiv_dtr(struct crypt_config *cc) in crypt_iv_essiv_dtr() argument
342 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv; in crypt_iv_essiv_dtr()
350 essiv_tfm = cc->iv_private; in crypt_iv_essiv_dtr()
355 cc->iv_private = NULL; in crypt_iv_essiv_dtr()
358 static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_essiv_ctr() argument
386 cc->iv_gen_private.essiv.salt = salt; in crypt_iv_essiv_ctr()
387 cc->iv_gen_private.essiv.hash_tfm = hash_tfm; in crypt_iv_essiv_ctr()
389 essiv_tfm = setup_essiv_cpu(cc, ti, salt, in crypt_iv_essiv_ctr()
392 crypt_iv_essiv_dtr(cc); in crypt_iv_essiv_ctr()
395 cc->iv_private = essiv_tfm; in crypt_iv_essiv_ctr()
406 static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_essiv_gen() argument
409 struct crypto_cipher *essiv_tfm = cc->iv_private; in crypt_iv_essiv_gen()
411 memset(iv, 0, cc->iv_size); in crypt_iv_essiv_gen()
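
Lines 262-411 together implement ESSIV ("encrypted salt-sector IV"): crypt_iv_essiv_init() hashes the data key into a salt (lines 270-274), setup_essiv_cpu() keys a second cipher with that salt, and crypt_iv_essiv_gen() produces each IV by encrypting the sector number with it, i.e. IV = E_H(K)(sector). A sketch with the block cipher injected as a callback, since the kernel does both steps through the crypto API:

```c
#include <stdint.h>
#include <string.h>

/* Hypothetical callback type standing in for crypto_cipher_encrypt_one(). */
typedef void (*block_encrypt_fn)(const uint8_t *key, unsigned key_len,
				 uint8_t *block, unsigned block_len);

/* IV = E_salt(sector), where salt = H(data key); assumes iv_size >= 8. */
static void essiv_iv(uint8_t *iv, unsigned iv_size,
		     const uint8_t *salt, unsigned salt_len,
		     uint64_t sector, block_encrypt_fn encrypt)
{
	memset(iv, 0, iv_size);
	for (unsigned i = 0; i < 8; i++)	/* little-endian sector number */
		iv[i] = (uint8_t)(sector >> (8 * i));
	encrypt(salt, salt_len, iv, iv_size);	/* encrypt in place */
}
```
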
418 static int crypt_iv_benbi_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_benbi_ctr() argument
421 unsigned bs = crypto_ablkcipher_blocksize(any_tfm(cc)); in crypt_iv_benbi_ctr()
437 cc->iv_gen_private.benbi.shift = 9 - log; in crypt_iv_benbi_ctr()
442 static void crypt_iv_benbi_dtr(struct crypt_config *cc) in crypt_iv_benbi_dtr() argument
446 static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_benbi_gen() argument
451 memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */ in crypt_iv_benbi_gen()
453 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
454 put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64))); in crypt_iv_benbi_gen()
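
benbi ("big-endian narrow-block count") numbers the cipher blocks of the device starting from 1 and stores the count big-endian in the last eight bytes of the IV; the shift computed at line 437 converts a 512-byte sector index into a cipher-block index. A sketch of the arithmetic, assuming iv_size >= 8 and a power-of-two block size no larger than 512:

```c
#include <stdint.h>
#include <string.h>

static void benbi_iv(uint8_t *iv, unsigned iv_size, uint64_t sector,
		     unsigned block_size)
{
	unsigned log = 0;
	while ((1u << log) < block_size)	/* ilog2(bs) */
		log++;
	unsigned shift = 9 - log;	/* 512/bs blocks per sector; AES: 5 */

	uint64_t val = (sector << shift) + 1;	/* benbi counts from 1 */

	memset(iv, 0, iv_size);
	for (unsigned i = 0; i < 8; i++)	/* big-endian, at the IV's tail */
		iv[iv_size - 1 - i] = (uint8_t)(val >> (8 * i));
}
```
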
459 static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_null_gen() argument
462 memset(iv, 0, cc->iv_size); in crypt_iv_null_gen()
467 static void crypt_iv_lmk_dtr(struct crypt_config *cc) in crypt_iv_lmk_dtr() argument
469 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_dtr()
479 static int crypt_iv_lmk_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_lmk_ctr() argument
482 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_ctr()
491 if (cc->key_parts == cc->tfms_count) { in crypt_iv_lmk_ctr()
498 crypt_iv_lmk_dtr(cc); in crypt_iv_lmk_ctr()
506 static int crypt_iv_lmk_init(struct crypt_config *cc) in crypt_iv_lmk_init() argument
508 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_init()
509 int subkey_size = cc->key_size / cc->key_parts; in crypt_iv_lmk_init()
513 memcpy(lmk->seed, cc->key + (cc->tfms_count * subkey_size), in crypt_iv_lmk_init()
519 static int crypt_iv_lmk_wipe(struct crypt_config *cc) in crypt_iv_lmk_wipe() argument
521 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_wipe()
529 static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_one() argument
533 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_one()
573 memcpy(iv, &md5state.hash, cc->iv_size); in crypt_iv_lmk_one()
578 static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_gen() argument
586 r = crypt_iv_lmk_one(cc, iv, dmreq, src + dmreq->sg_in.offset); in crypt_iv_lmk_gen()
589 memset(iv, 0, cc->iv_size); in crypt_iv_lmk_gen()
594 static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_post() argument
604 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_lmk_post()
608 crypto_xor(dst + dmreq->sg_out.offset, iv, cc->iv_size); in crypt_iv_lmk_post()
614 static void crypt_iv_tcw_dtr(struct crypt_config *cc) in crypt_iv_tcw_dtr() argument
616 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_dtr()
628 static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_tcw_ctr() argument
631 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_ctr()
633 if (cc->key_size <= (cc->iv_size + TCW_WHITENING_SIZE)) { in crypt_iv_tcw_ctr()
644 tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL); in crypt_iv_tcw_ctr()
647 crypt_iv_tcw_dtr(cc); in crypt_iv_tcw_ctr()
655 static int crypt_iv_tcw_init(struct crypt_config *cc) in crypt_iv_tcw_init() argument
657 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_init()
658 int key_offset = cc->key_size - cc->iv_size - TCW_WHITENING_SIZE; in crypt_iv_tcw_init()
660 memcpy(tcw->iv_seed, &cc->key[key_offset], cc->iv_size); in crypt_iv_tcw_init()
661 memcpy(tcw->whitening, &cc->key[key_offset + cc->iv_size], in crypt_iv_tcw_init()
667 static int crypt_iv_tcw_wipe(struct crypt_config *cc) in crypt_iv_tcw_wipe() argument
669 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_wipe()
671 memset(tcw->iv_seed, 0, cc->iv_size); in crypt_iv_tcw_wipe()
677 static int crypt_iv_tcw_whitening(struct crypt_config *cc, in crypt_iv_tcw_whitening() argument
681 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_whitening()
717 static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_gen() argument
720 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_gen()
728 r = crypt_iv_tcw_whitening(cc, dmreq, src + dmreq->sg_in.offset); in crypt_iv_tcw_gen()
733 memcpy(iv, tcw->iv_seed, cc->iv_size); in crypt_iv_tcw_gen()
735 if (cc->iv_size > 8) in crypt_iv_tcw_gen()
736 crypto_xor(&iv[8], (u8 *)&sector, cc->iv_size - 8); in crypt_iv_tcw_gen()
741 static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_post() argument
752 r = crypt_iv_tcw_whitening(cc, dmreq, dst + dmreq->sg_out.offset); in crypt_iv_tcw_post()
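
In TCW mode (TrueCrypt-compatible), the supplied key carries extra material at its tail: an iv_size-byte IV seed followed by a TCW_WHITENING_SIZE (16) byte whitening seed, split off in crypt_iv_tcw_init() at lines 658-661. The per-sector IV is the seed XORed with the little-endian sector number (lines 733-736). A sketch of the key split and IV derivation; the whitening pass applied to the data itself is omitted here:

```c
#include <stdint.h>
#include <string.h>

#define TCW_WHITENING_SIZE 16	/* matches the dm-crypt constant */

struct tcw_seeds { const uint8_t *iv_seed, *whitening; };

/* Key layout: [cipher key | iv_seed (iv_size bytes) | whitening (16 bytes)] */
static struct tcw_seeds tcw_split_key(const uint8_t *key, unsigned key_size,
				      unsigned iv_size)
{
	unsigned off = key_size - iv_size - TCW_WHITENING_SIZE;
	struct tcw_seeds s = { key + off, key + off + iv_size };
	return s;
}

/* IV = iv_seed XOR little-endian sector (repeated once for bytes 8..15). */
static void tcw_iv(uint8_t *iv, unsigned iv_size, const uint8_t *iv_seed,
		   uint64_t sector)
{
	uint8_t sec[8];
	for (unsigned i = 0; i < 8; i++)
		sec[i] = (uint8_t)(sector >> (8 * i));

	memcpy(iv, iv_seed, iv_size);
	for (unsigned i = 0; i < 8 && i < iv_size; i++)
		iv[i] ^= sec[i];
	for (unsigned i = 8; i < iv_size && i < 16; i++)	/* iv_size <= 16 */
		iv[i] ^= sec[i - 8];
}
```
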
802 static void crypt_convert_init(struct crypt_config *cc, in crypt_convert_init() argument
813 ctx->cc_sector = sector + cc->iv_offset; in crypt_convert_init()
817 static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc, in dmreq_of_req() argument
820 return (struct dm_crypt_request *)((char *)req + cc->dmreq_start); in dmreq_of_req()
823 static struct ablkcipher_request *req_of_dmreq(struct crypt_config *cc, in req_of_dmreq() argument
826 return (struct ablkcipher_request *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
829 static u8 *iv_of_dmreq(struct crypt_config *cc, in iv_of_dmreq() argument
833 crypto_ablkcipher_alignmask(any_tfm(cc)) + 1); in iv_of_dmreq()
836 static int crypt_convert_block(struct crypt_config *cc, in crypt_convert_block() argument
846 dmreq = dmreq_of_req(cc, req); in crypt_convert_block()
847 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block()
862 if (cc->iv_gen_ops) { in crypt_convert_block()
863 r = cc->iv_gen_ops->generator(cc, iv, dmreq); in crypt_convert_block()
876 if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) in crypt_convert_block()
877 r = cc->iv_gen_ops->post(cc, iv, dmreq); in crypt_convert_block()
885 static void crypt_alloc_req(struct crypt_config *cc, in crypt_alloc_req() argument
888 unsigned key_index = ctx->cc_sector & (cc->tfms_count - 1); in crypt_alloc_req()
891 ctx->req = mempool_alloc(cc->req_pool, GFP_NOIO); in crypt_alloc_req()
893 ablkcipher_request_set_tfm(ctx->req, cc->tfms[key_index]); in crypt_alloc_req()
896 kcryptd_async_done, dmreq_of_req(cc, ctx->req)); in crypt_alloc_req()
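
The key_index computed at line 888 picks one of tfms_count transforms per sector with a mask rather than a modulo; that only works because line 1577 below rejects any keycount that is not a power of two. A self-checking sketch of the identity:

```c
#include <assert.h>
#include <stdint.h>

/* sector & (n - 1) == sector % n, provided n is a power of two. */
static unsigned key_index(uint64_t sector, unsigned tfms_count)
{
	return (unsigned)(sector & (tfms_count - 1));
}

int main(void)
{
	for (uint64_t s = 0; s < 4096; s++)
		assert(key_index(s, 8) == s % 8);
	return 0;
}
```
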
899 static void crypt_free_req(struct crypt_config *cc, in crypt_free_req() argument
902 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req()
905 mempool_free(req, cc->req_pool); in crypt_free_req()
911 static int crypt_convert(struct crypt_config *cc, in crypt_convert() argument
920 crypt_alloc_req(cc, ctx); in crypt_convert()
924 r = crypt_convert_block(cc, ctx, ctx->req); in crypt_convert()
954 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone);
975 struct crypt_config *cc = io->cc; in crypt_alloc_buffer() local
985 mutex_lock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
987 clone = bio_alloc_bioset(GFP_NOIO, nr_iovecs, cc->bs); in crypt_alloc_buffer()
996 page = mempool_alloc(cc->page_pool, gfp_mask); in crypt_alloc_buffer()
998 crypt_free_buffer_pages(cc, clone); in crypt_alloc_buffer()
1018 mutex_unlock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
1023 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone) in crypt_free_buffer_pages() argument
1030 mempool_free(bv->bv_page, cc->page_pool); in crypt_free_buffer_pages()
1035 static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc, in crypt_io_init() argument
1038 io->cc = cc; in crypt_io_init()
1057 struct crypt_config *cc = io->cc; in crypt_dec_pending() local
1065 crypt_free_req(cc, io->ctx.req, base_bio); in crypt_dec_pending()
1090 struct crypt_config *cc = io->cc; in crypt_endio() local
1100 crypt_free_buffer_pages(cc, clone); in crypt_endio()
1117 struct crypt_config *cc = io->cc; in clone_init() local
1121 clone->bi_bdev = cc->dev->bdev; in clone_init()
1127 struct crypt_config *cc = io->cc; in kcryptd_io_read() local
1136 clone = bio_clone_fast(io->base_bio, gfp, cc->bs); in kcryptd_io_read()
1143 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_io_read()
1161 struct crypt_config *cc = io->cc; in kcryptd_queue_read() local
1164 queue_work(cc->io_queue, &io->work); in kcryptd_queue_read()
1178 struct crypt_config *cc = data; in dmcrypt_write() local
1187 spin_lock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1190 if (!RB_EMPTY_ROOT(&cc->write_tree)) in dmcrypt_write()
1194 __add_wait_queue(&cc->write_thread_wait, &wait); in dmcrypt_write()
1196 spin_unlock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1200 remove_wait_queue(&cc->write_thread_wait, &wait); in dmcrypt_write()
1207 spin_lock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1208 __remove_wait_queue(&cc->write_thread_wait, &wait); in dmcrypt_write()
1212 write_tree = cc->write_tree; in dmcrypt_write()
1213 cc->write_tree = RB_ROOT; in dmcrypt_write()
1214 spin_unlock_irq(&cc->write_thread_wait.lock); in dmcrypt_write()
1236 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_io_submit() local
1242 crypt_free_buffer_pages(cc, clone); in kcryptd_crypt_write_io_submit()
1251 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_crypt_write_io_submit()
1253 if (likely(!async) && test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) { in kcryptd_crypt_write_io_submit()
1258 spin_lock_irqsave(&cc->write_thread_wait.lock, flags); in kcryptd_crypt_write_io_submit()
1259 rbp = &cc->write_tree.rb_node; in kcryptd_crypt_write_io_submit()
1270 rb_insert_color(&io->rb_node, &cc->write_tree); in kcryptd_crypt_write_io_submit()
1272 wake_up_locked(&cc->write_thread_wait); in kcryptd_crypt_write_io_submit()
1273 spin_unlock_irqrestore(&cc->write_thread_wait.lock, flags); in kcryptd_crypt_write_io_submit()
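
Lines 1178-1273 show the write-offload path: kcryptd_crypt_write_io_submit() inserts finished writes into cc->write_tree keyed by sector, and the dmcrypt_write thread grabs the whole tree and submits it in sector order, keeping writes roughly sequential at the device. dm-crypt uses a kernel rbtree for this; the stand-in below uses a sorted singly-linked list purely to show the same ordering idea:

```c
#include <stddef.h>
#include <stdint.h>

struct pending_io {
	uint64_t sector;
	struct pending_io *next;
};

/* Insert keeping the queue ordered by sector (the rb_insert_color analogue). */
static void queue_write(struct pending_io **head, struct pending_io *io)
{
	while (*head && (*head)->sector < io->sector)
		head = &(*head)->next;
	io->next = *head;
	*head = io;
}

/* Drain everything in one shot, like "write_tree = RB_ROOT" at line 1213. */
static struct pending_io *take_all(struct pending_io **head)
{
	struct pending_io *all = *head;
	*head = NULL;
	return all;
}
```
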
1278 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_convert() local
1288 crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector); in kcryptd_crypt_write_convert()
1302 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_write_convert()
1324 struct crypt_config *cc = io->cc; in kcryptd_crypt_read_convert() local
1329 crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio, in kcryptd_crypt_read_convert()
1332 r = crypt_convert(cc, &io->ctx); in kcryptd_crypt_read_convert()
1348 struct crypt_config *cc = io->cc; in kcryptd_async_done() local
1355 if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post) in kcryptd_async_done()
1356 error = cc->iv_gen_ops->post(cc, iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
1361 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
1384 struct crypt_config *cc = io->cc; in kcryptd_queue_crypt() local
1387 queue_work(cc->crypt_queue, &io->work); in kcryptd_queue_crypt()
1414 static void crypt_free_tfms(struct crypt_config *cc) in crypt_free_tfms() argument
1418 if (!cc->tfms) in crypt_free_tfms()
1421 for (i = 0; i < cc->tfms_count; i++) in crypt_free_tfms()
1422 if (cc->tfms[i] && !IS_ERR(cc->tfms[i])) { in crypt_free_tfms()
1423 crypto_free_ablkcipher(cc->tfms[i]); in crypt_free_tfms()
1424 cc->tfms[i] = NULL; in crypt_free_tfms()
1427 kfree(cc->tfms); in crypt_free_tfms()
1428 cc->tfms = NULL; in crypt_free_tfms()
1431 static int crypt_alloc_tfms(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms() argument
1436 cc->tfms = kmalloc(cc->tfms_count * sizeof(struct crypto_ablkcipher *), in crypt_alloc_tfms()
1438 if (!cc->tfms) in crypt_alloc_tfms()
1441 for (i = 0; i < cc->tfms_count; i++) { in crypt_alloc_tfms()
1442 cc->tfms[i] = crypto_alloc_ablkcipher(ciphermode, 0, 0); in crypt_alloc_tfms()
1443 if (IS_ERR(cc->tfms[i])) { in crypt_alloc_tfms()
1444 err = PTR_ERR(cc->tfms[i]); in crypt_alloc_tfms()
1445 crypt_free_tfms(cc); in crypt_alloc_tfms()
1453 static int crypt_setkey_allcpus(struct crypt_config *cc) in crypt_setkey_allcpus() argument
1459 subkey_size = (cc->key_size - cc->key_extra_size) >> ilog2(cc->tfms_count); in crypt_setkey_allcpus()
1461 for (i = 0; i < cc->tfms_count; i++) { in crypt_setkey_allcpus()
1462 r = crypto_ablkcipher_setkey(cc->tfms[i], in crypt_setkey_allcpus()
1463 cc->key + (i * subkey_size), in crypt_setkey_allcpus()
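
With a keycount of N, crypt_setkey_allcpus() splits the key into N equal subkeys after setting aside key_extra_size bytes of IV material (cf. lines 1660-1666 below); the >> ilog2(tfms_count) at line 1459 is simply a power-of-two division. A sketch of the slicing:

```c
#include <stdint.h>

/* Returns subkey i, assuming the layout
 * [subkey 0 | ... | subkey N-1 | extra IV material] and power-of-two N. */
static const uint8_t *subkey(const uint8_t *key, unsigned key_size,
			     unsigned key_extra_size, unsigned tfms_count,
			     unsigned i, unsigned *subkey_size)
{
	*subkey_size = (key_size - key_extra_size) / tfms_count;
	return key + i * *subkey_size;
}
```
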
1472 static int crypt_set_key(struct crypt_config *cc, char *key) in crypt_set_key() argument
1478 if (cc->key_size != (key_string_len >> 1)) in crypt_set_key()
1482 if (!cc->key_size && strcmp(key, "-")) in crypt_set_key()
1485 if (cc->key_size && crypt_decode_key(cc->key, key, cc->key_size) < 0) in crypt_set_key()
1488 set_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_key()
1490 r = crypt_setkey_allcpus(cc); in crypt_set_key()
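
The key arrives as a hex string, two characters per byte, which is what the key_size != strlen(key)/2 check at line 1478 enforces; crypt_decode_key() (not shown in this listing) does the conversion. An illustrative userspace equivalent:

```c
#include <stdio.h>
#include <string.h>

static int decode_key(unsigned char *key, const char *hex, unsigned key_size)
{
	if (strlen(hex) != 2u * key_size)
		return -1;	/* cf. the length check at line 1478 */
	for (unsigned i = 0; i < key_size; i++)
		if (sscanf(hex + 2 * i, "%2hhx", &key[i]) != 1)
			return -1;
	return 0;
}
```
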
1499 static int crypt_wipe_key(struct crypt_config *cc) in crypt_wipe_key() argument
1501 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_wipe_key()
1502 memset(&cc->key, 0, cc->key_size * sizeof(u8)); in crypt_wipe_key()
1504 return crypt_setkey_allcpus(cc); in crypt_wipe_key()
1509 struct crypt_config *cc = ti->private; in crypt_dtr() local
1513 if (!cc) in crypt_dtr()
1516 if (cc->write_thread) in crypt_dtr()
1517 kthread_stop(cc->write_thread); in crypt_dtr()
1519 if (cc->io_queue) in crypt_dtr()
1520 destroy_workqueue(cc->io_queue); in crypt_dtr()
1521 if (cc->crypt_queue) in crypt_dtr()
1522 destroy_workqueue(cc->crypt_queue); in crypt_dtr()
1524 crypt_free_tfms(cc); in crypt_dtr()
1526 if (cc->bs) in crypt_dtr()
1527 bioset_free(cc->bs); in crypt_dtr()
1529 if (cc->page_pool) in crypt_dtr()
1530 mempool_destroy(cc->page_pool); in crypt_dtr()
1531 if (cc->req_pool) in crypt_dtr()
1532 mempool_destroy(cc->req_pool); in crypt_dtr()
1534 if (cc->iv_gen_ops && cc->iv_gen_ops->dtr) in crypt_dtr()
1535 cc->iv_gen_ops->dtr(cc); in crypt_dtr()
1537 if (cc->dev) in crypt_dtr()
1538 dm_put_device(ti, cc->dev); in crypt_dtr()
1540 kzfree(cc->cipher); in crypt_dtr()
1541 kzfree(cc->cipher_string); in crypt_dtr()
1544 kzfree(cc); in crypt_dtr()
1550 struct crypt_config *cc = ti->private; in crypt_ctr_cipher() local
1562 cc->cipher_string = kstrdup(cipher_in, GFP_KERNEL); in crypt_ctr_cipher()
1563 if (!cc->cipher_string) in crypt_ctr_cipher()
1575 cc->tfms_count = 1; in crypt_ctr_cipher()
1576 else if (sscanf(keycount, "%u%c", &cc->tfms_count, &dummy) != 1 || in crypt_ctr_cipher()
1577 !is_power_of_2(cc->tfms_count)) { in crypt_ctr_cipher()
1581 cc->key_parts = cc->tfms_count; in crypt_ctr_cipher()
1582 cc->key_extra_size = 0; in crypt_ctr_cipher()
1584 cc->cipher = kstrdup(cipher, GFP_KERNEL); in crypt_ctr_cipher()
1585 if (!cc->cipher) in crypt_ctr_cipher()
1621 ret = crypt_alloc_tfms(cc, cipher_api); in crypt_ctr_cipher()
1628 cc->iv_size = crypto_ablkcipher_ivsize(any_tfm(cc)); in crypt_ctr_cipher()
1629 if (cc->iv_size) in crypt_ctr_cipher()
1631 cc->iv_size = max(cc->iv_size, in crypt_ctr_cipher()
1640 cc->iv_gen_ops = NULL; in crypt_ctr_cipher()
1642 cc->iv_gen_ops = &crypt_iv_plain_ops; in crypt_ctr_cipher()
1644 cc->iv_gen_ops = &crypt_iv_plain64_ops; in crypt_ctr_cipher()
1646 cc->iv_gen_ops = &crypt_iv_essiv_ops; in crypt_ctr_cipher()
1648 cc->iv_gen_ops = &crypt_iv_benbi_ops; in crypt_ctr_cipher()
1650 cc->iv_gen_ops = &crypt_iv_null_ops; in crypt_ctr_cipher()
1652 cc->iv_gen_ops = &crypt_iv_lmk_ops; in crypt_ctr_cipher()
1659 if (cc->key_size % cc->key_parts) { in crypt_ctr_cipher()
1660 cc->key_parts++; in crypt_ctr_cipher()
1661 cc->key_extra_size = cc->key_size / cc->key_parts; in crypt_ctr_cipher()
1664 cc->iv_gen_ops = &crypt_iv_tcw_ops; in crypt_ctr_cipher()
1665 cc->key_parts += 2; /* IV + whitening */ in crypt_ctr_cipher()
1666 cc->key_extra_size = cc->iv_size + TCW_WHITENING_SIZE; in crypt_ctr_cipher()
1674 ret = crypt_set_key(cc, key); in crypt_ctr_cipher()
1681 if (cc->iv_gen_ops && cc->iv_gen_ops->ctr) { in crypt_ctr_cipher()
1682 ret = cc->iv_gen_ops->ctr(cc, ti, ivopts); in crypt_ctr_cipher()
1690 if (cc->iv_gen_ops && cc->iv_gen_ops->init) { in crypt_ctr_cipher()
1691 ret = cc->iv_gen_ops->init(cc); in crypt_ctr_cipher()
1714 struct crypt_config *cc; in crypt_ctr() local
1734 cc = kzalloc(sizeof(*cc) + key_size * sizeof(u8), GFP_KERNEL); in crypt_ctr()
1735 if (!cc) { in crypt_ctr()
1739 cc->key_size = key_size; in crypt_ctr()
1741 ti->private = cc; in crypt_ctr()
1746 cc->dmreq_start = sizeof(struct ablkcipher_request); in crypt_ctr()
1747 cc->dmreq_start += crypto_ablkcipher_reqsize(any_tfm(cc)); in crypt_ctr()
1748 cc->dmreq_start = ALIGN(cc->dmreq_start, __alignof__(struct dm_crypt_request)); in crypt_ctr()
1750 if (crypto_ablkcipher_alignmask(any_tfm(cc)) < CRYPTO_MINALIGN) { in crypt_ctr()
1752 iv_size_padding = -(cc->dmreq_start + sizeof(struct dm_crypt_request)) in crypt_ctr()
1753 & crypto_ablkcipher_alignmask(any_tfm(cc)); in crypt_ctr()
1760 iv_size_padding = crypto_ablkcipher_alignmask(any_tfm(cc)); in crypt_ctr()
1764 cc->req_pool = mempool_create_kmalloc_pool(MIN_IOS, cc->dmreq_start + in crypt_ctr()
1765 sizeof(struct dm_crypt_request) + iv_size_padding + cc->iv_size); in crypt_ctr()
1766 if (!cc->req_pool) { in crypt_ctr()
1771 cc->per_bio_data_size = ti->per_bio_data_size = in crypt_ctr()
1772 ALIGN(sizeof(struct dm_crypt_io) + cc->dmreq_start + in crypt_ctr()
1773 sizeof(struct dm_crypt_request) + iv_size_padding + cc->iv_size, in crypt_ctr()
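
Lines 1746-1773 size each pool element as [ablkcipher_request | dm_crypt_request | padding | IV], with the IV padded out to the cipher's alignment. The padding expression at lines 1752-1753 relies on the two's-complement identity that -x & mask is the number of bytes needed to round x up to a multiple of mask + 1 (mask being one less than a power of two). A self-checking sketch:

```c
#include <assert.h>
#include <stdio.h>

/* Bytes needed to round x up to (mask + 1)-byte alignment. */
static unsigned pad_to(unsigned x, unsigned mask)
{
	return (0u - x) & mask;
}

int main(void)
{
	unsigned dmreq_start = 196, dmreq_size = 56, align_mask = 15;
	unsigned pad = pad_to(dmreq_start + dmreq_size, align_mask);

	assert(((dmreq_start + dmreq_size + pad) & align_mask) == 0);
	printf("iv_size_padding = %u\n", pad);	/* 252 -> pad 4 -> 256 */
	return 0;
}
```
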
1776 cc->page_pool = mempool_create_page_pool(BIO_MAX_PAGES, 0); in crypt_ctr()
1777 if (!cc->page_pool) { in crypt_ctr()
1782 cc->bs = bioset_create(MIN_IOS, 0); in crypt_ctr()
1783 if (!cc->bs) { in crypt_ctr()
1788 mutex_init(&cc->bio_alloc_lock); in crypt_ctr()
1795 cc->iv_offset = tmpll; in crypt_ctr()
1797 if (dm_get_device(ti, argv[3], dm_table_get_mode(ti->table), &cc->dev)) { in crypt_ctr()
1806 cc->start = tmpll; in crypt_ctr()
1832 set_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_ctr()
1835 set_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_ctr()
1845 cc->io_queue = alloc_workqueue("kcryptd_io", WQ_MEM_RECLAIM, 1); in crypt_ctr()
1846 if (!cc->io_queue) { in crypt_ctr()
1851 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) in crypt_ctr()
1852 cc->crypt_queue = alloc_workqueue("kcryptd", WQ_CPU_INTENSIVE | WQ_MEM_RECLAIM, 1); in crypt_ctr()
1854 cc->crypt_queue = alloc_workqueue("kcryptd", WQ_CPU_INTENSIVE | WQ_MEM_RECLAIM | WQ_UNBOUND, in crypt_ctr()
1856 if (!cc->crypt_queue) { in crypt_ctr()
1861 init_waitqueue_head(&cc->write_thread_wait); in crypt_ctr()
1862 cc->write_tree = RB_ROOT; in crypt_ctr()
1864 cc->write_thread = kthread_create(dmcrypt_write, cc, "dmcrypt_write"); in crypt_ctr()
1865 if (IS_ERR(cc->write_thread)) { in crypt_ctr()
1866 ret = PTR_ERR(cc->write_thread); in crypt_ctr()
1867 cc->write_thread = NULL; in crypt_ctr()
1871 wake_up_process(cc->write_thread); in crypt_ctr()
1886 struct crypt_config *cc = ti->private; in crypt_map() local
1894 bio->bi_bdev = cc->dev->bdev; in crypt_map()
1896 bio->bi_iter.bi_sector = cc->start + in crypt_map()
1901 io = dm_per_bio_data(bio, cc->per_bio_data_size); in crypt_map()
1902 crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector)); in crypt_map()
1917 struct crypt_config *cc = ti->private; in crypt_status() local
1927 DMEMIT("%s ", cc->cipher_string); in crypt_status()
1929 if (cc->key_size > 0) in crypt_status()
1930 for (i = 0; i < cc->key_size; i++) in crypt_status()
1931 DMEMIT("%02x", cc->key[i]); in crypt_status()
1935 DMEMIT(" %llu %s %llu", (unsigned long long)cc->iv_offset, in crypt_status()
1936 cc->dev->name, (unsigned long long)cc->start); in crypt_status()
1939 num_feature_args += test_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_status()
1940 num_feature_args += test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_status()
1945 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) in crypt_status()
1947 if (test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) in crypt_status()
1957 struct crypt_config *cc = ti->private; in crypt_postsuspend() local
1959 set_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_postsuspend()
1964 struct crypt_config *cc = ti->private; in crypt_preresume() local
1966 if (!test_bit(DM_CRYPT_KEY_VALID, &cc->flags)) { in crypt_preresume()
1976 struct crypt_config *cc = ti->private; in crypt_resume() local
1978 clear_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_resume()
1987 struct crypt_config *cc = ti->private; in crypt_message() local
1994 if (!test_bit(DM_CRYPT_SUSPENDED, &cc->flags)) { in crypt_message()
1999 ret = crypt_set_key(cc, argv[2]); in crypt_message()
2002 if (cc->iv_gen_ops && cc->iv_gen_ops->init) in crypt_message()
2003 ret = cc->iv_gen_ops->init(cc); in crypt_message()
2007 if (cc->iv_gen_ops && cc->iv_gen_ops->wipe) { in crypt_message()
2008 ret = cc->iv_gen_ops->wipe(cc); in crypt_message()
2012 return crypt_wipe_key(cc); in crypt_message()
2024 struct crypt_config *cc = ti->private; in crypt_merge() local
2025 struct request_queue *q = bdev_get_queue(cc->dev->bdev); in crypt_merge()
2030 bvm->bi_bdev = cc->dev->bdev; in crypt_merge()
2031 bvm->bi_sector = cc->start + dm_target_offset(ti, bvm->bi_sector); in crypt_merge()
2039 struct crypt_config *cc = ti->private; in crypt_iterate_devices() local
2041 return fn(ti, cc->dev, cc->start, ti->len, data); in crypt_iterate_devices()