Cross-reference of dm-crypt.c in /netgear-R7000-V1.0.7.12_1.2.5/components/opensource/linux/linux-2.6.36/drivers/md/

Lines Matching defs: cc

Each entry below is a line from dm-crypt.c, prefixed with its line number in that file, on which the crypt target's per-target state, struct crypt_config *cc, is defined or bound.

71 int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
73 void (*dtr)(struct crypt_config *cc);
74 int (*init)(struct crypt_config *cc);
75 int (*wipe)(struct crypt_config *cc);
76 int (*generator)(struct crypt_config *cc, u8 *iv, sector_t sector);
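
These five function pointers are dm-crypt's IV-generation interface (struct crypt_iv_operations): each IV mode seen further down (plain, plain64, essiv, benbi, null) supplies its own constructor, destructor, and per-sector generator, and the rest of the driver dispatches through cc->iv_gen_ops without knowing which mode is active. A minimal userspace sketch of the same dispatch pattern; cc_sketch, iv_ops, and null_gen are illustrative names, not the kernel's:

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    typedef uint64_t sector_t;

    struct cc_sketch {
        unsigned iv_size;    /* IV length in bytes for the chosen cipher */
    };

    /* Same shape as the kernel's crypt_iv_operations vtable above. */
    struct iv_ops {
        int (*generator)(struct cc_sketch *cc, uint8_t *iv, sector_t sector);
    };

    /* "null" mode: the IV is always all zeroes (cf. crypt_iv_null_gen). */
    static int null_gen(struct cc_sketch *cc, uint8_t *iv, sector_t sector)
    {
        (void)sector;
        memset(iv, 0, cc->iv_size);
        return 0;
    }

    static const struct iv_ops null_ops = { .generator = null_gen };

    int main(void)
    {
        struct cc_sketch cc = { .iv_size = 16 };
        uint8_t iv[16];
        null_ops.generator(&cc, iv, 42);   /* dispatch through the table */
        printf("iv[0] = %u\n", iv[0]);
        return 0;
    }
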
175 static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
177 memset(iv, 0, cc->iv_size);
183 static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv,
186 memset(iv, 0, cc->iv_size);
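
crypt_iv_plain_gen and crypt_iv_plain64_gen both begin by zeroing the IV; the lines not matched here then store the sector number little-endian, the low 32 bits for plain and all 64 for plain64. A self-contained sketch of both, assuming glibc's <endian.h> helpers:

    #define _DEFAULT_SOURCE      /* for htole32/htole64 on glibc (an assumption) */
    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>
    #include <endian.h>

    typedef uint64_t sector_t;

    /* plain: zero the IV, then store the low 32 bits of the sector, LE. */
    static void plain_iv(uint8_t *iv, unsigned iv_size, sector_t sector)
    {
        uint32_t s = htole32((uint32_t)(sector & 0xffffffff));
        memset(iv, 0, iv_size);
        memcpy(iv, &s, sizeof(s));   /* iv_size >= 4 assumed */
    }

    /* plain64: same, but the full 64-bit sector number survives. */
    static void plain64_iv(uint8_t *iv, unsigned iv_size, sector_t sector)
    {
        uint64_t s = htole64(sector);
        memset(iv, 0, iv_size);
        memcpy(iv, &s, sizeof(s));   /* iv_size >= 8 assumed */
    }

    int main(void)
    {
        uint8_t iv[16];
        plain_iv(iv, sizeof(iv), 0x123456789ULL);
        printf("plain   iv[0] = %02x\n", iv[0]);  /* 89: truncated to 32 bits */
        plain64_iv(iv, sizeof(iv), 0x123456789ULL);
        printf("plain64 iv[4] = %02x\n", iv[4]);  /* 01: upper bits preserved */
        return 0;
    }

The truncation is why plain64 exists: on volumes larger than 2 TiB, plain repeats IVs every 2^32 sectors.
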
193 static int crypt_iv_essiv_init(struct crypt_config *cc)
195 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
200 sg_init_one(&sg, cc->key, cc->key_size);
204 err = crypto_hash_digest(&desc, &sg, cc->key_size, essiv->salt);
213 static int crypt_iv_essiv_wipe(struct crypt_config *cc)
215 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
223 static void crypt_iv_essiv_dtr(struct crypt_config *cc)
225 struct iv_essiv_private *essiv = &cc->iv_gen_private.essiv;
237 static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
266 essiv_tfm = crypto_alloc_cipher(cc->cipher, 0, CRYPTO_ALG_ASYNC);
273 crypto_ablkcipher_ivsize(cc->tfm)) {
280 cc->iv_gen_private.essiv.salt = salt;
281 cc->iv_gen_private.essiv.tfm = essiv_tfm;
282 cc->iv_gen_private.essiv.hash_tfm = hash_tfm;
295 static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
297 memset(iv, 0, cc->iv_size);
299 crypto_cipher_encrypt_one(cc->iv_gen_private.essiv.tfm, iv, iv);
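
ESSIV ties the IV to the key as well as the sector: init hashes the volume key into a salt (crypto_hash_digest above), ctr keys a second cipher with that salt, and gen encrypts the sector number with it, so identical sectors on differently keyed volumes get different IVs. A toy sketch of that data flow; toy_hash and toy_encrypt are deliberately non-cryptographic stand-ins, not the kernel crypto API:

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    typedef uint64_t sector_t;

    /* Stand-in "hash": not cryptographic, only shows the data flow. */
    static void toy_hash(const uint8_t *in, size_t len, uint8_t out[16])
    {
        memset(out, 0, 16);
        for (size_t i = 0; i < len; i++)
            out[i % 16] ^= in[i];
    }

    /* Stand-in "block cipher": XOR with the key, again only for shape. */
    static void toy_encrypt(const uint8_t key[16], uint8_t block[16])
    {
        for (int i = 0; i < 16; i++)
            block[i] ^= key[i];
    }

    /* init: salt = H(volume key)   (cf. crypt_iv_essiv_init)  */
    /* gen:  IV   = E_salt(sector)  (cf. crypt_iv_essiv_gen)   */
    static void essiv_gen(const uint8_t *key, size_t key_size,
                          uint8_t iv[16], sector_t sector)
    {
        uint8_t salt[16];
        toy_hash(key, key_size, salt);
        memset(iv, 0, 16);
        memcpy(iv, &sector, sizeof(sector)); /* little-endian host assumed */
        toy_encrypt(salt, iv);
    }

    int main(void)
    {
        const uint8_t key[] = "volume-key-bytes";
        uint8_t iv[16];
        essiv_gen(key, sizeof(key) - 1, iv, 42);
        printf("iv[0] = %02x\n", iv[0]);
        return 0;
    }
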
303 static int crypt_iv_benbi_ctr(struct crypt_config *cc, struct dm_target *ti,
306 unsigned bs = crypto_ablkcipher_blocksize(cc->tfm);
322 cc->iv_gen_private.benbi.shift = 9 - log;
327 static void crypt_iv_benbi_dtr(struct crypt_config *cc)
331 static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
335 memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */
337 val = cpu_to_be64(((u64)sector << cc->iv_gen_private.benbi.shift) + 1);
338 put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64)));
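
benbi (the big-endian "narrow block"-count, starting at 1) clears everything but the final 8 bytes of the IV and stores the 64-bit big-endian block number there: (sector << shift) + 1, where shift = 9 - log2(cipher block size) as set up in crypt_iv_benbi_ctr (line 322). The arithmetic, reproduced in userspace (htobe64 assumes glibc):

    #define _DEFAULT_SOURCE      /* for htobe64 on glibc (an assumption) */
    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>
    #include <endian.h>

    typedef uint64_t sector_t;

    static void benbi_iv(uint8_t *iv, unsigned iv_size, sector_t sector,
                         unsigned block_size_log2)
    {
        /* 512-byte sectors: the shift converts a sector number into a
           count of cipher blocks; +1 because benbi numbering starts at 1. */
        unsigned shift = 9 - block_size_log2;
        uint64_t val = htobe64(((uint64_t)sector << shift) + 1);

        memset(iv, 0, iv_size - sizeof(uint64_t));                   /* head */
        memcpy(iv + iv_size - sizeof(uint64_t), &val, sizeof(val));  /* tail */
    }

    int main(void)
    {
        uint8_t iv[16];
        benbi_iv(iv, sizeof(iv), 8, 4);  /* sector 8, 16-byte blocks (log2 = 4) */
        printf("iv[15] = %u\n", iv[15]); /* 1: count is 257 = 0x0101, big-endian */
        return 0;
    }
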
343 static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
345 memset(iv, 0, cc->iv_size);
376 static void crypt_convert_init(struct crypt_config *cc,
387 ctx->sector = sector + cc->iv_offset;
391 static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc,
394 return (struct dm_crypt_request *)((char *)req + cc->dmreq_start);
397 static struct ablkcipher_request *req_of_dmreq(struct crypt_config *cc,
400 return (struct ablkcipher_request *)((char *)dmreq - cc->dmreq_start);
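
dmreq_of_req and req_of_dmreq are exact inverses: each pooled allocation holds the ablkcipher request and dm-crypt's own dm_crypt_request back to back, and cc->dmreq_start (computed at lines 1210-1214 below) is the byte offset between them. A userspace model of that layout; cipher_req and dm_req are illustrative stand-ins:

    #include <stdlib.h>
    #include <assert.h>

    /* Stand-ins for ablkcipher_request / dm_crypt_request. */
    struct cipher_req { char opaque[48]; };
    struct dm_req     { int sector; };

    int main(void)
    {
        size_t dmreq_start = sizeof(struct cipher_req);  /* kernel also ALIGNs this */

        /* One allocation holds both structures back to back. */
        char *mem = malloc(dmreq_start + sizeof(struct dm_req));
        struct cipher_req *req   = (struct cipher_req *)mem;
        struct dm_req     *dmreq = (struct dm_req *)((char *)req + dmreq_start);

        /* req_of_dmreq: walk back from the bookkeeping to the request. */
        assert((struct cipher_req *)((char *)dmreq - dmreq_start) == req);

        free(mem);
        return 0;
    }

One allocation instead of two means one mempool, one failure point, and no pointer from the request back to its bookkeeping to maintain.
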
403 static int crypt_convert_block(struct crypt_config *cc,
413 dmreq = dmreq_of_req(cc, req);
415 crypto_ablkcipher_alignmask(cc->tfm) + 1);
438 if (cc->iv_gen_ops) {
439 r = cc->iv_gen_ops->generator(cc, iv, ctx->sector);
457 static void crypt_alloc_req(struct crypt_config *cc,
460 if (!cc->req)
461 cc->req = mempool_alloc(cc->req_pool, GFP_NOIO);
462 ablkcipher_request_set_tfm(cc->req, cc->tfm);
463 ablkcipher_request_set_callback(cc->req, CRYPTO_TFM_REQ_MAY_BACKLOG |
466 dmreq_of_req(cc, cc->req));
472 static int crypt_convert(struct crypt_config *cc,
482 crypt_alloc_req(cc, ctx);
486 r = crypt_convert_block(cc, ctx, cc->req);
495 cc->req = NULL;
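
crypt_alloc_req takes a request from the pool only on first use, and crypt_convert sets cc->req back to NULL once the block has been handed to the cipher layer, so ownership passes to the asynchronous completion path (which returns it via mempool_free at line 904). The same allocate-on-demand, drop-on-handoff pattern in plain C, with malloc/free standing in for the mempool:

    #include <stdlib.h>

    struct ctx { void *req; };

    static void alloc_req(struct ctx *c)
    {
        if (!c->req)                 /* first use on this iteration? */
            c->req = malloc(64);     /* kernel: mempool_alloc(cc->req_pool, GFP_NOIO) */
    }

    static void convert(struct ctx *c, int blocks)
    {
        while (blocks--) {
            alloc_req(c);
            /* ... kernel submits c->req to the cipher layer here ... */
            free(c->req);   /* stands in for the async completion (line 904) */
            c->req = NULL;  /* next iteration must take a fresh request */
        }
    }

    int main(void)
    {
        struct ctx c = { 0 };
        convert(&c, 3);
        return 0;
    }
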
519 struct crypt_config *cc = io->target->private;
521 bio_free(bio, cc->bs);
533 struct crypt_config *cc = io->target->private;
540 clone = bio_alloc_bioset(GFP_NOIO, nr_iovecs, cc->bs);
548 page = mempool_alloc(cc->page_pool, gfp_mask);
565 mempool_free(page, cc->page_pool);
580 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone)
588 mempool_free(bv->bv_page, cc->page_pool);
596 struct crypt_config *cc = ti->private;
599 io = mempool_alloc(cc->io_pool, GFP_NOIO);
622 struct crypt_config *cc = io->target->private;
630 mempool_free(io, cc->io_pool);
658 struct crypt_config *cc = io->target->private;
668 crypt_free_buffer_pages(cc, clone);
685 struct crypt_config *cc = io->target->private;
689 clone->bi_bdev = cc->dev->bdev;
696 struct crypt_config *cc = io->target->private;
707 clone = bio_alloc_bioset(GFP_NOIO, bio_segments(base_bio), cc->bs);
718 clone->bi_sector = cc->start + io->sector;
743 struct crypt_config *cc = io->target->private;
746 queue_work(cc->io_queue, &io->work);
753 struct crypt_config *cc = io->target->private;
756 crypt_free_buffer_pages(cc, clone);
766 clone->bi_sector = cc->start + io->sector;
776 struct crypt_config *cc = io->target->private;
789 crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector);
809 r = crypt_convert(cc, &io->ctx);
841 crypt_convert_init(cc, &new_io->ctx, NULL,
875 struct crypt_config *cc = io->target->private;
880 crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio,
883 r = crypt_convert(cc, &io->ctx);
897 struct crypt_config *cc = io->target->private;
904 mempool_free(req_of_dmreq(cc, dmreq), cc->req_pool);
927 struct crypt_config *cc = io->target->private;
930 queue_work(cc->crypt_queue, &io->work);
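
Both workqueues created in the constructor are used the same way: the I/O or crypto step is packaged as a work item and queue_work defers it to the kcryptd thread, keeping heavy work out of the submission context. A single-threaded stand-in for that defer-then-drain pattern; the ring buffer here is purely illustrative:

    #include <stdio.h>

    typedef void (*work_fn)(void *);

    /* A toy fixed-size work queue; the kernel's is a real kernel thread. */
    static struct { work_fn fn; void *arg; } queue[16];
    static int head, tail;

    static void queue_work_sketch(work_fn fn, void *arg)
    {
        queue[tail].fn = fn;
        queue[tail].arg = arg;
        tail = (tail + 1) % 16;
    }

    static void drain(void)   /* stands in for the kcryptd worker thread */
    {
        while (head != tail) {
            queue[head].fn(queue[head].arg);
            head = (head + 1) % 16;
        }
    }

    static void do_crypt(void *arg) { printf("crypt io %d\n", *(int *)arg); }

    int main(void)
    {
        int io = 7;
        queue_work_sketch(do_crypt, &io);  /* cf. queue_work(cc->crypt_queue, &io->work) */
        drain();
        return 0;
    }
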
974 static int crypt_set_key(struct crypt_config *cc, char *key)
978 if (cc->key_size && cc->key_size != key_size)
981 cc->key_size = key_size; /* initial settings */
984 (key_size && crypt_decode_key(cc->key, key, key_size) < 0))
987 set_bit(DM_CRYPT_KEY_VALID, &cc->flags);
989 return crypto_ablkcipher_setkey(cc->tfm, cc->key, cc->key_size);
992 static int crypt_wipe_key(struct crypt_config *cc)
994 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags);
995 memset(&cc->key, 0, cc->key_size * sizeof(u8));
996 return crypto_ablkcipher_setkey(cc->tfm, cc->key, cc->key_size);
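
crypt_set_key checks the key length, decodes the hex string into cc->key (crypt_decode_key), and marks the key valid; crypt_wipe_key clears the flag, zeroes the bytes, and loads the all-zero key into the tfm so no usable copy remains. A sketch of decode-then-wipe, with decode_key as my stand-in for crypt_decode_key:

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    static int hexval(char c)
    {
        if (c >= '0' && c <= '9') return c - '0';
        if (c >= 'a' && c <= 'f') return c - 'a' + 10;
        return -1;
    }

    /* Decode a hex string of exactly 2*size chars into key. */
    static int decode_key(uint8_t *key, const char *hex, size_t size)
    {
        if (strlen(hex) != size * 2)
            return -1;
        for (size_t i = 0; i < size; i++) {
            int hi = hexval(hex[2 * i]), lo = hexval(hex[2 * i + 1]);
            if (hi < 0 || lo < 0)
                return -1;
            key[i] = (uint8_t)(hi << 4 | lo);
        }
        return 0;
    }

    int main(void)
    {
        uint8_t key[4];
        if (decode_key(key, "deadbeef", sizeof(key)) == 0)
            printf("key[0] = %02x\n", key[0]);
        memset(key, 0, sizeof(key));   /* wipe: cf. crypt_wipe_key, line 995 */
        return 0;
    }
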
1001 struct crypt_config *cc = ti->private;
1005 if (!cc)
1008 if (cc->io_queue)
1009 destroy_workqueue(cc->io_queue);
1010 if (cc->crypt_queue)
1011 destroy_workqueue(cc->crypt_queue);
1013 if (cc->bs)
1014 bioset_free(cc->bs);
1016 if (cc->page_pool)
1017 mempool_destroy(cc->page_pool);
1018 if (cc->req_pool)
1019 mempool_destroy(cc->req_pool);
1020 if (cc->io_pool)
1021 mempool_destroy(cc->io_pool);
1023 if (cc->iv_gen_ops && cc->iv_gen_ops->dtr)
1024 cc->iv_gen_ops->dtr(cc);
1026 if (cc->tfm && !IS_ERR(cc->tfm))
1027 crypto_free_ablkcipher(cc->tfm);
1029 if (cc->dev)
1030 dm_put_device(ti, cc->dev);
1032 kzfree(cc->cipher);
1033 kzfree(cc->cipher_mode);
1036 kzfree(cc);
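
The destructor releases everything in roughly the reverse of construction order and NULL-checks each member first, so it is safe to run against a half-built cc when the constructor bails out early. The same defensive shape in miniature (free stands in for the various destroy/put calls, which unlike free are not NULL-safe):

    #include <stdlib.h>

    struct cfg { void *io_queue, *page_pool, *dev; };

    static void cfg_dtr(struct cfg *c)
    {
        if (!c)
            return;                       /* ctr may fail before allocating cfg */
        if (c->io_queue)  free(c->io_queue);
        if (c->page_pool) free(c->page_pool);
        if (c->dev)       free(c->dev);
        free(c);
    }

    int main(void)
    {
        struct cfg *c = calloc(1, sizeof(*c));  /* all NULL, as after kzalloc (1192) */
        cfg_dtr(c);                             /* safe even though nothing was built */
        return 0;
    }
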
1042 struct crypt_config *cc = ti->private;
1060 cc->cipher = kstrdup(cipher, GFP_KERNEL);
1061 if (!cc->cipher)
1065 cc->cipher_mode = kstrdup(tmp, GFP_KERNEL);
1066 if (!cc->cipher_mode)
1079 kfree(cc->cipher_mode);
1080 cc->cipher_mode = kstrdup("cbc-plain", GFP_KERNEL);
1102 cc->tfm = crypto_alloc_ablkcipher(cipher_api, 0, 0);
1103 if (IS_ERR(cc->tfm)) {
1104 ret = PTR_ERR(cc->tfm);
1110 ret = crypt_set_key(cc, key);
1117 cc->iv_size = crypto_ablkcipher_ivsize(cc->tfm);
1118 if (cc->iv_size)
1120 cc->iv_size = max(cc->iv_size,
1129 cc->iv_gen_ops = NULL;
1131 cc->iv_gen_ops = &crypt_iv_plain_ops;
1133 cc->iv_gen_ops = &crypt_iv_plain64_ops;
1135 cc->iv_gen_ops = &crypt_iv_essiv_ops;
1137 cc->iv_gen_ops = &crypt_iv_benbi_ops;
1139 cc->iv_gen_ops = &crypt_iv_null_ops;
1147 if (cc->iv_gen_ops && cc->iv_gen_ops->ctr) {
1148 ret = cc->iv_gen_ops->ctr(cc, ti, ivopts);
1156 if (cc->iv_gen_ops && cc->iv_gen_ops->init) {
1157 ret = cc->iv_gen_ops->init(cc);
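
crypt_ctr_cipher splits the first table argument, cipher-chainmode-ivmode[:ivopts] (e.g. aes-cbc-essiv:sha256), into the fields that choose cc->iv_gen_ops; a bare cipher name is rewritten to the default cbc-plain (line 1080). A sketch of that parse, assuming glibc's strsep:

    #define _DEFAULT_SOURCE      /* for strsep on glibc (an assumption) */
    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        char spec[] = "aes-cbc-essiv:sha256";   /* dmsetup-style cipher argument */
        char *p = spec;

        char *cipher = strsep(&p, "-");              /* "aes" */
        char *mode   = strsep(&p, "-");              /* "cbc", NULL if bare name */
        char *ivmode = mode ? strsep(&p, "-") : NULL;
        char *ivopts = NULL;
        if (ivmode && (ivopts = strchr(ivmode, ':')))
            *ivopts++ = '\0';                        /* "essiv" / "sha256" */

        printf("cipher=%s mode=%s iv=%s opts=%s\n",
               cipher, mode ? mode : "cbc",
               ivmode ? ivmode : "plain", ivopts ? ivopts : "-");
        return 0;
    }
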
1180 struct crypt_config *cc;
1192 cc = kzalloc(sizeof(*cc) + key_size * sizeof(u8), GFP_KERNEL);
1193 if (!cc) {
1198 ti->private = cc;
1204 cc->io_pool = mempool_create_slab_pool(MIN_IOS, _crypt_io_pool);
1205 if (!cc->io_pool) {
1210 cc->dmreq_start = sizeof(struct ablkcipher_request);
1211 cc->dmreq_start += crypto_ablkcipher_reqsize(cc->tfm);
1212 cc->dmreq_start = ALIGN(cc->dmreq_start, crypto_tfm_ctx_alignment());
1213 cc->dmreq_start += crypto_ablkcipher_alignmask(cc->tfm) &
1216 cc->req_pool = mempool_create_kmalloc_pool(MIN_IOS, cc->dmreq_start +
1217 sizeof(struct dm_crypt_request) + cc->iv_size);
1218 if (!cc->req_pool) {
1222 cc->req = NULL;
1224 cc->page_pool = mempool_create_page_pool(MIN_POOL_PAGES, 0);
1225 if (!cc->page_pool) {
1230 cc->bs = bioset_create(MIN_IOS, 0);
1231 if (!cc->bs) {
1241 cc->iv_offset = tmpll;
1243 if (dm_get_device(ti, argv[3], dm_table_get_mode(ti->table), &cc->dev)) {
1252 cc->start = tmpll;
1255 cc->io_queue = create_singlethread_workqueue("kcryptd_io");
1256 if (!cc->io_queue) {
1261 cc->crypt_queue = create_singlethread_workqueue("kcryptd");
1262 if (!cc->crypt_queue) {
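
The dmreq_start arithmetic at lines 1210-1214 fixes where dm_crypt_request begins inside each pooled buffer: the ablkcipher request plus its transform-private context, rounded up to the crypto context alignment (the req_pool element then adds the dm_crypt_request and IV on top, line 1216). The rounding, reproduced with illustrative sizes:

    #include <stdio.h>
    #include <stddef.h>

    /* Kernel-style ALIGN: round x up to a power-of-two boundary a. */
    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

    int main(void)
    {
        size_t req_size  = 48;  /* sizeof(struct ablkcipher_request), illustrative */
        size_t tfm_ctx   = 20;  /* crypto_ablkcipher_reqsize(tfm), illustrative */
        size_t ctx_align = 8;   /* crypto_tfm_ctx_alignment(), illustrative */

        size_t dmreq_start = ALIGN_UP(req_size + tfm_ctx, ctx_align);
        printf("dmreq_start = %zu\n", dmreq_start);  /* 68 rounds up to 72 */
        return 0;
    }
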
1279 struct crypt_config *cc;
1282 cc = ti->private;
1283 bio->bi_bdev = cc->dev->bdev;
1300 struct crypt_config *cc = ti->private;
1309 if (cc->cipher_mode)
1310 DMEMIT("%s-%s ", cc->cipher, cc->cipher_mode);
1312 DMEMIT("%s ", cc->cipher);
1314 if (cc->key_size > 0) {
1315 if ((maxlen - sz) < ((cc->key_size << 1) + 1))
1318 crypt_encode_key(result + sz, cc->key, cc->key_size);
1319 sz += cc->key_size << 1;
1326 DMEMIT(" %llu %s %llu", (unsigned long long)cc->iv_offset,
1327 cc->dev->name, (unsigned long long)cc->start);
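
crypt_status rebuilds the target's table line for userspace: cipher[-mode], then the key printed as hex (two characters per byte, hence key_size << 1 and the maxlen check), then iv_offset, device, and start. A sketch of the same formatting with made-up values:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint8_t key[4] = { 0xde, 0xad, 0xbe, 0xef };
        char line[128];
        int sz = 0;

        sz += snprintf(line + sz, sizeof(line) - sz, "aes-cbc-essiv:sha256 ");
        for (int i = 0; i < 4; i++)   /* key_size << 1 hex characters */
            sz += snprintf(line + sz, sizeof(line) - sz, "%02x", key[i]);
        sz += snprintf(line + sz, sizeof(line) - sz, " %llu %s %llu",
                       0ULL, "/dev/sda2", 0ULL);   /* iv_offset, device, start */

        puts(line);   /* aes-cbc-essiv:sha256 deadbeef 0 /dev/sda2 0 */
        return 0;
    }
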
1335 struct crypt_config *cc = ti->private;
1337 set_bit(DM_CRYPT_SUSPENDED, &cc->flags);
1342 struct crypt_config *cc = ti->private;
1344 if (!test_bit(DM_CRYPT_KEY_VALID, &cc->flags)) {
1354 struct crypt_config *cc = ti->private;
1356 clear_bit(DM_CRYPT_SUSPENDED, &cc->flags);
1365 struct crypt_config *cc = ti->private;
1372 if (!test_bit(DM_CRYPT_SUSPENDED, &cc->flags)) {
1377 ret = crypt_set_key(cc, argv[2]);
1380 if (cc->iv_gen_ops && cc->iv_gen_ops->init)
1381 ret = cc->iv_gen_ops->init(cc);
1385 if (cc->iv_gen_ops && cc->iv_gen_ops->wipe) {
1386 ret = cc->iv_gen_ops->wipe(cc);
1390 return crypt_wipe_key(cc);
1402 struct crypt_config *cc = ti->private;
1403 struct request_queue *q = bdev_get_queue(cc->dev->bdev);
1408 bvm->bi_bdev = cc->dev->bdev;
1409 bvm->bi_sector = cc->start + dm_target_offset(ti, bvm->bi_sector);
1417 struct crypt_config *cc = ti->private;
1419 return fn(ti, cc->dev, cc->start, ti->len, data);