Lines matching refs:cc in /netgear-WNDR4500v2-V1.0.0.60_1.0.38/src/linux/linux-2.6/drivers/md/ (the device-mapper crypt target, dm-crypt)

59 	int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
61 void (*dtr)(struct crypt_config *cc);
62 const char *(*status)(struct crypt_config *cc);
63 int (*generator)(struct crypt_config *cc, u8 *iv, sector_t sector);
131 static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
133 memset(iv, 0, cc->iv_size);
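Lines 59-63 declare the per-IV-mode operations (optional ctr/dtr/status plus the per-sector generator), and lines 131-133 show the simplest mode, plain, which zeroes the IV and, in dm-crypt, stores the low 32 bits of the sector number little-endian at its start. Below is a minimal user-space sketch of the same vtable pattern; the names `iv_ops`, `crypt_cfg` and `plain_gen` are illustrative, not the kernel's.

```c
/* Minimal sketch of the IV-operations vtable (lines 59-63) plus the "plain"
 * generator (lines 131-133): zero the IV, then store the low 32 bits of the
 * sector little-endian at its start.  Struct and function names are
 * illustrative stand-ins for crypt_iv_operations / crypt_config. */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

struct crypt_cfg;                                  /* stands in for crypt_config */

struct iv_ops {
    int  (*ctr)(struct crypt_cfg *cc, const char *opts);   /* optional setup    */
    void (*dtr)(struct crypt_cfg *cc);                      /* optional teardown */
    int  (*generator)(struct crypt_cfg *cc, uint8_t *iv, uint64_t sector);
};

struct crypt_cfg {
    unsigned iv_size;
    const struct iv_ops *iv_gen_ops;
};

static int plain_gen(struct crypt_cfg *cc, uint8_t *iv, uint64_t sector)
{
    memset(iv, 0, cc->iv_size);
    for (unsigned i = 0; i < 4 && i < cc->iv_size; i++)
        iv[i] = (uint8_t)(sector >> (8 * i));      /* low 32 bits, little-endian */
    return 0;
}

static const struct iv_ops plain_ops = { .generator = plain_gen };

int main(void)
{
    struct crypt_cfg cc = { .iv_size = 16, .iv_gen_ops = &plain_ops };
    uint8_t iv[16];

    if (cc.iv_gen_ops && cc.iv_gen_ops->generator)
        cc.iv_gen_ops->generator(&cc, iv, 0x11223344);
    printf("%02x %02x %02x %02x\n", iv[0], iv[1], iv[2], iv[3]);   /* 44 33 22 11 */
    return 0;
}
```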
139 static int crypt_iv_essiv_ctr(struct crypt_config *cc, struct dm_target *ti,
170 sg_set_buf(&sg, cc->key, cc->key_size);
173 err = crypto_hash_digest(&desc, &sg, cc->key_size, salt);
182 essiv_tfm = crypto_alloc_cipher(cc->cipher, 0, CRYPTO_ALG_ASYNC);
189 crypto_blkcipher_ivsize(cc->tfm)) {
205 cc->iv_gen_private.essiv_tfm = essiv_tfm;
209 static void crypt_iv_essiv_dtr(struct crypt_config *cc)
211 crypto_free_cipher(cc->iv_gen_private.essiv_tfm);
212 cc->iv_gen_private.essiv_tfm = NULL;
215 static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
217 memset(iv, 0, cc->iv_size);
219 crypto_cipher_encrypt_one(cc->iv_gen_private.essiv_tfm, iv, iv);
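Lines 139-219 implement ESSIV: setup hashes the volume key into a salt (lines 170-173) and keys an auxiliary cipher with it (lines 182-205, checking its block size against the data cipher's IV size on line 189); each IV is then the encryption of the sector number with that cipher (lines 215-219). A toy user-space sketch of the idea follows; `toy_hash()` and `toy_encrypt()` only stand in for the kernel's crypto_hash_digest() and crypto_cipher_encrypt_one() and are not real cryptography.

```c
/* Conceptual sketch of ESSIV (encrypted salt-sector IV), lines 139-219.
 * toy_hash()/toy_encrypt() are placeholders, NOT real cryptography. */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define IV_SIZE   16
#define SALT_SIZE 16

static void toy_hash(const uint8_t *key, size_t len, uint8_t salt[SALT_SIZE])
{
    memset(salt, 0, SALT_SIZE);
    for (size_t i = 0; i < len; i++)
        salt[i % SALT_SIZE] ^= key[i];             /* placeholder digest */
}

static void toy_encrypt(const uint8_t salt[SALT_SIZE], uint8_t block[IV_SIZE])
{
    for (int i = 0; i < IV_SIZE; i++)
        block[i] ^= salt[i];                       /* placeholder block cipher */
}

/* Mirrors crypt_iv_essiv_ctr: derive the salt once from the volume key. */
static void essiv_ctr(const uint8_t *key, size_t key_len, uint8_t salt[SALT_SIZE])
{
    toy_hash(key, key_len, salt);
}

/* Mirrors crypt_iv_essiv_gen: IV = E_salt(sector, little-endian, zero padded). */
static void essiv_gen(const uint8_t salt[SALT_SIZE], uint8_t iv[IV_SIZE], uint64_t sector)
{
    memset(iv, 0, IV_SIZE);
    for (int i = 0; i < 8; i++)
        iv[i] = (uint8_t)(sector >> (8 * i));
    toy_encrypt(salt, iv);
}

int main(void)
{
    uint8_t key[] = "volume-key", salt[SALT_SIZE], iv[IV_SIZE];
    essiv_ctr(key, sizeof(key) - 1, salt);
    essiv_gen(salt, iv, 42);
    printf("%02x%02x...\n", iv[0], iv[1]);
    return 0;
}
```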
223 static int crypt_iv_benbi_ctr(struct crypt_config *cc, struct dm_target *ti,
226 unsigned int bs = crypto_blkcipher_blocksize(cc->tfm);
242 cc->iv_gen_private.benbi_shift = 9 - log;
247 static void crypt_iv_benbi_dtr(struct crypt_config *cc)
251 static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
255 memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */
257 val = cpu_to_be64(((u64)sector << cc->iv_gen_private.benbi_shift) + 1);
258 put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64)));
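Lines 223-258 are the benbi (big-endian narrow-block-count) IV: the constructor derives a shift from the cipher block size (which must be a power of two no larger than a 512-byte sector, line 226 and 242), and the generator stores the 64-bit big-endian block count, starting at 1, in the last eight bytes of the IV (lines 255-258). A user-space sketch of the same arithmetic, with illustrative names:

```c
/* Sketch of the benbi IV: sector N starts at narrow block
 * ((N << (9 - log2(block_size))) + 1), stored big-endian in the last
 * 8 bytes of the IV; the rest of the IV is zeroed. */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static int ilog2_u32(uint32_t v)                 /* floor(log2(v)), v > 0 */
{
    int log = 0;
    while (v >>= 1)
        log++;
    return log;
}

static void benbi_gen(uint8_t *iv, unsigned iv_size,
                      unsigned block_size, uint64_t sector)
{
    int shift = 9 - ilog2_u32(block_size);       /* 512-byte sector = 2^9 bytes */
    uint64_t val = (sector << shift) + 1;        /* blocks are counted from 1 */

    memset(iv, 0, iv_size - sizeof(uint64_t));
    for (int i = 0; i < 8; i++)                  /* big-endian, last 8 IV bytes */
        iv[iv_size - 1 - i] = (uint8_t)(val >> (8 * i));
}

int main(void)
{
    uint8_t iv[16];
    benbi_gen(iv, sizeof(iv), 16, 3);            /* 16-byte blocks, sector 3 */
    printf("count = %u\n", (unsigned)iv[15] | ((unsigned)iv[14] << 8));  /* 97 */
    return 0;
}
```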
263 static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv, sector_t sector)
265 memset(iv, 0, cc->iv_size);
291 crypt_convert_scatterlist(struct crypt_config *cc, struct scatterlist *out,
295 u8 iv[cc->iv_size] __attribute__ ((aligned(__alignof__(u64))));
297 .tfm = cc->tfm,
303 if (cc->iv_gen_ops) {
304 r = cc->iv_gen_ops->generator(cc, iv, sector);
323 crypt_convert_init(struct crypt_config *cc, struct convert_context *ctx,
333 ctx->sector = sector + cc->iv_offset;
340 static int crypt_convert(struct crypt_config *cc,
372 r = crypt_convert_scatterlist(cc, &sg_out, &sg_in, sg_in.length,
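Lines 291-372 do the actual conversion: crypt_convert_init biases the starting sector by iv_offset (line 333), and crypt_convert walks the input and output scatterlists 512 bytes at a time, calling crypt_convert_scatterlist once per chunk (line 372), which builds the IV (lines 303-304) and runs the block cipher. The sketch below flattens the scatterlists to plain buffers; `sector_cipher()` is a toy placeholder for the kernel's blkcipher encrypt/decrypt call.

```c
/* Sketch of the per-sector conversion loop (lines 291-372): bias the sector
 * by iv_offset, then give each 512-byte chunk its own IV and one cipher call.
 * sector_cipher() is a toy placeholder, NOT real crypto. */
#include <stdint.h>
#include <stddef.h>

#define SECTOR_SIZE 512
#define IV_SIZE     16

typedef void (*iv_gen_fn)(uint8_t iv[IV_SIZE], uint64_t sector);

static void sector_cipher(uint8_t *out, const uint8_t *in, size_t len,
                          const uint8_t iv[IV_SIZE], int write)
{
    (void)write;                                 /* placeholder cipher */
    for (size_t i = 0; i < len; i++)
        out[i] = in[i] ^ iv[i % IV_SIZE];
}

static void crypt_convert_sketch(uint8_t *out, const uint8_t *in, size_t len,
                                 uint64_t sector, uint64_t iv_offset,
                                 iv_gen_fn generator, int write)
{
    uint8_t iv[IV_SIZE] = { 0 };
    uint64_t s = sector + iv_offset;             /* ctx->sector = sector + cc->iv_offset */

    for (size_t off = 0; off < len; off += SECTOR_SIZE, s++) {
        size_t n = len - off < SECTOR_SIZE ? len - off : SECTOR_SIZE;
        if (generator)                           /* cc->iv_gen_ops may be NULL (ecb) */
            generator(iv, s);
        sector_cipher(out + off, in + off, n, iv, write);
    }
}

int main(void)
{
    uint8_t in[1024] = { 1, 2, 3 }, out[1024];
    crypt_convert_sketch(out, in, sizeof(in), /*sector=*/8, /*iv_offset=*/0,
                         NULL, /*write=*/1);
    return out[0] == in[0] ? 0 : 1;              /* zero IV: toy cipher is identity */
}
```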
386 struct crypt_config *cc = io->target->private;
388 bio_free(bio, cc->bs);
398 struct crypt_config *cc = io->target->private;
404 clone = bio_alloc_bioset(GFP_NOIO, nr_iovecs, cc->bs);
413 bv->bv_page = mempool_alloc(cc->page_pool, gfp_mask);
444 static void crypt_free_buffer_pages(struct crypt_config *cc,
473 mempool_free(bv->bv_page, cc->page_pool);
484 struct crypt_config *cc = (struct crypt_config *) io->target->private;
494 mempool_free(io, cc->io_pool);
515 struct crypt_config *cc = io->target->private;
523 crypt_free_buffer_pages(cc, clone, done);
550 struct crypt_config *cc = io->target->private;
554 clone->bi_bdev = cc->dev->bdev;
561 struct crypt_config *cc = io->target->private;
573 clone = bio_alloc_bioset(GFP_NOIO, bio_segments(base_bio), cc->bs);
583 clone->bi_sector = cc->start + sector;
592 struct crypt_config *cc = io->target->private;
601 crypt_convert_init(cc, &ctx, NULL, base_bio, sector, 1);
617 if (unlikely(crypt_convert(cc, &ctx) < 0)) {
618 crypt_free_buffer_pages(cc, clone, clone->bi_size);
627 clone->bi_sector = cc->start + sector;
649 struct crypt_config *cc = io->target->private;
652 crypt_convert_init(cc, &ctx, io->base_bio, io->base_bio,
655 dec_pending(io, crypt_convert(cc, &ctx));
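Lines 386-655 are the bio plumbing: writes get a clone backed by pages from cc->page_pool (lines 398-413), the data is encrypted into it (lines 601-618) and the clone is submitted to the underlying device at cc->start + sector (line 627); reads clone the original bio from cc->bs (line 573), submit it at the same offset (line 583) and decrypt in place on completion (lines 649-655). A much-simplified user-space analogue of the write side follows, using a bounce buffer and pwrite(); `xor_convert()` is a toy placeholder, not real crypto, and the whole thing only loosely mirrors the kernel's clone-bio machinery.

```c
/* Loose user-space analogue of the write path: encrypt into a bounce buffer
 * (the clone bio's role) and write it to the backing store at sector
 * start + sector. */
#define _XOPEN_SOURCE 700
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <unistd.h>

#define SECTOR_SIZE 512

static void xor_convert(uint8_t *out, const uint8_t *in, size_t len, uint64_t sector)
{
    for (size_t i = 0; i < len; i++)
        out[i] = in[i] ^ (uint8_t)(sector + i);  /* placeholder for crypt_convert() */
}

static int crypt_write(int fd, uint64_t start, uint64_t sector,
                       const uint8_t *data, size_t len)
{
    uint8_t *bounce = malloc(len);               /* plays the role of the clone bio */
    if (!bounce)
        return -1;

    xor_convert(bounce, data, len, sector);
    ssize_t n = pwrite(fd, bounce, len,
                       (off_t)((start + sector) * SECTOR_SIZE));
    free(bounce);
    return n == (ssize_t)len ? 0 : -1;
}

int main(void)
{
    FILE *f = tmpfile();                         /* stands in for cc->dev->bdev */
    if (!f)
        return 1;
    uint8_t sector0[SECTOR_SIZE] = "hello";
    return crypt_write(fileno(f), /*start=*/0, /*sector=*/0,
                       sector0, sizeof(sector0));
}
```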
711 static int crypt_set_key(struct crypt_config *cc, char *key)
715 if (cc->key_size && cc->key_size != key_size)
718 cc->key_size = key_size; /* initial settings */
721 (key_size && crypt_decode_key(cc->key, key, key_size) < 0))
724 set_bit(DM_CRYPT_KEY_VALID, &cc->flags);
729 static int crypt_wipe_key(struct crypt_config *cc)
731 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags);
732 memset(&cc->key, 0, cc->key_size * sizeof(u8));
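crypt_set_key (lines 711-724) takes the key as a hex string whose length determines key_size (two characters per byte), decodes it into the key storage and sets DM_CRYPT_KEY_VALID; crypt_wipe_key (lines 729-732) clears the flag and zeroes the key bytes. A user-space sketch of that handling, with `hex_to_bytes()` standing in for the kernel's crypt_decode_key():

```c
/* Sketch of key set/wipe (lines 711-732): decode a hex key string into raw
 * bytes, and on wipe clear the valid flag and scrub the key in place. */
#include <stdint.h>
#include <string.h>
#include <ctype.h>

static int hex_nibble(char c)
{
    if (c >= '0' && c <= '9') return c - '0';
    c = (char)tolower((unsigned char)c);
    if (c >= 'a' && c <= 'f') return c - 'a' + 10;
    return -1;
}

/* Two hex characters per key byte; hex must hold 2 * key_size characters,
 * which the kernel guarantees by deriving key_size from strlen(key) / 2. */
static int hex_to_bytes(uint8_t *key, const char *hex, size_t key_size)
{
    for (size_t i = 0; i < key_size; i++) {
        int hi = hex_nibble(hex[2 * i]), lo = hex_nibble(hex[2 * i + 1]);
        if (hi < 0 || lo < 0)
            return -1;
        key[i] = (uint8_t)((hi << 4) | lo);
    }
    return 0;
}

/* Mirrors crypt_wipe_key(): forget the key and scrub it from memory.  In user
 * space prefer explicit_bzero() so the compiler cannot elide the memset. */
static void wipe_key(uint8_t *key, size_t key_size, int *key_valid)
{
    *key_valid = 0;
    memset(key, 0, key_size);
}

int main(void)
{
    uint8_t key[4];
    int valid = hex_to_bytes(key, "deadbeef", sizeof(key)) == 0;
    wipe_key(key, sizeof(key), &valid);
    return valid;                                /* 0: key wiped */
}
```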
742 struct crypt_config *cc;
768 cc = kzalloc(sizeof(*cc) + key_size * sizeof(u8), GFP_KERNEL);
769 if (cc == NULL) {
775 if (crypt_set_key(cc, argv[1])) {
791 if (snprintf(cc->cipher, CRYPTO_MAX_ALG_NAME, "%s(%s)", chainmode,
797 tfm = crypto_alloc_blkcipher(cc->cipher, 0, CRYPTO_ALG_ASYNC);
803 strcpy(cc->cipher, cipher);
804 strcpy(cc->chainmode, chainmode);
805 cc->tfm = tfm;
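Lines 791-805 build the crypto API algorithm name as "chainmode(cipher)", for example "cbc(aes)", bounded by CRYPTO_MAX_ALG_NAME (64 in 2.6-era kernels), allocate the blkcipher, and keep the original cipher and chainmode strings for later status output. A small user-space sketch of that formatting; `build_cipher_spec()` is an illustrative name:

```c
/* Sketch of the cipher spec built on lines 791-797: always "chainmode(cipher)",
 * rejected if the combined name would not fit in CRYPTO_MAX_ALG_NAME. */
#include <stdio.h>

#define CRYPTO_MAX_ALG_NAME 64                   /* 2.6-era kernel value */

static int build_cipher_spec(char *spec, const char *chainmode, const char *cipher)
{
    if (snprintf(spec, CRYPTO_MAX_ALG_NAME, "%s(%s)",
                 chainmode, cipher) >= CRYPTO_MAX_ALG_NAME)
        return -1;                               /* "Chain mode + cipher name is too long" */
    return 0;
}

int main(void)
{
    char spec[CRYPTO_MAX_ALG_NAME];
    if (build_cipher_spec(spec, "cbc", "aes") == 0)
        printf("%s\n", spec);                    /* prints "cbc(aes)" */
    return 0;
}
```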
813 cc->iv_gen_ops = NULL;
815 cc->iv_gen_ops = &crypt_iv_plain_ops;
817 cc->iv_gen_ops = &crypt_iv_essiv_ops;
819 cc->iv_gen_ops = &crypt_iv_benbi_ops;
821 cc->iv_gen_ops = &crypt_iv_null_ops;
827 if (cc->iv_gen_ops && cc->iv_gen_ops->ctr &&
828 cc->iv_gen_ops->ctr(cc, ti, ivopts) < 0)
831 cc->iv_size = crypto_blkcipher_ivsize(tfm);
832 if (cc->iv_size)
834 cc->iv_size = max(cc->iv_size,
837 if (cc->iv_gen_ops) {
839 if (cc->iv_gen_ops->dtr)
840 cc->iv_gen_ops->dtr(cc);
841 cc->iv_gen_ops = NULL;
845 cc->io_pool = mempool_create_slab_pool(MIN_IOS, _crypt_io_pool);
846 if (!cc->io_pool) {
851 cc->page_pool = mempool_create_page_pool(MIN_POOL_PAGES, 0);
852 if (!cc->page_pool) {
857 cc->bs = bioset_create(MIN_IOS, MIN_IOS);
858 if (!cc->bs) {
863 if (crypto_blkcipher_setkey(tfm, cc->key, key_size) < 0) {
872 cc->iv_offset = tmpll;
878 cc->start = tmpll;
880 if (dm_get_device(ti, argv[3], cc->start, ti->len,
881 dm_table_get_mode(ti->table), &cc->dev)) {
886 if (ivmode && cc->iv_gen_ops) {
889 cc->iv_mode = kmalloc(strlen(ivmode) + 1, GFP_KERNEL);
890 if (!cc->iv_mode) {
894 strcpy(cc->iv_mode, ivmode);
896 cc->iv_mode = NULL;
898 ti->private = cc;
902 bioset_free(cc->bs);
904 mempool_destroy(cc->page_pool);
906 mempool_destroy(cc->io_pool);
908 if (cc->iv_gen_ops && cc->iv_gen_ops->dtr)
909 cc->iv_gen_ops->dtr(cc);
914 memset(cc, 0, sizeof(*cc) + cc->key_size * sizeof(u8));
915 kfree(cc);
921 struct crypt_config *cc = (struct crypt_config *) ti->private;
923 bioset_free(cc->bs);
924 mempool_destroy(cc->page_pool);
925 mempool_destroy(cc->io_pool);
927 kfree(cc->iv_mode);
928 if (cc->iv_gen_ops && cc->iv_gen_ops->dtr)
929 cc->iv_gen_ops->dtr(cc);
930 crypto_free_blkcipher(cc->tfm);
931 dm_put_device(ti, cc->dev);
934 memset(cc, 0, sizeof(*cc) + cc->key_size * sizeof(u8));
935 kfree(cc);
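Both the constructor's error path (lines 914-915) and the destructor (lines 921-935) zero the entire crypt_config, including the key bytes allocated immediately after it (line 768), before freeing it. A user-space sketch of that allocate-with-trailing-key / scrub-before-free pattern, with illustrative names:

```c
/* Sketch of the "config struct with key material appended" pattern
 * (lines 768, 914, 934): one allocation holds the struct plus key_size key
 * bytes, and teardown zeroes the whole thing before freeing. */
#include <stdlib.h>
#include <string.h>
#include <stdint.h>

struct cfg {
    size_t  key_size;
    /* ... other fields ... */
    uint8_t key[];                               /* flexible array, allocated along */
};

static struct cfg *cfg_alloc(size_t key_size)
{
    struct cfg *c = calloc(1, sizeof(*c) + key_size);
    if (c)
        c->key_size = key_size;
    return c;
}

static void cfg_free(struct cfg *c)
{
    if (!c)
        return;
    /* Scrub the key before returning the memory to the allocator; in user
     * space prefer explicit_bzero() so the memset cannot be optimized out. */
    memset(c, 0, sizeof(*c) + c->key_size);
    free(c);
}

int main(void)
{
    struct cfg *c = cfg_alloc(32);
    cfg_free(c);
    return 0;
}
```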
941 struct crypt_config *cc = ti->private;
947 io = mempool_alloc(cc->io_pool, GFP_NOIO);
960 struct crypt_config *cc = (struct crypt_config *) ti->private;
969 if (cc->iv_mode)
970 DMEMIT("%s-%s-%s ", cc->cipher, cc->chainmode,
971 cc->iv_mode);
973 DMEMIT("%s-%s ", cc->cipher, cc->chainmode);
975 if (cc->key_size > 0) {
976 if ((maxlen - sz) < ((cc->key_size << 1) + 1))
979 crypt_encode_key(result + sz, cc->key, cc->key_size);
980 sz += cc->key_size << 1;
987 DMEMIT(" %llu %s %llu", (unsigned long long)cc->iv_offset,
988 cc->dev->name, (unsigned long long)cc->start);
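crypt_status (lines 960-988) reports the table line back to user space: "cipher-chainmode[-ivmode]", the key as hex (or "-" when empty), then iv_offset, the device name, and start. A user-space sketch of the same formatting; `format_status()` is an illustrative name, snprintf stands in for the DMEMIT macro, and the bounds handling is simplified (the kernel returns -ENOMEM when the key will not fit).

```c
/* Sketch of the status line built on lines 969-988:
 *   cipher-chainmode[-ivmode] <hex key or "-"> <iv_offset> <device> <start> */
#include <stdio.h>
#include <stdint.h>

static void format_status(char *buf, size_t maxlen,
                          const char *cipher, const char *chainmode,
                          const char *ivmode,            /* may be NULL */
                          const uint8_t *key, size_t key_size,
                          unsigned long long iv_offset,
                          const char *dev, unsigned long long start)
{
    size_t sz = 0;

    if (ivmode)
        sz += snprintf(buf + sz, maxlen - sz, "%s-%s-%s ", cipher, chainmode, ivmode);
    else
        sz += snprintf(buf + sz, maxlen - sz, "%s-%s ", cipher, chainmode);

    if (key_size > 0) {                                  /* hex, two chars per byte */
        for (size_t i = 0; i < key_size && sz + 2 < maxlen; i++)
            sz += snprintf(buf + sz, maxlen - sz, "%02x", key[i]);
    } else {
        sz += snprintf(buf + sz, maxlen - sz, "-");
    }

    snprintf(buf + sz, maxlen - sz, " %llu %s %llu", iv_offset, dev, start);
}

int main(void)
{
    uint8_t key[4] = { 0xde, 0xad, 0xbe, 0xef };
    char line[256];

    format_status(line, sizeof(line), "aes", "cbc", "essiv:sha256",
                  key, sizeof(key), 0, "/dev/sda2", 0);
    printf("%s\n", line);    /* aes-cbc-essiv:sha256 deadbeef 0 /dev/sda2 0 */
    return 0;
}
```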
996 struct crypt_config *cc = ti->private;
998 set_bit(DM_CRYPT_SUSPENDED, &cc->flags);
1003 struct crypt_config *cc = ti->private;
1005 if (!test_bit(DM_CRYPT_KEY_VALID, &cc->flags)) {
1015 struct crypt_config *cc = ti->private;
1017 clear_bit(DM_CRYPT_SUSPENDED, &cc->flags);
1026 struct crypt_config *cc = ti->private;
1032 if (!test_bit(DM_CRYPT_SUSPENDED, &cc->flags)) {
1037 return crypt_set_key(cc, argv[2]);
1039 return crypt_wipe_key(cc);
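The suspend/resume hooks and crypt_message (lines 996-1039) tie key handling to device suspension: postsuspend sets DM_CRYPT_SUSPENDED, preresume refuses to run without a valid key, and the "key set <hexkey>" / "key wipe" messages (typically issued with dmsetup message) are only honoured while the device is suspended. A user-space sketch of that dispatch; `handle_message()` and `struct cfg` are illustrative names, and the kernel compares the verbs case-insensitively.

```c
/* Sketch of the message dispatch on lines 1026-1039: key changes require the
 * device to be suspended, and the verbs are "key set <hexkey>" and "key wipe". */
#include <stdio.h>
#include <string.h>

struct cfg {
    int suspended;
    int key_valid;
    /* ... key storage elided ... */
};

static int handle_message(struct cfg *cc, int argc, char **argv)
{
    if (argc < 2 || strcmp(argv[0], "key") != 0)
        return -1;                               /* unrecognised message */

    if (!cc->suspended) {
        fprintf(stderr, "not suspended during key manipulation\n");
        return -1;
    }

    if (argc == 3 && strcmp(argv[1], "set") == 0) {
        /* crypt_set_key(cc, argv[2]) would decode the hex key here */
        cc->key_valid = 1;
        return 0;
    }
    if (argc == 2 && strcmp(argv[1], "wipe") == 0) {
        /* crypt_wipe_key(cc) clears the valid flag and zeroes the key */
        cc->key_valid = 0;
        return 0;
    }
    return -1;
}

int main(void)
{
    struct cfg cc = { .suspended = 1, .key_valid = 1 };
    char *msg[] = { "key", "wipe" };
    return handle_message(&cc, 2, msg) == 0 ? 0 : 1;
}
```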