2 * Copyright (c) 2010 The DragonFly Project. All rights reserved.
4 * This code is derived from software contributed to The DragonFly Project
5 * by Alex Hornung <ahornung@gmail.com>
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * 1. Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * 2. Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
17 * 3. Neither the name of The DragonFly Project nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific, prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
24 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
25 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
26 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
27 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
28 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
29 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
31 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
36 * This file implements initial version of device-mapper crypt target.
38 #include <sys/types.h>
39 #include <sys/param.h>
43 #include <sys/malloc.h>
45 #include <sys/vnode.h>
46 #include <crypto/sha1.h>
47 #include <crypto/sha2/sha2.h>
48 #include <opencrypto/cryptodev.h>
49 #include <opencrypto/rmd160.h>
/* malloc(9) type under which every allocation of this target is accounted. */
52 MALLOC_DEFINE(M_DMCRYPT, "dm_crypt", "Device Mapper Target Crypt");
54 struct target_crypt_config;
/*
 * IV generator hook: fill an iv_len-byte IV for the given sector, using
 * state kept in the per-target config.
 */
55 typedef void ivgen_t(struct target_crypt_config *, u_int8_t *, size_t, off_t);
/*
 * Per-instance state of one crypt mapping.  NOTE(review): several members
 * referenced elsewhere in this file (crypto_alg, crypto_klen, crypto_sid,
 * iv_offset, pdev, status_str, params_len) are declared on lines not
 * visible in this excerpt.
 */
57 typedef struct target_crypt_config {
/* Raw cipher key; sized for the 512-bit maximum supported key length. */
63 u_int8_t crypto_key[512>>3];
/* Hashed key used as the ESSIV cipher key (max digest size = SHA-512). */
64 u_int8_t crypto_keyhash[SHA512_DIGEST_LENGTH];
/* Sector offset added to every I/O before it hits the backing device. */
66 u_int64_t block_offset;
/* Pre-seeded SHA-512 state copied by geli_ivgen() for each sector. */
68 SHA512_CTX essivsha512_ctx;
/* Session template handed to crypto_newsession() at init time. */
69 struct cryptoini crypto_session;
/* Selected IV generation strategy (plain_ivgen / essiv_ivgen / ...). */
70 ivgen_t *crypto_ivgen;
72 } dm_target_crypt_config_t;
/*
 * Forward declarations for the bio/crypto completion pipeline:
 *   read:  strategy -> read_done -> work -> crypto_done_read (finish)
 *   write: strategy -> work -> crypto_done_write -> write_done (finish)
 */
79 static void dm_target_crypt_work(dm_target_crypt_config_t *priv, struct bio *bio);
80 static void dm_target_crypt_read_done(struct bio *bio);
81 static void dm_target_crypt_write_done(struct bio *bio);
82 static int dm_target_crypt_crypto_done_read(struct cryptop *crp);
83 static int dm_target_crypt_crypto_done_write(struct cryptop *crp);
/*
 * Derive the ESSIV key: hash the user-supplied cipher key with the digest
 * named by iv_hash and store the result in priv->crypto_keyhash.  That
 * hashed key later keys the block cipher that turns sector numbers into
 * IVs (see essiv_ivgen()).  ESSIV requires digest size == cipher key size,
 * hence the klen checks per algorithm.  Error returns for unknown hash
 * names / mismatched sizes are on lines not visible in this excerpt.
 */
88 essiv_hash_mkey(dm_target_crypt_config_t *priv, char *iv_hash)
/* Key length in bytes (crypto_klen is kept in bits). */
92 klen = (priv->crypto_klen >> 3);
97 if (!strcmp(iv_hash, "sha1")) {
100 if (klen != SHA1_RESULTLEN)
104 SHA1Update(&ctx, priv->crypto_key, priv->crypto_klen>>3);
105 SHA1Final(priv->crypto_keyhash, &ctx);
106 } else if (!strcmp(iv_hash, "sha256")) {
109 if (klen != SHA256_DIGEST_LENGTH)
113 SHA256_Update(&ctx, priv->crypto_key, priv->crypto_klen>>3);
114 SHA256_Final(priv->crypto_keyhash, &ctx);
115 } else if (!strcmp(iv_hash, "sha384")) {
118 if (klen != SHA384_DIGEST_LENGTH)
122 SHA384_Update(&ctx, priv->crypto_key, priv->crypto_klen>>3);
123 SHA384_Final(priv->crypto_keyhash, &ctx);
124 } else if (!strcmp(iv_hash, "sha512")) {
127 if (klen != SHA512_DIGEST_LENGTH)
131 SHA512_Update(&ctx, priv->crypto_key, priv->crypto_klen>>3);
132 SHA512_Final(priv->crypto_keyhash, &ctx);
133 } else if (!strcmp(iv_hash, "md5")) {
136 if (klen != MD5_DIGEST_LENGTH)
140 MD5Update(&ctx, priv->crypto_key, priv->crypto_klen>>3);
141 MD5Final(priv->crypto_keyhash, &ctx);
/* Two spellings accepted for RIPEMD-160. */
142 } else if (!strcmp(iv_hash, "rmd160") || !strcmp(iv_hash, "ripemd160")) {
149 RMD160Update(&ctx, priv->crypto_key, priv->crypto_klen>>3);
150 RMD160Final(priv->crypto_keyhash, &ctx);
/*
 * opencrypto completion callback for the ESSIV IV-generation request.
 * EAGAIN means the driver wants the request resubmitted; any other
 * nonzero crp_etype is only logged here.  Completion is signalled by
 * incrementing the submitter's flag (crp_opaque points at an int) and
 * waking sleepers on that address.
 */
159 essiv_ivgen_done(struct cryptop *crp)
162 if (crp->crp_etype == EAGAIN)
163 return crypto_dispatch(crp);
165 if (crp->crp_etype != 0) {
166 kprintf("essiv_ivgen_done, crp->crp_etype = %d\n", crp->crp_etype);
/* Mark done, then wake whoever is waiting in essiv_ivgen(). */
169 atomic_add_int((int *)crp->crp_opaque, 1);
170 wakeup(crp->crp_opaque);
/*
 * "plain" IV generator: the IV is just the 64-bit sector number (plus the
 * configured iv_offset) stored at the start of the IV buffer.
 * NOTE(review): this writes sizeof(off_t) bytes without checking iv_len,
 * and any zeroing of the remaining IV bytes would be on lines not visible
 * here — confirm iv_len >= sizeof(off_t) and that the tail is cleared.
 */
175 plain_ivgen(dm_target_crypt_config_t *priv, u_int8_t *iv, size_t iv_len, off_t sector)
178 *((off_t *)iv) = sector + priv->iv_offset;
/*
 * ESSIV IV generator: encrypt the (offset) sector number in place with
 * the hashed key (crypto_keyhash) to get an unpredictable per-sector IV.
 * The request goes through opencrypto; if it does not finish
 * synchronously the caller sleeps until essiv_ivgen_done() signals
 * completion.
 */
182 essiv_ivgen(dm_target_crypt_config_t *priv, u_int8_t *iv, size_t iv_len, off_t sector)
184 struct cryptodesc crd;
/* Seed the buffer with the sector number, then encrypt it in place. */
190 *((off_t *)iv) = sector + priv->iv_offset;
191 crp.crp_buf = (caddr_t)iv;
193 crp.crp_sid = priv->crypto_sid;
194 crp.crp_ilen = crp.crp_olen = iv_len;
/* id is the completion flag essiv_ivgen_done() increments and wakes. */
196 crp.crp_opaque = (void *)&id;
198 crp.crp_callback = essiv_ivgen_done;
202 crp.crp_flags = CRYPTO_F_CBIFSYNC | CRYPTO_F_REL;
/* One descriptor: encrypt iv_len bytes with the ESSIV (hashed) key. */
204 crd.crd_alg = priv->crypto_alg;
205 crd.crd_key = (caddr_t)priv->crypto_keyhash;
206 crd.crd_klen = priv->crypto_klen;
/* The ESSIV encryption itself runs with an all-zero IV. */
208 bzero(crd.crd_iv, sizeof(crd.crd_iv));
211 crd.crd_len = iv_len;
212 crd.crd_flags = CRD_F_KEY_EXPLICIT | CRD_F_IV_EXPLICIT | CRD_F_IV_PRESENT;
213 crd.crd_flags |= CRD_F_ENCRYPT;
216 error = crypto_dispatch(&crp);
218 kprintf("essiv_ivgen, error = %d\n", error);
221 * id is modified in the callback, so that if crypto_dispatch finishes
222 * synchronously we don't tsleep() forever.
/*
 * NOTE(review): this sleeps on &error, but essiv_ivgen_done() calls
 * wakeup(crp->crp_opaque), i.e. on &id.  The sleep and wakeup channels
 * do not match, so an asynchronous completion would never wake this
 * tsleep (timeout 0 = sleep forever).  Looks like it should be
 * tsleep((void *)&id, ...) — confirm against the callback.
 */
225 tsleep((void *)&error, 0, "essivgen", 0);
230 geli_ivgen(dm_target_crypt_config_t *priv, u_int8_t *iv, size_t iv_len, off_t sector)
234 u_int8_t md[SHA512_DIGEST_LENGTH]; /* Max. Digest Size */
236 memcpy(&ctx512, &priv->essivsha512_ctx, sizeof(SHA512_CTX));
237 SHA512_Update(&ctx512, (u_int8_t*)§or, sizeof(off_t));
238 SHA512_Final(md, &ctx512);
240 memcpy(iv, md, iv_len);
/*
 * Core encrypt/decrypt worker.  Splits the bio's data into DEV_BSIZE
 * sectors and submits one cryptop+cryptodesc pair per sector to
 * opencrypto.  Writes encrypt a shadow copy of the caller's buffer (the
 * original b_data pointer is stashed in a dmtc_helper so write_done can
 * restore it); reads decrypt in place and only need the descriptor array.
 * Completion is tracked by counting sectors down in bio_caller_info3.
 */
245 dm_target_crypt_work(dm_target_crypt_config_t *priv, struct bio *bio)
247 struct dmtc_helper *dmtc;
248 struct cryptodesc *crd;
250 struct cryptoini *cri;
252 int error, i, bytes, isector, sectors, write, sz;
253 u_char *ptr, *space, *data;
255 cri = &priv->crypto_session;
257 write = (bio->bio_buf->b_cmd == BUF_CMD_WRITE) ? 1 : 0;
258 bytes = bio->bio_buf->b_bcount; /* XXX: b_resid no good after reads... == 0 */
259 isector = bio->bio_offset/DEV_BSIZE; /* Initial sector */
260 sectors = bytes/DEV_BSIZE; /* Number of sectors affected by bio */
/* One cryptop + one cryptodesc per sector. */
261 sz = sectors * (sizeof(*crp) + sizeof(*crd));
/*
 * Write path: single allocation holding helper + descriptors + a shadow
 * copy of the data, so the plaintext buffer is never encrypted in place.
 */
264 space = kmalloc(sizeof(struct dmtc_helper) + sz + bytes, M_DMCRYPT, M_WAITOK);
265 dmtc = (struct dmtc_helper *)space;
266 dmtc->free_addr = space;
267 dmtc->orig_buf = bio->bio_buf->b_data;
268 space += sizeof(struct dmtc_helper);
269 memcpy(space + sz, bio->bio_buf->b_data, bytes);
270 bio->bio_caller_info2.ptr = dmtc;
/* Point b_data at the shadow copy; write_done restores orig_buf later. */
271 bio->bio_buf->b_data = data = space + sz;
/* Read path: only the descriptor array is needed; decrypt in place. */
273 space = kmalloc(sz, M_DMCRYPT, M_WAITOK);
274 data = bio->bio_buf->b_data;
275 bio->bio_caller_info2.ptr = space;
/* Outstanding-sector count, decremented by the crypto callbacks. */
279 bio->bio_caller_info3.value = sectors;
280 kprintf("Write? %d, bytes = %d (b_bcount), sectors = %d (bio = %p, b_cmd = %d)\n", write, bytes, sectors, bio, bio->bio_buf->b_cmd);
/* Carve crp/crd pairs out of the preallocated array, one per sector. */
282 for (i = 0; i < sectors; i++) {
283 crp = (struct cryptop *)ptr;
285 crd = (struct cryptodesc *)ptr;
286 ptr += sizeof (*crd);
288 crp->crp_buf = (data + i*DEV_BSIZE);
290 crp->crp_sid = priv->crypto_sid;
291 crp->crp_ilen = crp->crp_olen = DEV_BSIZE;
293 crp->crp_opaque = (void *)bio;
296 crp->crp_callback = dm_target_crypt_crypto_done_write;
298 crp->crp_callback = dm_target_crypt_crypto_done_read;
301 crp->crp_flags = CRYPTO_F_CBIFSYNC | CRYPTO_F_REL;
303 crd->crd_alg = priv->crypto_alg;
304 crd->crd_key = (caddr_t)priv->crypto_key;
305 crd->crd_klen = priv->crypto_klen;
/* Per-sector IV via the configured generator (plain/essiv/...). */
307 priv->crypto_ivgen(priv, crd->crd_iv, sizeof(crd->crd_iv), isector + i);
310 crd->crd_len = DEV_BSIZE /* XXX */;
311 crd->crd_flags = CRD_F_KEY_EXPLICIT | CRD_F_IV_EXPLICIT | CRD_F_IV_PRESENT;
312 crd->crd_next = NULL;
/* Direction: encrypt for writes, clear the flag for reads. */
315 crd->crd_flags |= CRD_F_ENCRYPT;
317 crd->crd_flags &= ~CRD_F_ENCRYPT;
319 error = crypto_dispatch(crp);
/*
 * bio_done hook for reads: the backing device has delivered the
 * ciphertext, so hand the bio to the worker for in-place decryption.
 */
324 dm_target_crypt_read_done(struct bio *bio)
326 dm_target_crypt_config_t *priv;
/* Target config was stashed here by dm_target_crypt_strategy(). */
328 priv = bio->bio_caller_info1.ptr;
329 kprintf("dm_target_crypt_read_done %p\n", bio);
331 dm_target_crypt_work(priv, bio);
/*
 * bio_done hook for writes: the encrypted shadow buffer has been written
 * to the backing device.  Restore the caller's original b_data pointer
 * and free the helper + shadow-buffer allocation made by the worker.
 */
335 dm_target_crypt_write_done(struct bio *bio)
337 struct dmtc_helper *dmtc;
340 kprintf("dm_target_crypt_write_done %p\n", bio);
341 dmtc = bio->bio_caller_info2.ptr;
342 bio->bio_buf->b_data = dmtc->orig_buf;
343 kfree(dmtc->free_addr, M_DMCRYPT);
/*
 * Per-sector crypto callback for reads.  Resubmits on EAGAIN, records the
 * first crypto error in b_error, and counts outstanding sectors down via
 * bio_caller_info3.  The thread that sees the count hit the final value
 * frees the descriptor array and completes the read (completion lines
 * not visible in this excerpt).
 */
349 dm_target_crypt_crypto_done_read(struct cryptop *crp)
351 struct bio *bio, *obio;
354 if (crp->crp_etype == EAGAIN)
355 return crypto_dispatch(crp);
357 bio = (struct bio *)crp->crp_opaque;
358 KKASSERT(bio != NULL);
/* n is the pre-decrement count; the last finisher observes n == 1. */
360 n = atomic_fetchadd_int(&bio->bio_caller_info3.value, -1);
362 kprintf("dm_target_crypt_crypto_done_read %p, n = %d\n", bio, n);
364 if (crp->crp_etype != 0) {
365 kprintf("dm_target_crypt_crypto_done_read crp_etype = %d\n", crp->crp_etype);
366 /* XXX: Print something out */
367 bio->bio_buf->b_error = crp->crp_etype;
370 kprintf("dm_target_crypt_crypt_done_read: n == 1\n");
/* Descriptor array allocated by dm_target_crypt_work() for the read. */
371 kfree(bio->bio_caller_info2.ptr, M_DMCRYPT);
372 /* This is the last chunk of the read */
/*
 * Per-sector crypto callback for writes.  Resubmits on EAGAIN, records
 * crypto errors in b_error, counts outstanding sectors down.  The last
 * finisher either unwinds the shadow buffer on error, or dispatches the
 * now-encrypted buffer to the backing device; dm_target_crypt_write_done
 * runs when that device write completes.
 */
381 dm_target_crypt_crypto_done_write(struct cryptop *crp)
383 struct dmtc_helper *dmtc;
384 dm_target_crypt_config_t *priv;
385 struct bio *bio, *obio;
388 if (crp->crp_etype == EAGAIN)
389 return crypto_dispatch(crp);
391 bio = (struct bio *)crp->crp_opaque;
392 KKASSERT(bio != NULL);
/* n is the pre-decrement count; the last finisher observes n == 1. */
394 n = atomic_fetchadd_int(&bio->bio_caller_info3.value, -1);
396 kprintf("dm_target_crypt_crypto_done_write %p, n = %d\n", bio, n);
398 if (crp->crp_etype != 0) {
399 kprintf("dm_target_crypt_crypto_done_write crp_etype = %d\n", crp->crp_etype);
400 /* XXX: Print something out */
401 bio->bio_buf->b_error = crp->crp_etype;
404 kprintf("dm_target_crypt_crypt_done_write: n == 1\n");
405 /* This is the last chunk of the write */
/* Error path: restore the original buffer and free the shadow copy. */
406 if (bio->bio_buf->b_error != 0) {
408 dmtc = bio->bio_caller_info2.ptr;
409 bio->bio_buf->b_data = dmtc->orig_buf;
410 kfree(dmtc->free_addr, M_DMCRYPT);
/* Success path: push the encrypted bio down to the physical device. */
414 priv = (dm_target_crypt_config_t *)bio->bio_caller_info1.ptr;
415 vn_strategy(priv->pdev->pdev_vnode, bio);
423 strategy -> read_done -> crypto_work -> crypto_done_read -> FINISH READ
424 strategy -> crypto_work -> crypto_done_write -> write dispatch -> write_done -> FINISH WRITE
427 #ifdef DM_TARGET_MODULE
429 * Every target can be compiled directly to dm driver or as a
430 * separate module this part of target is used for loading targets
432 * Target can be unloaded from kernel only if there are no users of
433 * it e.g. there are no devices which uses that target.
435 #include <sys/kernel.h>
436 #include <sys/module.h>
/*
 * Module control entry point (DM_TARGET_MODULE builds only): registers
 * the "crypt" target and its method table with the device-mapper core on
 * load, and removes it on unload.  Return-value and error lines are not
 * visible in this excerpt.
 */
439 dm_target_crypt_modcmd(modcmd_t cmd, void *arg)
446 case MODULE_CMD_INIT:
/* Already registered? lookup busies the target, so unbusy and bail. */
447 if ((dmt = dm_target_lookup("crypt")) != NULL) {
448 dm_target_unbusy(dmt);
451 dmt = dm_target_alloc("crypt");
456 strlcpy(dmt->name, "crypt", DM_MAX_TYPE_NAME);
/* Wire up the target method table. */
457 dmt->init = &dm_target_crypt_init;
458 dmt->status = &dm_target_crypt_status;
459 dmt->strategy = &dm_target_crypt_strategy;
460 dmt->deps = &dm_target_crypt_deps;
461 dmt->destroy = &dm_target_crypt_destroy;
462 dmt->upcall = &dm_target_crypt_upcall;
464 r = dm_target_insert(dmt);
/* Unload: only succeeds when no device still uses this target. */
468 case MODULE_CMD_FINI:
469 r = dm_target_rem("crypt");
472 case MODULE_CMD_STAT:
484 * Init function called from dm_table_load_ioctl.
486 * <device> <crypto algorithm>[-<keysize>] <iv generator> <passphrase>
487 * /dev/foo aes-256 essiv foobar
488 * cryptsetup actually passes us this:
489 * aes-cbc-essiv:sha256 7997f8af... 0 /dev/ad0s0a 8
/*
 * Convert an ASCII hex string into raw key bytes: two hex characters are
 * copied into hex_buf and parsed with strtoul(,,16) per output byte.
 * NOTE(review): the caller passes priv->crypto_klen >> 3 (a BYTE count)
 * as hex_length, while this loop decrements by 2 per byte produced as if
 * hex_length were the hex-CHARACTER count — those units disagree by 2x;
 * confirm against the lines not visible in this excerpt.
 */
493 hex2key(char *hex, size_t hex_length, u_int8_t *key)
/* Keep hex_buf NUL-terminated for strtoul. */
499 bzero(hex_buf, sizeof(hex_buf));
501 for (; hex_length > 0; hex_length -= 2) {
504 key[key_idx++] = (u_int8_t)strtoul(hex_buf, NULL, 16);
/*
 * Target init, called from dm_table_load_ioctl with the parameter string
 * "<cipher>-<mode>[-<ivgen>[:<ivhash>]] <hexkey> <iv_offset> <device>
 * <block_offset>" (what cryptsetup passes).  Parses the parameters,
 * validates cipher/mode/key length, opens the backing pdev, derives the
 * key, selects an IV generator and opens an opencrypto session.  Several
 * error-path and return lines are not visible in this excerpt.
 */
511 dm_target_crypt_init(dm_dev_t * dmv, void **target_config, char *params)
513 dm_target_crypt_config_t *priv;
516 char *crypto_alg, *crypto_mode, *iv_mode, *iv_opt, *key, *dev;
518 int argc, klen, error;
519 uint64_t iv_offset, block_offset;
524 len = strlen(params) + 1;
/* Keep an untouched copy of the params for the status callback. */
527 status_str = kstrdup(params, M_DMCRYPT);
529 * Parse a string, containing tokens delimited by white space,
530 * into an argument vector
/*
 * NOTE(review): "¶ms" below is an extraction artifact; the call is
 * strsep(&params, " \t").
 */
532 for (ap = args; ap < &args[5] &&
533 (*ap = strsep(¶ms, " \t")) != NULL;) {
540 kprintf("\nCrypto target init function called, argc = %d!!\n", argc);
542 kprintf("not enough arguments for target crypt, need exactly 5\n");
543 kfree(status_str, M_DMCRYPT);
/* NOTE(review): EINVAL would be the accurate errno for bad argc. */
544 return ENOMEM; /* XXX */
/* Split "<cipher>-<mode>-<ivgen>:<ivhash>" (e.g. aes-cbc-essiv:sha256). */
547 crypto_alg = strsep(&args[0], "-");
548 crypto_mode = strsep(&args[0], "-");
549 iv_opt = strsep(&args[0], "-");
550 iv_mode = strsep(&iv_opt, ":");
552 iv_offset = strtouq(args[2], NULL, 0);
554 block_offset = strtouq(args[4], NULL, 0);
555 /* bits / 8 = bytes, 1 byte = 2 hexa chars, so << 2 */
/* Key length in bits: each hex character encodes 4 bits. */
556 klen = strlen(key) << 2;
558 kprintf("crypto target - dev=%s, crypto_alg=%s, crypto_mode=%s, "
559 "iv_mode=%s, iv_opt=%s, key=%s, iv_offset=%ju, block_offset=%ju\n",
560 dev, crypto_alg, crypto_mode, iv_mode, iv_opt, key, iv_offset,
563 if ((priv = kmalloc(sizeof(dm_target_crypt_config_t), M_DMCRYPT, M_NOWAIT))
565 kprintf("kmalloc in dm_target_crypt_init failed, M_NOWAIT to blame\n");
566 kfree(status_str, M_DMCRYPT);
570 /* Insert dmp to global pdev list */
571 if ((priv->pdev = dm_pdev_insert(dev)) == NULL) {
572 kprintf("dm_pdev_insert failed\n");
/* NOTE(review): priv appears to leak here — confirm (freeing lines
 * are not visible in this excerpt). */
573 kfree(status_str, M_DMCRYPT);
/* Only CBC chaining is implemented. */
577 if (strcmp(crypto_mode, "cbc") != 0) {
578 kprintf("dm_target_crypt: only support 'cbc' chaining mode, invalid mode '%s'\n", crypto_mode);
/* Map the cipher name to an opencrypto algorithm and validate klen. */
582 if (!strcmp(crypto_alg, "aes")) {
583 priv->crypto_alg = CRYPTO_AES_CBC;
584 if (klen != 128 && klen != 192 && klen != 256)
586 priv->crypto_klen = klen;
588 } else if (!strcmp(crypto_alg, "blowfish")) {
589 priv->crypto_alg = CRYPTO_BLF_CBC;
590 if (klen < 128 || klen > 448 || (klen % 8) != 0)
592 priv->crypto_klen = klen;
594 } else if (!strcmp(crypto_alg, "3des") || !strncmp(crypto_alg, "des3", 4)) {
595 priv->crypto_alg = CRYPTO_3DES_CBC;
/* 3DES has a fixed effective key length. */
598 priv->crypto_klen = 168;
600 } else if (!strcmp(crypto_alg, "camellia")) {
601 priv->crypto_alg = CRYPTO_CAMELLIA_CBC;
602 if (klen != 128 && klen != 192 && klen != 256)
604 priv->crypto_klen = klen;
606 } else if (!strcmp(crypto_alg, "skipjack")) {
607 priv->crypto_alg = CRYPTO_SKIPJACK_CBC;
610 priv->crypto_klen = 80;
612 } else if (!strcmp(crypto_alg, "cast5")) {
613 priv->crypto_alg = CRYPTO_CAST_CBC;
616 priv->crypto_klen = 128;
618 } else if (!strcmp(crypto_alg, "null")) {
619 priv->crypto_alg = CRYPTO_NULL_CBC;
622 priv->crypto_klen = 128;
625 kprintf("Unsupported crypto algorithm: %s\n", crypto_alg);
629 /* Save length of param string */
630 priv->params_len = len;
631 priv->block_offset = block_offset;
632 priv->iv_offset = iv_offset;
634 *target_config = priv;
636 dmv->dev_type = DM_CRYPTO_DEV;
/* Fill the session template for crypto_newsession() below. */
638 priv->crypto_session.cri_alg = priv->crypto_alg;
639 priv->crypto_session.cri_klen = priv->crypto_klen;
640 priv->crypto_session.cri_mlen = 0;
642 error = hex2key(key, priv->crypto_klen >> 3, (u_int8_t *)priv->crypto_key);
644 kprintf("hex2key failed!!\n");
648 kprintf("priv->crypto_klen >> 3 = %d\n", priv->crypto_klen >> 3);
/* Select the IV generation strategy; essiv needs a key-hash digest. */
651 if (!strcmp(iv_mode, "essiv")) {
652 error = essiv_hash_mkey(priv, iv_opt);
654 kprintf("essiv_hash_mkey returned error!\n");
657 priv->crypto_ivgen = essiv_ivgen;
658 } else if (!strcmp(iv_mode, "plain")) {
659 priv->crypto_ivgen = plain_ivgen;
661 kprintf("dm_target_crypt: only support iv_mode='essiv' and 'plain', iv_mode='%s' unsupported\n", iv_mode);
664 priv->crypto_session.cri_key = (u_int8_t *)priv->crypto_key;
665 priv->crypto_session.cri_next = NULL;
/* Accept either a software or hardware crypto driver. */
667 error = crypto_newsession(&priv->crypto_sid,
668 &priv->crypto_session,
669 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_HARDWARE);
671 kprintf("Error during crypto_newsession, error = %d\n", error);
675 priv->status_str = status_str;
/* Common error exit: frees the params copy.  NOTE(review): priv and the
 * pdev reference taken above do not appear to be released here — confirm
 * against the lines not visible in this excerpt. */
679 kprintf("returning ENOTSUP from crypt_init thingie... notsup label\n");
680 kfree(status_str, M_DMCRYPT);
684 /* Status routine called to get params string. */
/*
 * Status callback: return a freshly allocated copy of the original
 * parameter string (saved in priv->status_str at init time).  The caller
 * owns and frees the returned buffer.
 * NOTE(review): kmalloc with M_WAITOK does not return NULL on DragonFly,
 * so the NULL check below is likely dead code — confirm.
 */
686 dm_target_crypt_status(void *target_config)
688 dm_target_crypt_config_t *priv;
691 priv = target_config;
693 if ((params = kmalloc(DM_MAX_PARAMS_SIZE, M_DMCRYPT, M_WAITOK)) == NULL)
696 ksnprintf(params, DM_MAX_PARAMS_SIZE, "%s",
702 /* Strategy routine called from dm_strategy. */
704 * Do IO operation, called from dmstrategy routine.
/*
 * Strategy routine called from dmstrategy.  Rejects I/O that is not a
 * whole number of DEV_BSIZE sectors, then routes reads (device first,
 * decrypt in read_done) and writes (encrypt first via the worker, device
 * write happens in the crypto completion) differently.  Other commands
 * fall through untouched to the backing device.
 */
707 dm_target_crypt_strategy(dm_table_entry_t * table_en, struct buf * bp)
711 dm_target_crypt_config_t *priv;
712 priv = table_en->target_config;
714 /* Get rid of stuff we can't really handle */
715 if ((bp->b_cmd == BUF_CMD_READ) || (bp->b_cmd == BUF_CMD_WRITE)) {
/* Partial or zero-length sectors cannot be crypted per-sector. */
716 if (((bp->b_bcount % DEV_BSIZE) != 0) || (bp->b_bcount == 0)) {
717 kprintf("dm_target_crypt_strategy: can't really handle bp->b_bcount = %d\n", bp->b_bcount);
718 bp->b_error = EINVAL;
719 bp->b_flags |= B_ERROR | B_INVAL;
720 biodone(&bp->b_bio1);
/* Read: push a bio, read ciphertext, decrypt in read_done -> work. */
727 bio = push_bio(&bp->b_bio1);
/* Apply the target's sector offset into the backing device. */
728 bio->bio_offset = bp->b_bio1.bio_offset + priv->block_offset*DEV_BSIZE;
729 bio->bio_caller_info1.ptr = priv;
730 bio->bio_done = dm_target_crypt_read_done;
731 vn_strategy(priv->pdev->pdev_vnode, bio);
/* Write: encrypt first; the device write is issued from the crypto
 * completion callback, and write_done finishes up. */
735 bio = push_bio(&bp->b_bio1);
736 bio->bio_offset = bp->b_bio1.bio_offset + priv->block_offset*DEV_BSIZE;
737 bio->bio_caller_info1.ptr = priv;
738 bio->bio_done = dm_target_crypt_write_done;
739 dm_target_crypt_work(priv, bio);
/* Non-read/write commands pass straight through. */
743 vn_strategy(priv->pdev->pdev_vnode, &bp->b_bio1);
/*
 * Destroy callback: drop the pdev reference, unbusy the target so the
 * module can unload, and free the per-target state.
 * NOTE(review): crypto session teardown (crypto_freesession) is not
 * visible in this excerpt — confirm it is not missing.  Also, priv holds
 * raw key material; consider zeroizing crypto_key/crypto_keyhash before
 * kfree so the key does not linger in freed kernel memory.
 */
751 dm_target_crypt_destroy(dm_table_entry_t * table_en)
753 dm_target_crypt_config_t *priv;
755 priv = table_en->target_config;
760 dm_pdev_decr(priv->pdev);
762 /* Unbusy target so we can unload it */
763 dm_target_unbusy(table_en->target);
765 kfree(priv->status_str, M_DMCRYPT);
766 kfree(priv, M_DMCRYPT);
768 table_en->target_config = NULL;
/*
 * Deps callback: report the backing device as this target's dependency
 * by appending its dev_t (built from the vnode's rmajor/rminor) to the
 * property array consumed by the dm core.
 */
774 dm_target_crypt_deps(dm_table_entry_t * table_en, prop_array_t prop_array)
776 dm_target_crypt_config_t *priv;
/* Nothing to report before init has populated target_config. */
781 if (table_en->target_config == NULL)
784 priv = table_en->target_config;
786 if ((error = VOP_GETATTR(priv->pdev->pdev_vnode, &va)) != 0)
789 prop_array_add_uint64(prop_array, (uint64_t) makeudev(va.va_rmajor, va.va_rminor));
794 /* Unsupported for this target. */
796 dm_target_crypt_upcall(dm_table_entry_t * table_en, struct buf * bp)