crypto - Update crypto for AES XTS and CTR
author    Alex Hornung <ahornung@gmail.com>    Sat, 21 Aug 2010 10:51:38 +0000 (11:51 +0100)
committer Alex Hornung <ahornung@gmail.com>    Sat, 21 Aug 2010 13:12:43 +0000 (14:12 +0100)
* Sync up a bit with OpenBSD, adding 'reinit' and 'ivsize' to the xform
  structure.

* Add support for AES-XTS and AES-CTR.

Obtained-from: OpenBSD
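
The core of the change is that transforms which provide a 'reinit' hook
(AES-XTS, AES-CTR) take over IV/tweak handling themselves: the caller copies
'ivsize' bytes of IV, calls reinit() once, and then invokes encrypt()/decrypt()
per block with no CBC chaining. Below is a minimal stand-alone sketch of that
dispatch; the toy_* "cipher" and the sketch_encdec() driver are illustrative
stand-ins written for this note, not part of the committed code.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/*
 * Mirrors the updated struct enc_xform: blocksize and ivsize are now
 * separate fields, and an optional reinit() hook hands all IV handling
 * over to the xform itself.
 */
struct toy_xform {
	const char *name;
	uint16_t blocksize, ivsize;
	void (*encrypt)(void *ctx, uint8_t *blk);
	void (*reinit)(void *ctx, const uint8_t *iv);	/* NULL for CBC-style ciphers */
};

/*
 * Illustrative stand-in "cipher": XORs each block with a counter-derived
 * keystream, just to make the reinit/encrypt call sequence visible.
 */
struct toy_ctx { uint8_t counter[16]; };

static void toy_reinit(void *vctx, const uint8_t *iv)
{
	struct toy_ctx *ctx = vctx;

	memcpy(ctx->counter, iv, 8);	/* 8-byte IV, like AESCTR_IV_LEN */
	memset(ctx->counter + 8, 0, 8);	/* zero the counter portion */
}

static void toy_encrypt(void *vctx, uint8_t *blk)
{
	struct toy_ctx *ctx = vctx;
	int i;

	for (i = 15; i >= 8; i--)	/* bump the counter */
		if (++ctx->counter[i])
			break;
	for (i = 0; i < 16; i++)	/* "keystream" = counter bytes */
		blk[i] ^= ctx->counter[i];
}

/*
 * The dispatch pattern swcr_encdec() now follows: reinit once with ivsize
 * bytes of IV, then per-block encrypt with no chaining.
 */
static void sketch_encdec(const struct toy_xform *exf, void *ctx,
    uint8_t *buf, size_t len, const uint8_t *iv)
{
	size_t off;

	if (exf->reinit)
		exf->reinit(ctx, iv);	/* xform owns all IV handling */
	for (off = 0; off + exf->blocksize <= len; off += exf->blocksize)
		exf->encrypt(ctx, buf + off);
}

int main(void)
{
	static const struct toy_xform xf = {
		"toy-ctr", 16, 8, toy_encrypt, toy_reinit
	};
	struct toy_ctx ctx;
	uint8_t iv[8] = { 0 }, buf[32] = "reinit/ivsize dispatch example";

	sketch_encdec(&xf, &ctx, buf, sizeof(buf), iv);
	printf("first byte after toy encrypt: 0x%02x\n", buf[0]);
	return 0;
}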

sys/opencrypto/cryptodev.c
sys/opencrypto/cryptodev.h
sys/opencrypto/cryptosoft.c
sys/opencrypto/xform.c
sys/opencrypto/xform.h

diff --git a/sys/opencrypto/cryptodev.c b/sys/opencrypto/cryptodev.c
index e13fab3..beb9e99 100644
@@ -185,6 +185,12 @@ cryptof_ioctl(struct file *fp, u_long cmd, caddr_t data,
                case CRYPTO_AES_CBC:
                        txform = &enc_xform_rijndael128;
                        break;
+               case CRYPTO_AES_XTS:
+                       txform = &enc_xform_aes_xts;
+                       break;
+               case CRYPTO_AES_CTR:
+                       txform = &enc_xform_aes_ctr;
+                       break;
                case CRYPTO_NULL_CBC:
                        txform = &enc_xform_null;
                        break;
diff --git a/sys/opencrypto/cryptodev.h b/sys/opencrypto/cryptodev.h
index 75d4273..7da7e67 100644
 #define CAST128_BLOCK_LEN      8
 #define RIJNDAEL128_BLOCK_LEN  16
 #define AES_BLOCK_LEN          RIJNDAEL128_BLOCK_LEN
+#define AES_XTS_BLOCK_LEN      16
+#define AES_XTS_IV_LEN         8
+#define AESCTR_IV_LEN          8
+#define AESCTR_BLOCK_LEN       16
 #define CAMELLIA_BLOCK_LEN     16
 #define EALG_MAX_BLOCK_LEN     AES_BLOCK_LEN /* Keep this updated */
 
 #define        CRYPTO_SHA2_384_HMAC    19
 #define        CRYPTO_SHA2_512_HMAC    20
 #define CRYPTO_CAMELLIA_CBC    21
-#define        CRYPTO_ALGORITHM_MAX    21 /* Keep updated - see below */
+#define CRYPTO_AES_XTS         22
+#define CRYPTO_AES_CTR         23
+#define        CRYPTO_ALGORITHM_MAX    23 /* Keep updated - see below */
 
 /* Algorithm flags */
 #define        CRYPTO_ALG_FLAG_SUPPORTED       0x01 /* Algorithm is supported */
diff --git a/sys/opencrypto/cryptosoft.c b/sys/opencrypto/cryptosoft.c
index ece8766..6b8f7c3 100644
@@ -80,12 +80,13 @@ swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
        u_int8_t *kschedule;
        u_int8_t *okschedule;
        struct enc_xform *exf;
-       int i, k, j, blks;
+       int i, k, j, blks, ivlen;
        int error;
        int explicit_kschedule;
 
        exf = sw->sw_exf;
        blks = exf->blocksize;
+       ivlen = exf->ivsize;
 
        /* Check for non-padded data */
        if (crd->crd_len % blks)
@@ -95,24 +96,33 @@ swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
        if (crd->crd_flags & CRD_F_ENCRYPT) {
                /* IV explicitly provided ? */
                if (crd->crd_flags & CRD_F_IV_EXPLICIT)
-                       bcopy(crd->crd_iv, iv, blks);
+                       bcopy(crd->crd_iv, iv, ivlen);
                else
-                       karc4rand(iv, blks);
+                       karc4rand(iv, ivlen);
 
                /* Do we need to write the IV */
                if (!(crd->crd_flags & CRD_F_IV_PRESENT))
-                       crypto_copyback(flags, buf, crd->crd_inject, blks, iv);
+                       crypto_copyback(flags, buf, crd->crd_inject, ivlen, iv);
 
        } else {        /* Decryption */
                        /* IV explicitly provided ? */
                if (crd->crd_flags & CRD_F_IV_EXPLICIT)
-                       bcopy(crd->crd_iv, iv, blks);
+                       bcopy(crd->crd_iv, iv, ivlen);
                else {
                        /* Get IV off buf */
-                       crypto_copydata(flags, buf, crd->crd_inject, blks, iv);
+                       crypto_copydata(flags, buf, crd->crd_inject, ivlen, iv);
                }
        }
 
+       ivp = iv;
+
+       /*
+        * xforms that provide a reinit method perform all IV
+        * handling themselves.
+        */
+       if (exf->reinit)
+               exf->reinit(sw->sw_kschedule, iv);
+
        /*
         * The semantics are seriously broken because the session key
         * storage was never designed for concurrent ops.
@@ -132,8 +142,6 @@ swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
                explicit_kschedule = 0;
        }
 
-       ivp = iv;
-
        if (flags & CRYPTO_F_IMBUF) {
                struct mbuf *m = (struct mbuf *) buf;
 
@@ -155,7 +163,15 @@ swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
                                m_copydata(m, k, blks, blk);
 
                                /* Actual encryption/decryption */
-                               if (crd->crd_flags & CRD_F_ENCRYPT) {
+                               if (exf->reinit) {
+                                       if (crd->crd_flags & CRD_F_ENCRYPT) {
+                                               exf->encrypt(kschedule,
+                                                   blk);
+                                       } else {
+                                               exf->decrypt(kschedule,
+                                                   blk);
+                                       }
+                               } else if (crd->crd_flags & CRD_F_ENCRYPT) {
                                        /* XOR with previous block */
                                        for (j = 0; j < blks; j++)
                                                blk[j] ^= ivp[j];
@@ -229,7 +245,15 @@ swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
                        idat = mtod(m, unsigned char *) + k;
 
                        while (m->m_len >= k + blks && i > 0) {
-                               if (crd->crd_flags & CRD_F_ENCRYPT) {
+                               if (exf->reinit) {
+                                       if (crd->crd_flags & CRD_F_ENCRYPT) {
+                                               exf->encrypt(kschedule,
+                                                   idat);
+                                       } else {
+                                               exf->decrypt(kschedule,
+                                                   idat);
+                                       }
+                               } else if (crd->crd_flags & CRD_F_ENCRYPT) {
                                        /* XOR with previous block/IV */
                                        for (j = 0; j < blks; j++)
                                                idat[j] ^= ivp[j];
@@ -286,7 +310,15 @@ swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
                                cuio_copydata(uio, k, blks, blk);
 
                                /* Actual encryption/decryption */
-                               if (crd->crd_flags & CRD_F_ENCRYPT) {
+                               if (exf->reinit) {
+                                       if (crd->crd_flags & CRD_F_ENCRYPT) {
+                                               exf->encrypt(kschedule,
+                                                   blk);
+                                       } else {
+                                               exf->decrypt(kschedule,
+                                                   blk);
+                                       }
+                               } else if (crd->crd_flags & CRD_F_ENCRYPT) {
                                        /* XOR with previous block */
                                        for (j = 0; j < blks; j++)
                                                blk[j] ^= ivp[j];
@@ -346,7 +378,15 @@ swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
                        idat = (char *)iov->iov_base + k;
 
                        while (iov->iov_len >= k + blks && i > 0) {
-                               if (crd->crd_flags & CRD_F_ENCRYPT) {
+                               if (exf->reinit) {
+                                       if (crd->crd_flags & CRD_F_ENCRYPT) {
+                                               exf->encrypt(kschedule,
+                                                   idat);
+                                       } else {
+                                               exf->decrypt(kschedule,
+                                                   idat);
+                                       }
+                               } else if (crd->crd_flags & CRD_F_ENCRYPT) {
                                        /* XOR with previous block/IV */
                                        for (j = 0; j < blks; j++)
                                                idat[j] ^= ivp[j];
@@ -681,6 +721,12 @@ swcr_newsession(device_t dev, u_int32_t *sid, struct cryptoini *cri)
                case CRYPTO_RIJNDAEL128_CBC:
                        txf = &enc_xform_rijndael128;
                        goto enccommon;
+               case CRYPTO_AES_XTS:
+                       txf = &enc_xform_aes_xts;
+                       goto enccommon;
+               case CRYPTO_AES_CTR:
+                       txf = &enc_xform_aes_ctr;
+                       goto enccommon;
                case CRYPTO_CAMELLIA_CBC:
                        txf = &enc_xform_camellia;
                        goto enccommon;
@@ -915,6 +961,8 @@ swcr_freesession_slot(struct swcr_data **swdp, u_int32_t sid)
                case CRYPTO_CAST_CBC:
                case CRYPTO_SKIPJACK_CBC:
                case CRYPTO_RIJNDAEL128_CBC:
+               case CRYPTO_AES_XTS:
+               case CRYPTO_AES_CTR:
                case CRYPTO_CAMELLIA_CBC:
                case CRYPTO_NULL_CBC:
                        txf = swd->sw_exf;
@@ -1029,6 +1077,8 @@ swcr_process(device_t dev, struct cryptop *crp, int hint)
                case CRYPTO_CAST_CBC:
                case CRYPTO_SKIPJACK_CBC:
                case CRYPTO_RIJNDAEL128_CBC:
+               case CRYPTO_AES_XTS:
+               case CRYPTO_AES_CTR:
                case CRYPTO_CAMELLIA_CBC:
                        if ((crp->crp_etype = swcr_encdec(crd, sw,
                            crp->crp_buf, crp->crp_flags)) != 0)
@@ -1123,6 +1173,8 @@ swcr_attach(device_t dev)
        REGISTER(CRYPTO_MD5);
        REGISTER(CRYPTO_SHA1);
        REGISTER(CRYPTO_RIJNDAEL128_CBC);
+       REGISTER(CRYPTO_AES_XTS);
+       REGISTER(CRYPTO_AES_CTR);
        REGISTER(CRYPTO_CAMELLIA_CBC);
        REGISTER(CRYPTO_DEFLATE_COMP);
 #undef REGISTER
diff --git a/sys/opencrypto/xform.c b/sys/opencrypto/xform.c
index 6620a12..67d406e 100644
@@ -73,6 +73,8 @@ static        int blf_setkey(u_int8_t **, u_int8_t *, int);
 static int cast5_setkey(u_int8_t **, u_int8_t *, int);
 static int skipjack_setkey(u_int8_t **, u_int8_t *, int);
 static int rijndael128_setkey(u_int8_t **, u_int8_t *, int);
+static int aes_xts_setkey(u_int8_t **, u_int8_t *, int);
+static int aes_ctr_setkey(u_int8_t **, u_int8_t *, int);
 static int cml_setkey(u_int8_t **, u_int8_t *, int);
 static void des1_encrypt(caddr_t, u_int8_t *);
 static void des3_encrypt(caddr_t, u_int8_t *);
@@ -80,6 +82,7 @@ static        void blf_encrypt(caddr_t, u_int8_t *);
 static void cast5_encrypt(caddr_t, u_int8_t *);
 static void skipjack_encrypt(caddr_t, u_int8_t *);
 static void rijndael128_encrypt(caddr_t, u_int8_t *);
+static void aes_xts_encrypt(caddr_t, u_int8_t *);
 static void cml_encrypt(caddr_t, u_int8_t *);
 static void des1_decrypt(caddr_t, u_int8_t *);
 static void des3_decrypt(caddr_t, u_int8_t *);
@@ -87,6 +90,7 @@ static        void blf_decrypt(caddr_t, u_int8_t *);
 static void cast5_decrypt(caddr_t, u_int8_t *);
 static void skipjack_decrypt(caddr_t, u_int8_t *);
 static void rijndael128_decrypt(caddr_t, u_int8_t *);
+static void aes_xts_decrypt(caddr_t, u_int8_t *);
 static void cml_decrypt(caddr_t, u_int8_t *);
 static void des1_zerokey(u_int8_t **);
 static void des3_zerokey(u_int8_t **);
@@ -94,8 +98,15 @@ static       void blf_zerokey(u_int8_t **);
 static void cast5_zerokey(u_int8_t **);
 static void skipjack_zerokey(u_int8_t **);
 static void rijndael128_zerokey(u_int8_t **);
+static void aes_xts_zerokey(u_int8_t **);
+static void aes_ctr_zerokey(u_int8_t **);
 static void cml_zerokey(u_int8_t **);
 
+static void aes_ctr_crypt(caddr_t, u_int8_t *);
+
+static void aes_ctr_reinit(caddr_t, u_int8_t *);
+static void aes_xts_reinit(caddr_t, u_int8_t *);
+
 static void null_init(void *);
 static int null_update(void *, u_int8_t *, u_int16_t);
 static void null_final(u_int8_t *, void *);
@@ -111,89 +122,122 @@ static   int SHA512Update_int(void *, u_int8_t *, u_int16_t);
 static u_int32_t deflate_compress(u_int8_t *, u_int32_t, u_int8_t **);
 static u_int32_t deflate_decompress(u_int8_t *, u_int32_t, u_int8_t **);
 
+/* Helper */
+struct aes_xts_ctx;
+static void aes_xts_crypt(struct aes_xts_ctx *, u_int8_t *, u_int);
+
 MALLOC_DEFINE(M_XDATA, "xform", "xform data buffers");
 
 /* Encryption instances */
 struct enc_xform enc_xform_null = {
        CRYPTO_NULL_CBC, "NULL",
        /* NB: blocksize of 4 is to generate a properly aligned ESP header */
-       NULL_BLOCK_LEN, 0, 256, /* 2048 bits, max key */
+       NULL_BLOCK_LEN, NULL_BLOCK_LEN, 0, 256, /* 2048 bits, max key */
        null_encrypt,
        null_decrypt,
        null_setkey,
        null_zerokey,
+       NULL
 };
 
 struct enc_xform enc_xform_des = {
        CRYPTO_DES_CBC, "DES",
-       DES_BLOCK_LEN, 8, 8,
+       DES_BLOCK_LEN, DES_BLOCK_LEN, 8, 8,
        des1_encrypt,
        des1_decrypt,
        des1_setkey,
        des1_zerokey,
+       NULL
 };
 
 struct enc_xform enc_xform_3des = {
        CRYPTO_3DES_CBC, "3DES",
-       DES3_BLOCK_LEN, 24, 24,
+       DES3_BLOCK_LEN, DES3_BLOCK_LEN, 24, 24,
        des3_encrypt,
        des3_decrypt,
        des3_setkey,
-       des3_zerokey
+       des3_zerokey,
+       NULL
 };
 
 struct enc_xform enc_xform_blf = {
        CRYPTO_BLF_CBC, "Blowfish",
-       BLOWFISH_BLOCK_LEN, 5, 56 /* 448 bits, max key */,
+       BLOWFISH_BLOCK_LEN, BLOWFISH_BLOCK_LEN, 5, 56 /* 448 bits, max key */,
        blf_encrypt,
        blf_decrypt,
        blf_setkey,
-       blf_zerokey
+       blf_zerokey,
+       NULL
 };
 
 struct enc_xform enc_xform_cast5 = {
        CRYPTO_CAST_CBC, "CAST-128",
-       CAST128_BLOCK_LEN, 5, 16,
+       CAST128_BLOCK_LEN, CAST128_BLOCK_LEN, 5, 16,
        cast5_encrypt,
        cast5_decrypt,
        cast5_setkey,
-       cast5_zerokey
+       cast5_zerokey,
+       NULL
 };
 
 struct enc_xform enc_xform_skipjack = {
        CRYPTO_SKIPJACK_CBC, "Skipjack",
-       SKIPJACK_BLOCK_LEN, 10, 10,
+       SKIPJACK_BLOCK_LEN, SKIPJACK_BLOCK_LEN, 10, 10,
        skipjack_encrypt,
        skipjack_decrypt,
        skipjack_setkey,
-       skipjack_zerokey
+       skipjack_zerokey,
+       NULL
 };
 
 struct enc_xform enc_xform_rijndael128 = {
        CRYPTO_RIJNDAEL128_CBC, "Rijndael-128/AES",
-       RIJNDAEL128_BLOCK_LEN, 8, 32,
+       RIJNDAEL128_BLOCK_LEN, RIJNDAEL128_BLOCK_LEN, 8, 32,
        rijndael128_encrypt,
        rijndael128_decrypt,
        rijndael128_setkey,
        rijndael128_zerokey,
+       NULL
+};
+
+struct enc_xform enc_xform_aes_xts = {
+       CRYPTO_AES_XTS, "AES-XTS",
+       AES_XTS_BLOCK_LEN, AES_XTS_IV_LEN, 32, 64,
+       aes_xts_encrypt,
+       aes_xts_decrypt,
+       aes_xts_setkey,
+       aes_xts_zerokey,
+       aes_xts_reinit
+};
+
+struct enc_xform enc_xform_aes_ctr = {
+       CRYPTO_AES_CTR, "AES-CTR",
+       AESCTR_BLOCK_LEN, AESCTR_IV_LEN, 16+4, 32+4,
+       aes_ctr_crypt,
+       aes_ctr_crypt,
+       aes_ctr_setkey,
+       aes_ctr_zerokey,
+       aes_ctr_reinit
 };
 
 struct enc_xform enc_xform_arc4 = {
        CRYPTO_ARC4, "ARC4",
-       1, 1, 32,
+       1, 1, 1, 32,
        NULL,
        NULL,
        NULL,
        NULL,
+       NULL
 };
 
 struct enc_xform enc_xform_camellia = {
        CRYPTO_CAMELLIA_CBC, "Camellia",
-       CAMELLIA_BLOCK_LEN, 8, 32,
+       CAMELLIA_BLOCK_LEN, CAMELLIA_BLOCK_LEN, 8, 32,
        cml_encrypt,
        cml_decrypt,
        cml_setkey,
        cml_zerokey,
+       NULL
 };
 
 /* Authentication instances */
@@ -544,6 +588,170 @@ rijndael128_zerokey(u_int8_t **sched)
        *sched = NULL;
 }
 
+#define AES_XTS_ALPHA          0x87    /* GF(2^128) generator polynomial */
+
+struct aes_xts_ctx {
+       rijndael_ctx key1;
+       rijndael_ctx key2;
+       u_int8_t tweak[AES_XTS_BLOCK_LEN];
+};
+
+void
+aes_xts_reinit(caddr_t key, u_int8_t *iv)
+{
+       struct aes_xts_ctx *ctx = (struct aes_xts_ctx *)key;
+       u_int64_t blocknum;
+       u_int i;
+
+       /*
+        * Prepare tweak as E_k2(IV). IV is specified as LE representation
+        * of a 64-bit block number which we allow to be passed in directly.
+        */
+       bcopy(iv, &blocknum, AES_XTS_IV_LEN);
+       for (i = 0; i < AES_XTS_IV_LEN; i++) {
+               ctx->tweak[i] = blocknum & 0xff;
+               blocknum >>= 8;
+       }
+       /* Last 64 bits of IV are always zero */
+       bzero(ctx->tweak + AES_XTS_IV_LEN, AES_XTS_IV_LEN);
+
+       rijndael_encrypt(&ctx->key2, ctx->tweak, ctx->tweak);
+}
+
+void
+aes_xts_crypt(struct aes_xts_ctx *ctx, u_int8_t *data, u_int do_encrypt)
+{
+       u_int8_t block[AES_XTS_BLOCK_LEN];
+       u_int i, carry_in, carry_out;
+
+       for (i = 0; i < AES_XTS_BLOCK_LEN; i++)
+               block[i] = data[i] ^ ctx->tweak[i];
+
+       if (do_encrypt)
+               rijndael_encrypt(&ctx->key1, block, data);
+       else
+               rijndael_decrypt(&ctx->key1, block, data);
+
+       for (i = 0; i < AES_XTS_BLOCK_LEN; i++)
+               data[i] ^= ctx->tweak[i];
+
+       /* Exponentiate tweak */
+       carry_in = 0;
+       for (i = 0; i < AES_XTS_BLOCK_LEN; i++) {
+               carry_out = ctx->tweak[i] & 0x80;
+               ctx->tweak[i] = (ctx->tweak[i] << 1) | (carry_in ? 1 : 0);
+               carry_in = carry_out;
+       }
+       if (carry_in)
+               ctx->tweak[0] ^= AES_XTS_ALPHA;
+       bzero(block, sizeof(block));
+}
+
+void
+aes_xts_encrypt(caddr_t key, u_int8_t *data)
+{
+       aes_xts_crypt((struct aes_xts_ctx *)key, data, 1);
+}
+
+void
+aes_xts_decrypt(caddr_t key, u_int8_t *data)
+{
+       aes_xts_crypt((struct aes_xts_ctx *)key, data, 0);
+}
+
+int
+aes_xts_setkey(u_int8_t **sched, u_int8_t *key, int len)
+{
+       struct aes_xts_ctx *ctx;
+
+       if (len != 32 && len != 64)
+               return -1;
+
+       *sched = kmalloc(sizeof(struct aes_xts_ctx), M_CRYPTO_DATA,
+           M_WAITOK | M_ZERO);
+       ctx = (struct aes_xts_ctx *)*sched;
+
+       rijndael_set_key(&ctx->key1, key, len * 4);
+       rijndael_set_key(&ctx->key2, key + (len / 2), len * 4);
+
+       return 0;
+}
+
+void
+aes_xts_zerokey(u_int8_t **sched)
+{
+       bzero(*sched, sizeof(struct aes_xts_ctx));
+       kfree(*sched, M_CRYPTO_DATA);
+       *sched = NULL;
+}
+
+#define AESCTR_NONCESIZE       4
+
+struct aes_ctr_ctx {
+       u_int32_t       ac_ek[4*(14 + 1)];
+       u_int8_t        ac_block[AESCTR_BLOCK_LEN];
+       int             ac_nr;
+};
+
+void
+aes_ctr_reinit(caddr_t key, u_int8_t *iv)
+{
+       struct aes_ctr_ctx *ctx;
+
+       ctx = (struct aes_ctr_ctx *)key;
+       bcopy(iv, ctx->ac_block + AESCTR_NONCESIZE, AESCTR_IV_LEN);
+
+       /* reset counter */
+       bzero(ctx->ac_block + AESCTR_NONCESIZE + AESCTR_IV_LEN, 4);
+}
+
+void
+aes_ctr_crypt(caddr_t key, u_int8_t *data)
+{
+       struct aes_ctr_ctx *ctx;
+       u_int8_t keystream[AESCTR_BLOCK_LEN];
+       int i;
+
+       ctx = (struct aes_ctr_ctx *)key;
+       /* increment counter */
+       for (i = AESCTR_BLOCK_LEN - 1;
+           i >= AESCTR_NONCESIZE + AESCTR_IV_LEN; i--)
+               if (++ctx->ac_block[i])   /* continue on overflow */
+                       break;
+       rijndaelEncrypt(ctx->ac_ek, ctx->ac_nr, ctx->ac_block, keystream);
+       for (i = 0; i < AESCTR_BLOCK_LEN; i++)
+               data[i] ^= keystream[i];
+}
+
+int
+aes_ctr_setkey(u_int8_t **sched, u_int8_t *key, int len)
+{
+       struct aes_ctr_ctx *ctx;
+
+       if (len < AESCTR_NONCESIZE)
+               return -1;
+
+       *sched = kmalloc(sizeof(struct aes_ctr_ctx), M_CRYPTO_DATA,
+           M_WAITOK | M_ZERO);
+       ctx = (struct aes_ctr_ctx *)*sched;
+       ctx->ac_nr = rijndaelKeySetupEnc(ctx->ac_ek, (u_char *)key,
+           (len - AESCTR_NONCESIZE) * 8);
+       if (ctx->ac_nr == 0) {
+               aes_ctr_zerokey(sched);
+               return -1;
+       }
+       bcopy(key + len - AESCTR_NONCESIZE, ctx->ac_block, AESCTR_NONCESIZE);
+       return 0;
+}
+
+void
+aes_ctr_zerokey(u_int8_t **sched)
+{
+       bzero(*sched, sizeof(struct aes_ctr_ctx));
+       kfree(*sched, M_CRYPTO_DATA);
+       *sched = NULL;
+}
+
 static void
 cml_encrypt(caddr_t key, u_int8_t *blk)
 {
diff --git a/sys/opencrypto/xform.h b/sys/opencrypto/xform.h
index 8481424..b44f1db 100644
@@ -49,12 +49,13 @@ struct auth_hash {
 struct enc_xform {
        int type;
        char *name;
-       u_int16_t blocksize;
+       u_int16_t blocksize, ivsize;
        u_int16_t minkey, maxkey;
        void (*encrypt) (caddr_t, u_int8_t *);
        void (*decrypt) (caddr_t, u_int8_t *);
        int (*setkey) (u_int8_t **, u_int8_t *, int len);
        void (*zerokey) (u_int8_t **);
+       void (*reinit) (caddr_t, u_int8_t *);
 };
 
 struct comp_algo {
@@ -81,6 +82,8 @@ extern struct enc_xform enc_xform_blf;
 extern struct enc_xform enc_xform_cast5;
 extern struct enc_xform enc_xform_skipjack;
 extern struct enc_xform enc_xform_rijndael128;
+extern struct enc_xform enc_xform_aes_xts;
+extern struct enc_xform enc_xform_aes_ctr;
 extern struct enc_xform enc_xform_arc4;
 extern struct enc_xform enc_xform_camellia;
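
With CRYPTO_AES_XTS and CRYPTO_AES_CTR registered in the software driver,
userland can request the new modes through /dev/crypto in the usual way. A
hedged sketch follows, assuming the standard OpenBSD-derived cryptodev ioctl
interface (CRIOGET, CIOCGSESSION, CIOCCRYPT, CIOCFSESSION); the header path,
the 512-byte "sector", the placeholder key, and the little-endian host
assumption for the XTS block-number IV are illustrative choices made for this
example, not part of the commit. Key lengths follow the xform table above:
32 or 64 bytes for AES-XTS, and 16/24/32 bytes plus the 4-byte nonce for
AES-CTR.

#include <sys/types.h>
#include <sys/ioctl.h>

#include <crypto/cryptodev.h>	/* assumed userland install path of cryptodev.h */

#include <err.h>
#include <fcntl.h>
#include <stdint.h>
#include <string.h>
#include <unistd.h>

int
main(void)
{
	struct session_op sop;
	struct crypt_op cop;
	uint8_t key[32];	/* two 128-bit keys for AES-XTS (placeholder material) */
	uint8_t buf[512];	/* one "sector" worth of data */
	uint8_t iv[8];		/* XTS IV: 64-bit block number, little-endian */
	uint64_t sector = 42;
	int fd, cfd;

	memset(key, 0x5a, sizeof(key));
	memset(buf, 0, sizeof(buf));

	if ((fd = open("/dev/crypto", O_RDWR)) < 0)
		err(1, "open(/dev/crypto)");
	if (ioctl(fd, CRIOGET, &cfd) < 0)
		err(1, "CRIOGET");

	memset(&sop, 0, sizeof(sop));
	sop.cipher = CRYPTO_AES_XTS;
	sop.keylen = sizeof(key);	/* 32 or 64 bytes accepted by aes_xts_setkey() */
	sop.key = (caddr_t)key;
	if (ioctl(cfd, CIOCGSESSION, &sop) < 0)
		err(1, "CIOCGSESSION");

	/* Build the 8-byte IV from the block number (assumes LE host for brevity). */
	memcpy(iv, &sector, sizeof(iv));

	memset(&cop, 0, sizeof(cop));
	cop.ses = sop.ses;
	cop.op = COP_ENCRYPT;
	cop.len = sizeof(buf);
	cop.src = (caddr_t)buf;
	cop.dst = (caddr_t)buf;	/* encrypt in place */
	cop.iv = (caddr_t)iv;
	if (ioctl(cfd, CIOCCRYPT, &cop) < 0)
		err(1, "CIOCCRYPT");

	ioctl(cfd, CIOCFSESSION, &sop.ses);
	close(cfd);
	close(fd);
	return 0;
}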