about summary refs log tree commit diff
path: root/sys/opencrypto/cryptosoft.c
diff options
context:
space:
mode:
Diffstat (limited to 'sys/opencrypto/cryptosoft.c')
-rw-r--r--  sys/opencrypto/cryptosoft.c  199
1 files changed, 199 insertions, 0 deletions
diff --git a/sys/opencrypto/cryptosoft.c b/sys/opencrypto/cryptosoft.c
index e5a1139039d0..b92e25e86f76 100644
--- a/sys/opencrypto/cryptosoft.c
+++ b/sys/opencrypto/cryptosoft.c
@@ -867,6 +867,168 @@ out:
return (error);
}
+static int
+swcr_chacha20_poly1305(struct swcr_session *ses, struct cryptop *crp)
+{
+ const struct crypto_session_params *csp;
+ uint64_t blkbuf[howmany(CHACHA20_NATIVE_BLOCK_LEN, sizeof(uint64_t))];
+ u_char *blk = (u_char *)blkbuf;
+ u_char tag[POLY1305_HASH_LEN];
+ struct crypto_buffer_cursor cc_in, cc_out;
+ const u_char *inblk;
+ u_char *outblk;
+ uint64_t *blkp;
+ union authctx ctx;
+ struct swcr_auth *swa;
+ struct swcr_encdec *swe;
+ struct auth_hash *axf;
+ struct enc_xform *exf;
+ int blksz, error, r, resid;
+
+ swa = &ses->swcr_auth;
+ axf = swa->sw_axf;
+
+ swe = &ses->swcr_encdec;
+ exf = swe->sw_exf;
+ blksz = exf->native_blocksize;
+ KASSERT(blksz <= sizeof(blkbuf), ("%s: blocksize mismatch", __func__));
+
+ if ((crp->crp_flags & CRYPTO_F_IV_SEPARATE) == 0)
+ return (EINVAL);
+
+ csp = crypto_get_params(crp->crp_session);
+
+ /* Generate Poly1305 key. */
+ if (crp->crp_cipher_key != NULL)
+ axf->Setkey(&ctx, crp->crp_cipher_key, csp->csp_cipher_klen);
+ else
+ axf->Setkey(&ctx, csp->csp_cipher_key, csp->csp_cipher_klen);
+ axf->Reinit(&ctx, crp->crp_iv, csp->csp_ivlen);
+
+ /* Supply MAC with AAD */
+ if (crp->crp_aad != NULL)
+ axf->Update(&ctx, crp->crp_aad, crp->crp_aad_length);
+ else
+ crypto_apply(crp, crp->crp_aad_start,
+ crp->crp_aad_length, axf->Update, &ctx);
+ if (crp->crp_aad_length % 16 != 0) {
+ /* padding1 */
+ memset(blk, 0, 16);
+ axf->Update(&ctx, blk, 16 - crp->crp_aad_length % 16);
+ }
+
+ if (crp->crp_cipher_key != NULL)
+ exf->setkey(swe->sw_kschedule, crp->crp_cipher_key,
+ csp->csp_cipher_klen);
+ exf->reinit(swe->sw_kschedule, crp->crp_iv);
+
+ /* Do encryption with MAC */
+ crypto_cursor_init(&cc_in, &crp->crp_buf);
+ crypto_cursor_advance(&cc_in, crp->crp_payload_start);
+ if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
+ crypto_cursor_init(&cc_out, &crp->crp_obuf);
+ crypto_cursor_advance(&cc_out, crp->crp_payload_output_start);
+ } else
+ cc_out = cc_in;
+ for (resid = crp->crp_payload_length; resid >= blksz; resid -= blksz) {
+ if (crypto_cursor_seglen(&cc_in) < blksz) {
+ crypto_cursor_copydata(&cc_in, blksz, blk);
+ inblk = blk;
+ } else {
+ inblk = crypto_cursor_segbase(&cc_in);
+ crypto_cursor_advance(&cc_in, blksz);
+ }
+ if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
+ if (crypto_cursor_seglen(&cc_out) < blksz)
+ outblk = blk;
+ else
+ outblk = crypto_cursor_segbase(&cc_out);
+ exf->encrypt(swe->sw_kschedule, inblk, outblk);
+ axf->Update(&ctx, outblk, blksz);
+ if (outblk == blk)
+ crypto_cursor_copyback(&cc_out, blksz, blk);
+ else
+ crypto_cursor_advance(&cc_out, blksz);
+ } else {
+ axf->Update(&ctx, inblk, blksz);
+ }
+ }
+ if (resid > 0) {
+ crypto_cursor_copydata(&cc_in, resid, blk);
+ if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
+ exf->encrypt_last(swe->sw_kschedule, blk, blk, resid);
+ crypto_cursor_copyback(&cc_out, resid, blk);
+ }
+ axf->Update(&ctx, blk, resid);
+ if (resid % 16 != 0) {
+ /* padding2 */
+ memset(blk, 0, 16);
+ axf->Update(&ctx, blk, 16 - resid % 16);
+ }
+ }
+
+ /* lengths */
+ blkp = (uint64_t *)blk;
+ blkp[0] = htole64(crp->crp_aad_length);
+ blkp[1] = htole64(crp->crp_payload_length);
+ axf->Update(&ctx, blk, sizeof(uint64_t) * 2);
+
+ /* Finalize MAC */
+ axf->Final(tag, &ctx);
+
+ /* Validate tag */
+ error = 0;
+ if (!CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) {
+ u_char tag2[POLY1305_HASH_LEN];
+
+ crypto_copydata(crp, crp->crp_digest_start, swa->sw_mlen, tag2);
+
+ r = timingsafe_bcmp(tag, tag2, swa->sw_mlen);
+ explicit_bzero(tag2, sizeof(tag2));
+ if (r != 0) {
+ error = EBADMSG;
+ goto out;
+ }
+
+ /* tag matches, decrypt data */
+ crypto_cursor_init(&cc_in, &crp->crp_buf);
+ crypto_cursor_advance(&cc_in, crp->crp_payload_start);
+ for (resid = crp->crp_payload_length; resid > blksz;
+ resid -= blksz) {
+ if (crypto_cursor_seglen(&cc_in) < blksz) {
+ crypto_cursor_copydata(&cc_in, blksz, blk);
+ inblk = blk;
+ } else {
+ inblk = crypto_cursor_segbase(&cc_in);
+ crypto_cursor_advance(&cc_in, blksz);
+ }
+ if (crypto_cursor_seglen(&cc_out) < blksz)
+ outblk = blk;
+ else
+ outblk = crypto_cursor_segbase(&cc_out);
+ exf->decrypt(swe->sw_kschedule, inblk, outblk);
+ if (outblk == blk)
+ crypto_cursor_copyback(&cc_out, blksz, blk);
+ else
+ crypto_cursor_advance(&cc_out, blksz);
+ }
+ if (resid > 0) {
+ crypto_cursor_copydata(&cc_in, resid, blk);
+ exf->decrypt_last(swe->sw_kschedule, blk, blk, resid);
+ crypto_cursor_copyback(&cc_out, resid, blk);
+ }
+ } else {
+ /* Inject the authentication data */
+ crypto_copyback(crp, crp->crp_digest_start, swa->sw_mlen, tag);
+ }
+
+out:
+ explicit_bzero(blkbuf, sizeof(blkbuf));
+ explicit_bzero(tag, sizeof(tag));
+ explicit_bzero(&ctx, sizeof(ctx));
+ return (error);
+}
+
/*
* Apply a cipher and a digest to perform EtA.
*/
@@ -1171,6 +1333,33 @@ swcr_setup_ccm(struct swcr_session *ses,
return (swcr_setup_cipher(ses, csp));
}
+static int
+swcr_setup_chacha20_poly1305(struct swcr_session *ses,
+ const struct crypto_session_params *csp)
+{
+ struct swcr_auth *swa;
+ struct auth_hash *axf;
+
+ if (csp->csp_ivlen != CHACHA20_POLY1305_IV_LEN)
+ return (EINVAL);
+
+ /* First, setup the auth side. */
+ swa = &ses->swcr_auth;
+ axf = &auth_hash_chacha20_poly1305;
+ swa->sw_axf = axf;
+ if (csp->csp_auth_mlen < 0 || csp->csp_auth_mlen > axf->hashsize)
+ return (EINVAL);
+ if (csp->csp_auth_mlen == 0)
+ swa->sw_mlen = axf->hashsize;
+ else
+ swa->sw_mlen = csp->csp_auth_mlen;
+
+ /* The auth state is regenerated for each nonce. */
+
+ /* Second, setup the cipher side. */
+ return (swcr_setup_cipher(ses, csp));
+}
+
static bool
swcr_auth_supported(const struct crypto_session_params *csp)
{
@@ -1258,6 +1447,7 @@ swcr_probesession(device_t dev, const struct crypto_session_params *csp)
switch (csp->csp_cipher_alg) {
case CRYPTO_AES_NIST_GCM_16:
case CRYPTO_AES_CCM_16:
+ case CRYPTO_CHACHA20_POLY1305:
return (EINVAL);
default:
if (!swcr_cipher_supported(csp))
@@ -1273,6 +1463,7 @@ swcr_probesession(device_t dev, const struct crypto_session_params *csp)
switch (csp->csp_cipher_alg) {
case CRYPTO_AES_NIST_GCM_16:
case CRYPTO_AES_CCM_16:
+ case CRYPTO_CHACHA20_POLY1305:
break;
default:
return (EINVAL);
@@ -1283,6 +1474,7 @@ swcr_probesession(device_t dev, const struct crypto_session_params *csp)
switch (csp->csp_cipher_alg) {
case CRYPTO_AES_NIST_GCM_16:
case CRYPTO_AES_CCM_16:
+ case CRYPTO_CHACHA20_POLY1305:
return (EINVAL);
}
switch (csp->csp_auth_alg) {
@@ -1343,6 +1535,7 @@ swcr_newsession(device_t dev, crypto_session_t cses,
#ifdef INVARIANTS
case CRYPTO_AES_NIST_GCM_16:
case CRYPTO_AES_CCM_16:
+ case CRYPTO_CHACHA20_POLY1305:
panic("bad cipher algo");
#endif
default:
@@ -1366,6 +1559,11 @@ swcr_newsession(device_t dev, crypto_session_t cses,
if (error == 0)
ses->swcr_process = swcr_ccm;
break;
+ case CRYPTO_CHACHA20_POLY1305:
+ error = swcr_setup_chacha20_poly1305(ses, csp);
+ if (error == 0)
+ ses->swcr_process = swcr_chacha20_poly1305;
+ break;
#ifdef INVARIANTS
default:
panic("bad aead algo");
@@ -1377,6 +1575,7 @@ swcr_newsession(device_t dev, crypto_session_t cses,
switch (csp->csp_cipher_alg) {
case CRYPTO_AES_NIST_GCM_16:
case CRYPTO_AES_CCM_16:
+ case CRYPTO_CHACHA20_POLY1305:
panic("bad eta cipher algo");
}
switch (csp->csp_auth_alg) {