Files
android_kernel_fxtec_sm6115/crypto/authencesn.c
Thomas Turner a984488d32 Merge tag 'v4.19.325-cip130' of https://git.kernel.org/pub/scm/linux/kernel/git/cip/linux-cip into android13-4.19-kona
version 4.19.325-cip130

* tag 'v4.19.325-cip130' of https://git.kernel.org/pub/scm/linux/kernel/git/cip/linux-cip:
  CIP: Bump version suffix to -cip130 after merge from cip/linux-4.19.y-st tree
  Update localversion-st, tree is up-to-date with 5.10.251.
  f2fs: fix out-of-bounds access in sysfs attribute read/write
  USB: serial: option: add Telit FN920C04 RNDIS compositions
  f2fs: fix to avoid UAF in f2fs_write_end_io()
  fbdev: rivafb: fix divide error in nv3_arb()
  scsi: qla2xxx: Fix bsg_done() causing double free
  scsi: qla2xxx: Validate sp before freeing associated memory
  scsi: qla2xxx: Free sp in error path to fix system crash
  crypto: virtio - Remove duplicated virtqueue_kick in virtio_crypto_skcipher_crypt_req
  fs: dlm: fix invalid derefence of sb_lvbptr
  gpiolib: acpi: Fix gpio count with string references
  platform/x86: classmate-laptop: Add missing NULL pointer checks
  drm/tegra: hdmi: sor: Fix error: variable ‘j’ set but not used
  gpio: sprd: Change sprd_gpio lock to raw_spin_lock
  gpio: omap: do not register driver in probe()
  scsi: qla2xxx: Query FW again before proceeding with login
  scsi: qla2xxx: Delay module unload while fabric scan in progress
  nilfs2: Fix potential block overflow that cause system hang
  crypto: virtio - Add spinlock protection with virtqueue notification
  crypto: omap - Allocate OMAP_CRYPTO_FORCE_COPY scatterlists correctly
  CIP: Bump version suffix to -cip129 after merge from cip/linux-4.19.y-st tree
  Update localversion-st, tree is up-to-date with 5.10.250.
  fbdev: ssd1307fb: fix build failure
  macvlan: fix possible UAF in macvlan_forward_source()
  HID: uclogic: Correct devm device reference for hidinput input_dev name
  xfs: set max_agbno to allow sparse alloc of last full inode chunk
  l2tp: avoid one data-race in l2tp_tunnel_del_work()
  platform/x86: intel_telemetry: Fix swapped arrays in PSS output
  macvlan: fix error recovery in macvlan_common_newlink()
  net: liquidio: Initialize netdev pointer before queue setup
  platform/x86: intel_telemetry: Fix PSS event register mask
  platform/x86: toshiba_haps: Fix memory leaks in add/remove routines
  scsi: target: iscsi: Fix use-after-free in iscsit_dec_conn_usage_count()
  scsi: target: iscsi: Fix use-after-free in iscsit_dec_session_usage_count()
  wifi: cfg80211: Fix bitrate calculation overflow for HE rates
  wifi: mac80211: collect station statistics earlier when disconnect
  ring-buffer: Avoid softlockup in ring_buffer_resize() during memory free
  HID: Apply quirk HID_QUIRK_ALWAYS_POLL to Edifier QR30 (2d99:a101)
  HID: quirks: Add another Chicony HP 5MP Cameras to hid_ignore_list
  HID: intel-ish-hid: Reset enum_devices_done before enumeration
  HID: multitouch: add MT_QUIRK_STICKY_FINGERS to MT_CLS_VTL
  net: usb: sr9700: support devices with virtual driver CD
  wifi: wlcore: ensure skb headroom before skb_push
  wifi: mac80211: ocb: skip rx_no_sta when interface is not joined
  ARM: 9468/1: fix memset64() on big-endian
  pinctrl: meson: mark the GPIO controller as sleeping
  writeback: fix 100% CPU usage when dirtytime_expire_interval is 0
  netfilter: nf_tables: typo NULL check in _clone() function
  ipv6: sr: Fix MAC comparison to be constant-time
  of: platform: Use default match table for /firmware
  can: esd_usb: esd_usb_read_bulk_callback(): fix URB memory leak
  driver core: fix potential null-ptr-deref in device_add()
  mei: trace: treat reg parameter as string
  iio: adc: exynos_adc: fix OF populate on driver rebind
  scsi: xen: scsiback: Fix potential memory leak in scsiback_remove()
  dmaengine: stm32: dmamux: fix device leak on route allocation
  nvme-fc: rename free_ctrl callback to match name pattern
  net/sched: act_ife: convert comma to semicolon
  scsi: be2iscsi: Fix a memory leak in beiscsi_boot_get_sinfo()
  scsi: firewire: sbp-target: Fix overflow in sbp_make_tpg()
  nfc: nci: Fix race between rfkill and nci_unregister_device().
  net/mlx5e: Report rx_discards_phy via rx_dropped
  rocker: fix memory leak in rocker_world_port_post_fini()
  Bluetooth: hci_uart: fix null-ptr-deref in hci_uart_write_work
  can: usb_8dev: usb_8dev_read_bulk_callback(): fix URB memory leak
  can: mcba_usb: mcba_usb_read_bulk_callback(): fix URB memory leak
  can: kvaser_usb: kvaser_usb_read_bulk_callback(): fix URB memory leak
  can: ems_usb: ems_usb_read_bulk_callback(): fix URB memory leak
  perf/x86/intel: Do not enable BTS for guests
  netrom: fix double-free in nr_route_frame()
  slimbus: core: fix device reference leak on report present
  slimbus: core: fix runtime PM imbalance on report present
  wifi: rsi: Fix memory corruption due to not set vif driver data size
  wifi: mwifiex: Fix a loop in mwifiex_update_ampdu_rxwinsize()
  wifi: ath10k: fix dma_free_coherent() pointer
  mmc: rtsx_pci_sdmmc: implement sdmmc_card_busy function
  ALSA: usb-audio: Fix use-after-free in snd_usb_mixer_free()
  ALSA: ctxfi: Fix potential OOB access in audio mixer handling
  iio: dac: ad5686: add AD5695R to ad5686_chip_info_tbl
  iio: adc: at91-sama5d2_adc: Fix potential use-after-free in sama5d2_adc driver
  of: fix reference count leak in of_alias_scan()
  leds: led-class: Only Add LED to leds_list when it is fully ready
  net/sched: act_ife: avoid possible NULL deref
  be2net: Fix NULL pointer dereference in be_cmd_get_mac_from_list
  drm/amd/pm: Workaround SI powertune issue on Radeon 430 (v2)
  drm/amd/pm: Don't clear SI SMC table when setting power limit
  usbnet: limit max_mtu based on device's hard_mtu
  mISDN: annotate data-race around dev->work
  ALSA: usb: Increase volume range that triggers a warning
  regmap: Fix race condition in hwspinlock irqsave routine
  iio: adc: ad7280a: handle spi_setup() errors in probe()
  Input: i8042 - add quirk for ASUS Zenbook UX425QA_UM425QA
  w1: fix redundant counter decrement in w1_attach_slave_device()
  comedi: dmm32at: serialize use of paged registers
  crypto: authencesn - reject too-short AAD (assoclen<8) to match ESP/ESN spec
  net/sched: Enforce that teql can only be used as root qdisc
  ipvlan: Make the addrs_lock be per port
  net: fou: rename the source for linking
  netlink: add a proto specification for FOU
  gue: Fix skb memleak with inner IP protocol 0.
  amd-xgbe: avoid misleading per-packet error log
  sctp: move SCTP_CMD_ASSOC_SHKEY right after SCTP_CMD_PEER_INIT
  sctp: sm_statefuns: Fix spelling mistakes
  net: usb: dm9601: remove broken SR9700 support
  macvlan: Fix leaking skb in source mode with nodst option
  btrfs: fix deadlock in wait_current_trans() due to ignored transaction type
  dmaengine: ti: dma-crossbar: fix device leak on am335x route allocation
  dmaengine: ti: dma-crossbar: fix device leak on dra7x route allocation
  dmaengine: lpc18xx-dmamux: fix device leak on route allocation
  dmaengine: bcm-sba-raid: fix device leak on probe
  dmaengine: at_hdmac: fix device leak on of_dma_xlate()
  drm/vmwgfx: Fix an error return check in vmw_compat_shader_add()
  drm/nouveau/disp/nv50-: Set lock_core in curs507a_prepare
  EDAC/i3200: Fix a resource leak in i3200_probe1()
  EDAC/x38: Fix a resource leak in x38_probe1()
  USB: serial: ftdi_sio: add support for PICAXE AXE027 cable
  USB: serial: option: add Telit LE910 MBIM composition
  USB: OHCI/UHCI: Add soft dependencies on ehci_platform
  ALSA: pcm: Improve the fix for race of buffer access at PCM OSS layer
  HID: usbhid: paper over wrong bNumDescriptor field
  dmaengine: omap-dma: fix dma_pool resource leak in error paths
  phy: stm32-usphyc: Fix off by one in probe()
  dmaengine: tegra-adma: Fix use-after-free
  textsearch: describe @list member in ts_ops search
  net/sched: sch_qfq: do not free existing class in qfq_change_class()
  ipv4: ip_gre: make ipgre_header() robust
  macvlan: Use 'hash' iterators to simplify code
  macvlan: Add nodst option to macvlan type source
  pnfs/flexfiles: Fix memory leak in nfs4_ff_alloc_deviceid_node()

Change-Id: I4b06f63aef42258de0c0415a055c47d8e60bba88
2026-03-20 15:21:00 +00:00

539 lines
15 KiB
C

/*
* authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
* derived from authenc.c
*
* Copyright (C) 2010 secunet Security Networks AG
* Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
* Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
*/
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
/*
 * Per-instance context: spawns (references) to the two underlying
 * algorithms an "authencesn(auth,enc)" instance is built from.
 */
struct authenc_esn_instance_ctx {
	struct crypto_ahash_spawn auth;		/* authentication (hash) algorithm */
	struct crypto_skcipher_spawn enc;	/* encryption (skcipher) algorithm */
};

/* Per-transform context, one per crypto_aead handle. */
struct crypto_authenc_esn_ctx {
	unsigned int reqoff;			/* offset of the sub-request inside the
						 * request tail; computed in init_tfm() */
	struct crypto_ahash *auth;		/* hash transform */
	struct crypto_skcipher *enc;		/* cipher transform */
	struct crypto_sync_skcipher *null;	/* null cipher used to copy src to dst */
};

/*
 * Per-request context stored in the AEAD request's private area.
 * @tail is variably sized: it holds an alignmask-aligned digest buffer
 * (computed + received ICV) followed at ctx->reqoff by either an ahash
 * or an skcipher sub-request, whichever is larger.
 */
struct authenc_esn_request_ctx {
	struct scatterlist src[2];	/* scratch sg for scatterwalk_ffwd() on src */
	struct scatterlist dst[2];	/* scratch sg for scatterwalk_ffwd() on dst */
	char tail[];			/* digest buffers + sub-request, see above */
};
/*
 * Complete an AEAD request from an asynchronous callback path, unless
 * the underlying operation is still in flight (-EINPROGRESS), in which
 * case another callback will follow and completion must be deferred.
 */
static void authenc_esn_request_complete(struct aead_request *req, int err)
{
	if (err == -EINPROGRESS)
		return;

	aead_request_complete(req, err);
}
/*
 * Validate the requested ICV (authentication tag) size.  Either no ICV
 * at all (authsize == 0, digest_null case) or at least 4 bytes is
 * accepted; sizes 1-3 are rejected.
 */
static int crypto_authenc_esn_setauthsize(struct crypto_aead *authenc_esn,
					  unsigned int authsize)
{
	switch (authsize) {
	case 1:
	case 2:
	case 3:
		return -EINVAL;
	default:
		return 0;
	}
}
/*
 * Split the combined key blob into an authentication key and an
 * encryption key and program both underlying transforms, forwarding
 * request flags down and result flags back up.  Key material is wiped
 * from the stack on every exit path.
 */
static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
				     unsigned int keylen)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_authenc_keys keys;
	int err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		/* Malformed blob: report a bad key length to the caller. */
		crypto_aead_set_flags(authenc_esn, CRYPTO_TFM_RES_BAD_KEY_LEN);
		err = -EINVAL;
		goto out;
	}

	/* Key the hash first ... */
	crypto_ahash_clear_flags(ctx->auth, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ctx->auth, crypto_aead_get_flags(authenc_esn) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ctx->auth, keys.authkey, keys.authkeylen);
	crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(ctx->auth) &
					   CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	/* ... then the cipher, mirroring the flag handling. */
	crypto_skcipher_clear_flags(ctx->enc, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->enc, crypto_aead_get_flags(authenc_esn) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctx->enc, keys.enckey, keys.enckeylen);
	crypto_aead_set_flags(authenc_esn, crypto_skcipher_get_flags(ctx->enc) &
					   CRYPTO_TFM_RES_MASK);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}
/*
 * Second half of ICV generation, run after the ahash digest completes
 * (synchronously from genicv, or from the ahash callback).  Undoes the
 * temporary first-8-bytes shuffle done by crypto_authenc_esn_genicv()
 * and appends the computed ICV after the ciphertext.
 */
static int crypto_authenc_esn_genicv_tail(struct aead_request *req,
					  unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_ahash *auth = ctx->auth;
	/* Digest was written to the aligned buffer at the start of tail. */
	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
			     crypto_ahash_alignmask(auth) + 1);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);	/* bytes genicv put at offset 4 */
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0); /* stashed seq_hi */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);	/* restore original first 8 bytes */

	/* Overwrite the stashed seq_hi slot with the actual ICV. */
	scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
	return 0;
}
/*
 * ahash completion callback for ICV generation: finish the encrypt
 * path (restore AAD layout, append ICV) and complete the request.
 */
static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq,
					 int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_authenc_esn_genicv_tail(req, 0);

	aead_request_complete(req, err);
}
/*
 * Compute the ICV over the associated data and ciphertext in req->dst.
 *
 * For ESN the digest must be computed with the high-order sequence
 * number bits moved behind the payload (per RFC 4303 ESN ICV rules —
 * layout inferred from the shuffle below; confirm against xfrm/esp
 * callers).  The first 8 AAD bytes are rearranged in place before
 * hashing and restored afterwards by crypto_authenc_esn_genicv_tail().
 */
static int crypto_authenc_esn_genicv(struct aead_request *req,
				     unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	/* Digest output buffer: alignmask-aligned start of the tail. */
	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
			     crypto_ahash_alignmask(auth) + 1);
	/* The ahash sub-request lives at ctx->reqoff within the tail. */
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	/* No ICV configured (digest_null): nothing to compute or append. */
	if (!authsize)
		return 0;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);	/* save first 8 AAD bytes */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);	/* bytes 0-3 -> offset 4 */
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1); /* bytes 4-7 -> end */

	/* Hash assoclen + cryptlen bytes starting 4 bytes in. */
	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, flags,
				   authenc_esn_geniv_ahash_done, req);

	/* Synchronous completion runs the tail inline; async runs it
	 * from authenc_esn_geniv_ahash_done(). */
	return crypto_ahash_digest(ahreq) ?:
	       crypto_authenc_esn_genicv_tail(req, aead_request_flags(req));
}
/*
 * skcipher completion callback for the encrypt path: generate the ICV
 * over the freshly produced ciphertext, then complete the AEAD request
 * (unless still in progress).
 */
static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req,
					    int err)
{
	struct aead_request *areq = req->data;

	err = err ?: crypto_authenc_esn_genicv(areq, 0);

	authenc_esn_request_complete(areq, err);
}
/*
 * Copy the first @len bytes of req->src to req->dst by "encrypting"
 * with the shared null cipher (a sg-to-sg memcpy in crypto clothing).
 */
static int crypto_authenc_esn_copy(struct aead_request *req, unsigned int len)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);

	skcipher_request_set_sync_tfm(skreq, ctx->null);
	skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      NULL, NULL);

	return crypto_skcipher_encrypt(skreq);
}
/*
 * AEAD encrypt entry point: encrypt the payload with the inner
 * skcipher, then compute and append the ICV via
 * crypto_authenc_esn_genicv().
 */
static int crypto_authenc_esn_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	/* skcipher sub-request at ctx->reqoff within the tail. */
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_skcipher *enc = ctx->enc;
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *src, *dst;
	int err;

	/* The ESN shuffle in genicv touches the first 8 AAD bytes plus
	 * 4 more behind the payload; reject AAD too short for that. */
	if (assoclen < 8)
		return -EINVAL;

	/* Skip past the AAD to the plaintext. */
	sg_init_table(areq_ctx->src, 2);
	src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
	dst = src;

	if (req->src != req->dst) {
		/* Out-of-place: first mirror the AAD into dst, then
		 * encrypt into dst past the AAD. */
		err = crypto_authenc_esn_copy(req, assoclen);
		if (err)
			return err;

		sg_init_table(areq_ctx->dst, 2);
		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
	}

	skcipher_request_set_tfm(skreq, enc);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      crypto_authenc_esn_encrypt_done, req);
	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);

	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* Cipher finished synchronously; hash the result now.  The
	 * async case reaches genicv via crypto_authenc_esn_encrypt_done. */
	return crypto_authenc_esn_genicv(req, aead_request_flags(req));
}
/*
 * Second half of decryption, run once the digest over req->dst has
 * been recomputed (or immediately when authsize == 0): restore the
 * original AAD layout, verify the ICV, and on success decrypt the
 * ciphertext in place in req->dst.
 */
static int crypto_authenc_esn_decrypt_tail(struct aead_request *req,
					   unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_ahash *auth = ctx->auth;
	/* ohash = recomputed digest; ihash = received ICV (right after it). */
	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
			      crypto_ahash_alignmask(auth) + 1);
	unsigned int cryptlen = req->cryptlen - authsize;	/* payload without ICV */
	unsigned int assoclen = req->assoclen;
	struct scatterlist *dst = req->dst;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	u32 tmp[2];

	if (!authsize)
		goto decrypt;

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);

	/* Constant-time comparison; mismatch means authentication failure. */
	if (crypto_memneq(ihash, ohash, authsize))
		return -EBADMSG;

decrypt:

	/* Decrypt in place, past the AAD. */
	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);

	skcipher_request_set_tfm(skreq, ctx->enc);
	/* Chain the original completion directly so the AEAD request
	 * completes exactly once, from the skcipher's own callback. */
	skcipher_request_set_callback(skreq, flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);

	return crypto_skcipher_decrypt(skreq);
}
/*
 * ahash completion callback for the decrypt path: verify the ICV and
 * decrypt via decrypt_tail, then complete (unless still in progress).
 */
static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
					  int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_authenc_esn_decrypt_tail(req, 0);

	authenc_esn_request_complete(req, err);
}
/*
 * AEAD decrypt entry point: stash the received ICV, recompute the
 * digest over the (shuffled) AAD + ciphertext, then verify and decrypt
 * in crypto_authenc_esn_decrypt_tail().
 */
static int crypto_authenc_esn_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	/* ohash receives the recomputed digest; ihash the received ICV. */
	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
			      crypto_ahash_alignmask(auth) + 1);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	struct scatterlist *dst = req->dst;
	u32 tmp[2];
	int err;

	/* The ESN shuffle below needs at least 8 bytes of AAD. */
	if (assoclen < 8)
		return -EINVAL;

	cryptlen -= authsize;	/* the trailing ICV is not ciphertext */

	if (req->src != dst) {
		/* Out-of-place: copy AAD + ciphertext into dst first so
		 * all further work happens on dst. */
		err = crypto_authenc_esn_copy(req, assoclen + cryptlen);
		if (err)
			return err;
	}

	/* Save the received ICV before the dst buffer is shuffled. */
	scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
				 authsize, 0);

	/* No ICV configured: skip straight to decryption. */
	if (!authsize)
		goto tail;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);

	/* Digest assoclen + cryptlen bytes starting 4 bytes in. */
	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   authenc_esn_verify_ahash_done, req);

	err = crypto_ahash_digest(ahreq);
	if (err)
		return err;

tail:
	/* Digest done synchronously (or skipped): verify and decrypt now. */
	return crypto_authenc_esn_decrypt_tail(req, aead_request_flags(req));
}
/*
 * Transform constructor: instantiate the hash, cipher and shared null
 * cipher, then size the per-request context.
 *
 * Request tail layout (see struct authenc_esn_request_ctx):
 *   [0 .. reqoff)   two digest buffers (computed + received ICV),
 *                   rounded up to the hash's alignmask
 *   [reqoff .. end) an ahash request or an skcipher request,
 *                   whichever is larger
 */
static int crypto_authenc_esn_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct authenc_esn_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
	struct crypto_sync_skcipher *null;
	int err;

	auth = crypto_spawn_ahash(&ictx->auth);
	if (IS_ERR(auth))
		return PTR_ERR(auth);

	enc = crypto_spawn_skcipher(&ictx->enc);
	err = PTR_ERR(enc);
	if (IS_ERR(enc))
		goto err_free_ahash;

	/* Shared null cipher used by crypto_authenc_esn_copy(). */
	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_skcipher;

	ctx->auth = auth;
	ctx->enc = enc;
	ctx->null = null;

	/* Room for two digests, aligned for the hash's alignmask. */
	ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth),
			    crypto_ahash_alignmask(auth) + 1);

	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct authenc_esn_request_ctx) +
		ctx->reqoff +
		max_t(unsigned int,
		      crypto_ahash_reqsize(auth) +
		      sizeof(struct ahash_request),
		      sizeof(struct skcipher_request) +
		      crypto_skcipher_reqsize(enc)));

	return 0;

err_free_skcipher:
	crypto_free_skcipher(enc);
err_free_ahash:
	crypto_free_ahash(auth);
	return err;
}
/* Transform destructor: release everything acquired in init_tfm(). */
static void crypto_authenc_esn_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->auth);
	crypto_free_skcipher(ctx->enc);
	crypto_put_default_null_skcipher();	/* drops the shared refcount */
}
/* Instance destructor: drop both algorithm spawns, then free memory. */
static void crypto_authenc_esn_free(struct aead_instance *inst)
{
	struct authenc_esn_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->enc);
	crypto_drop_ahash(&ctx->auth);
	kfree(inst);
}
/*
 * Template ->create(): build an "authencesn(auth,enc)" AEAD instance
 * from a hash algorithm (tb[1]) and an skcipher (tb[2]).
 *
 * On success the instance is registered and owns references to both
 * underlying algorithms; on any failure everything acquired so far is
 * released via the goto-cleanup chain.  The reference taken by
 * ahash_attr_alg() is dropped on every path via out_put_auth -> out.
 */
static int crypto_authenc_esn_create(struct crypto_template *tmpl,
				     struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct hash_alg_common *auth;
	struct crypto_alg *auth_base;
	struct skcipher_alg *enc;
	struct authenc_esn_instance_ctx *ctx;
	const char *enc_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
			      CRYPTO_ALG_TYPE_AHASH_MASK |
			      crypto_requires_sync(algt->type, algt->mask));
	if (IS_ERR(auth))
		return PTR_ERR(auth);

	auth_base = &auth->base;

	enc_name = crypto_attr_alg_name(tb[2]);
	err = PTR_ERR(enc_name);
	if (IS_ERR(enc_name))
		goto out_put_auth;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_auth;

	ctx = aead_instance_ctx(inst);

	err = crypto_init_ahash_spawn(&ctx->auth, auth,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_auth;

	enc = crypto_spawn_skcipher_alg(&ctx->enc);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_name,
		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_enc;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_driver_name,
		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_enc;

	/* Instance is async iff either component is async. */
	inst->alg.base.cra_flags = (auth_base->cra_flags |
				    enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
				      auth_base->cra_priority;
	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
	inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
				       enc->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
	inst->alg.maxauthsize = auth->digestsize;

	inst->alg.init = crypto_authenc_esn_init_tfm;
	inst->alg.exit = crypto_authenc_esn_exit_tfm;

	inst->alg.setkey = crypto_authenc_esn_setkey;
	inst->alg.setauthsize = crypto_authenc_esn_setauthsize;
	inst->alg.encrypt = crypto_authenc_esn_encrypt;
	inst->alg.decrypt = crypto_authenc_esn_decrypt;

	/* Was terminated with a comma operator joining it to the next
	 * statement; behavior was identical but it was a typo. */
	inst->free = crypto_authenc_esn_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_enc;

out:
	crypto_mod_put(auth_base);
	return err;

err_drop_enc:
	crypto_drop_skcipher(&ctx->enc);
err_drop_auth:
	crypto_drop_ahash(&ctx->auth);
err_free_inst:
	kfree(inst);
out_put_auth:
	goto out;
}
/* The "authencesn" template; instances are instantiated on demand. */
static struct crypto_template crypto_authenc_esn_tmpl = {
	.name = "authencesn",
	.create = crypto_authenc_esn_create,
	.module = THIS_MODULE,
};
/* Module load: register the authencesn template with the crypto API. */
static int __init crypto_authenc_esn_module_init(void)
{
	return crypto_register_template(&crypto_authenc_esn_tmpl);
}
/* Module unload: unregister the template (tears down live instances). */
static void __exit crypto_authenc_esn_module_exit(void)
{
	crypto_unregister_template(&crypto_authenc_esn_tmpl);
}
module_init(crypto_authenc_esn_module_init);
module_exit(crypto_authenc_esn_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
MODULE_ALIAS_CRYPTO("authencesn");	/* enables auto-loading by template name */