linux/drivers/crypto/sa2ul.c
// SPDX-License-Identifier: GPL-2.0
/*
 * K3 SA2UL crypto accelerator driver
 *
 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
 *
 * Authors:     Keerthy
 *              Vitaly Andrianov
 *              Tero Kristo
 */
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>

#include "sa2ul.h"

/* Byte offset for key in encryption security context */
#define SC_ENC_KEY_OFFSET (1 + 27 + 4)
/* Byte offset for Aux-1 in encryption security context */
#define SC_ENC_AUX1_OFFSET (1 + 27 + 4 + 32)
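
/*
 * Resulting encryption security context layout, as implied by the two
 * offsets above and by sa_set_sc_enc() below (the 4 bytes after the
 * mode control instructions appear to be reserved):
 *
 *   byte  0     : encryption mode selector
 *   bytes 1-27  : mode control instructions (MODE_CONTROL_BYTES)
 *   bytes 28-31 : reserved
 *   bytes 32-63 : cipher key (SC_ENC_KEY_OFFSET)
 *   bytes 64+   : Aux-1 (SC_ENC_AUX1_OFFSET)
 */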

#define SA_CMDL_UPD_ENC         0x0001
#define SA_CMDL_UPD_AUTH        0x0002
#define SA_CMDL_UPD_ENC_IV      0x0004
#define SA_CMDL_UPD_AUTH_IV     0x0008
#define SA_CMDL_UPD_AUX_KEY     0x0010

#define SA_AUTH_SUBKEY_LEN      16
#define SA_CMDL_PAYLOAD_LENGTH_MASK     0xFFFF
#define SA_CMDL_SOP_BYPASS_LEN_MASK     0xFF000000

#define MODE_CONTROL_BYTES      27
#define SA_HASH_PROCESSING      0
#define SA_CRYPTO_PROCESSING    0
#define SA_UPLOAD_HASH_TO_TLR   BIT(6)

#define SA_SW0_FLAGS_MASK       0xF0000
#define SA_SW0_CMDL_INFO_MASK   0x1F00000
#define SA_SW0_CMDL_PRESENT     BIT(4)
#define SA_SW0_ENG_ID_MASK      0x3E000000
#define SA_SW0_DEST_INFO_PRESENT        BIT(30)
#define SA_SW2_EGRESS_LENGTH            0xFF000000
#define SA_BASIC_HASH           0x10

#define SHA256_DIGEST_WORDS    8
/* Make 32-bit word from 4 bytes */
#define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
                                   ((b2) << 8) | (b3))

/* size of SCCTL structure in bytes */
#define SA_SCCTL_SZ 16

/* Max Authentication tag size */
#define SA_MAX_AUTH_TAG_SZ 64

enum sa_algo_id {
        SA_ALG_CBC_AES = 0,
        SA_ALG_EBC_AES,
        SA_ALG_CBC_DES3,
        SA_ALG_ECB_DES3,
        SA_ALG_SHA1,
        SA_ALG_SHA256,
        SA_ALG_SHA512,
        SA_ALG_AUTHENC_SHA1_AES,
        SA_ALG_AUTHENC_SHA256_AES,
};

struct sa_match_data {
        u8 priv;
        u8 priv_id;
        u32 supported_algos;
        bool skip_engine_control;
};

static struct device *sa_k3_dev;

/**
 * struct sa_cmdl_cfg - Command label configuration descriptor
 * @aalg: authentication algorithm ID
 * @enc_eng_id: Encryption Engine ID supported by the SA hardware
 * @auth_eng_id: Authentication Engine ID
 * @iv_size: Initialization Vector size
 * @akey: Authentication key
 * @akey_len: Authentication key length
 * @enc: True, if this is an encode request
 */
struct sa_cmdl_cfg {
        int aalg;
        u8 enc_eng_id;
        u8 auth_eng_id;
        u8 iv_size;
        const u8 *akey;
        u16 akey_len;
        bool enc;
};

/**
 * struct algo_data - Crypto algorithm specific data
 * @enc_eng: Encryption engine info structure
 * @auth_eng: Authentication engine info structure
 * @auth_ctrl: Authentication control word
 * @hash_size: Size of digest
 * @iv_idx: iv index in psdata
 * @iv_out_size: iv out size
 * @ealg_id: Encryption Algorithm ID
 * @aalg_id: Authentication algorithm ID
 * @mci_enc: Mode Control Instruction for Encryption algorithm
 * @mci_dec: Mode Control Instruction for Decryption
 * @inv_key: Whether the encryption algorithm demands key inversion
 * @ctx: Pointer to the algorithm context
 * @keyed_mac: Whether the authentication algorithm has key
 * @prep_iopad: Function pointer to generate intermediate ipad/opad
 */
struct algo_data {
        struct sa_eng_info enc_eng;
        struct sa_eng_info auth_eng;
        u8 auth_ctrl;
        u8 hash_size;
        u8 iv_idx;
        u8 iv_out_size;
        u8 ealg_id;
        u8 aalg_id;
        u8 *mci_enc;
        u8 *mci_dec;
        bool inv_key;
        struct sa_tfm_ctx *ctx;
        bool keyed_mac;
        void (*prep_iopad)(struct algo_data *algo, const u8 *key,
                           u16 key_sz, __be32 *ipad, __be32 *opad);
};

/**
 * struct sa_alg_tmpl: A generic template encompassing crypto/aead algorithms
 * @type: Type of the crypto algorithm.
 * @alg: Union of crypto algorithm definitions.
 * @registered: Flag indicating if the crypto algorithm is already registered
 */
struct sa_alg_tmpl {
        u32 type;               /* CRYPTO_ALG_TYPE from <linux/crypto.h> */
        union {
                struct skcipher_alg skcipher;
                struct ahash_alg ahash;
                struct aead_alg aead;
        } alg;
        bool registered;
};

/**
 * struct sa_mapped_sg: scatterlist information for tx and rx
 * @mapped: Set to true if the @sgt is mapped
 * @dir: mapping direction used for @sgt
 * @static_sg: Static scatterlist entry for overriding data
 * @split_sg: Set if the sg is split and needs to be freed up
 * @sgt: scatterlist table for DMA API use
 */
struct sa_mapped_sg {
        bool mapped;
        enum dma_data_direction dir;
        struct scatterlist static_sg;
        struct scatterlist *split_sg;
        struct sg_table sgt;
};

/**
 * struct sa_rx_data: RX Packet miscellaneous data place holder
 * @req: crypto request data pointer
 * @ddev: pointer to the DMA device
 * @tx_in: dma_async_tx_descriptor pointer for rx channel
 * @mapped_sg: Information on tx (0) and rx (1) scatterlist DMA mapping
 * @enc: Flag indicating either encryption or decryption
 * @enc_iv_size: Initialisation vector size
 * @iv_idx: Initialisation vector index
 */
struct sa_rx_data {
        void *req;
        struct device *ddev;
        struct dma_async_tx_descriptor *tx_in;
        struct sa_mapped_sg mapped_sg[2];
        u8 enc;
        u8 enc_iv_size;
        u8 iv_idx;
};

/**
 * struct sa_req: SA request definition
 * @dev: device for the request
 * @size: total data to be transmitted via DMA
 * @enc_offset: offset of cipher data
 * @enc_size: data to be passed to cipher engine
 * @enc_iv: cipher IV
 * @auth_offset: offset of the authentication data
 * @auth_size: size of the authentication data
 * @auth_iv: authentication IV
 * @type: algorithm type for the request
 * @cmdl: command label pointer
 * @base: pointer to the base request
 * @ctx: pointer to the algorithm context data
 * @enc: true if this is an encode request
 * @src: source data
 * @dst: destination data
 * @callback: DMA callback for the request
 * @mdata_size: metadata size passed to DMA
 */
struct sa_req {
        struct device *dev;
        u16 size;
        u8 enc_offset;
        u16 enc_size;
        u8 *enc_iv;
        u8 auth_offset;
        u16 auth_size;
        u8 *auth_iv;
        u32 type;
        u32 *cmdl;
        struct crypto_async_request *base;
        struct sa_tfm_ctx *ctx;
        bool enc;
        struct scatterlist *src;
        struct scatterlist *dst;
        dma_async_tx_callback callback;
        u16 mdata_size;
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption
 */
static u8 mci_cbc_enc_array[3][MODE_CONTROL_BYTES] = {
        {       0x61, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x61, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x61, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption
 */
static u8 mci_cbc_dec_array[3][MODE_CONTROL_BYTES] = {
        {       0x71, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x71, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x71, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption, no-IV variant
 */
static u8 mci_cbc_enc_no_iv_array[3][MODE_CONTROL_BYTES] = {
        {       0x21, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption, no-IV variant
 */
static u8 mci_cbc_dec_no_iv_array[3][MODE_CONTROL_BYTES] = {
        {       0x31, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for encryption
 */
static u8 mci_ecb_enc_array[3][MODE_CONTROL_BYTES] = {
        {       0x21, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for decryption
 */
static u8 mci_ecb_dec_array[3][MODE_CONTROL_BYTES] = {
        {       0x31, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for the 3DES algorithm,
 * for CBC (Cipher Block Chaining) and ECB mode,
 * encryption and decryption respectively
 */
static u8 mci_cbc_3des_enc_array[MODE_CONTROL_BYTES] = {
        0x60, 0x00, 0x00, 0x18, 0x88, 0x52, 0xaa, 0x4b, 0x7e, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

static u8 mci_cbc_3des_dec_array[MODE_CONTROL_BYTES] = {
        0x70, 0x00, 0x00, 0x85, 0x0a, 0xca, 0x98, 0xf4, 0x40, 0xc0, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_enc_array[MODE_CONTROL_BYTES] = {
        0x20, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_dec_array[MODE_CONTROL_BYTES] = {
        0x30, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

/*
 * Perform 16-byte (128-bit) swizzling.
 * The SA2UL expects the security context in little-endian format on a
 * 128-bit (16-byte) wide bus, hence swap 16 bytes at a time from
 * higher to lower address.
 */
static void sa_swiz_128(u8 *in, u16 len)
{
        u8 data[16];
        int i, j;

        for (i = 0; i < len; i += 16) {
                memcpy(data, &in[i], 16);
                for (j = 0; j < 16; j++)
                        in[i + j] = data[15 - j];
        }
}

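/*
 * Example: after sa_swiz_128(buf, 16), buf holds its original 16 bytes
 * in reverse order (buf[0] <-> buf[15], buf[1] <-> buf[14], ...); for
 * larger lengths the same reversal is applied independently to each
 * 16-byte group.
 */
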
/* Prepare the ipad and opad from the key per the HMAC construction (RFC 2104) */
static void prepare_kipad(u8 *k_ipad, const u8 *key, u16 key_sz)
{
        int i;

        for (i = 0; i < key_sz; i++)
                k_ipad[i] = key[i] ^ 0x36;

        /* Instead of XOR with 0 */
        for (; i < SHA1_BLOCK_SIZE; i++)
                k_ipad[i] = 0x36;
}

static void prepare_kopad(u8 *k_opad, const u8 *key, u16 key_sz)
{
        int i;

        for (i = 0; i < key_sz; i++)
                k_opad[i] = key[i] ^ 0x5c;

        /* Instead of XOR with 0 */
        for (; i < SHA1_BLOCK_SIZE; i++)
                k_opad[i] = 0x5c;
}
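
/*
 * Note: 0x36 and 0x5c are the standard HMAC ipad/opad constants
 * (RFC 2104), and the padding loops rely on SHA1_BLOCK_SIZE (64 bytes)
 * also being the SHA-256 block size, so the same helpers serve both
 * hash algorithms used by this driver.
 */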

static void sa_export_shash(void *state, struct shash_desc *hash,
                            int digest_size, __be32 *out)
{
        struct sha1_state *sha1;
        struct sha256_state *sha256;
        u32 *result;

        switch (digest_size) {
        case SHA1_DIGEST_SIZE:
                sha1 = state;
                result = sha1->state;
                break;
        case SHA256_DIGEST_SIZE:
                sha256 = state;
                result = sha256->state;
                break;
        default:
                dev_err(sa_k3_dev, "%s: bad digest_size=%d\n", __func__,
                        digest_size);
                return;
        }

        crypto_shash_export(hash, state);

        cpu_to_be32_array(out, result, digest_size / 4);
}

static void sa_prepare_iopads(struct algo_data *data, const u8 *key,
                              u16 key_sz, __be32 *ipad, __be32 *opad)
{
        SHASH_DESC_ON_STACK(shash, data->ctx->shash);
        int block_size = crypto_shash_blocksize(data->ctx->shash);
        int digest_size = crypto_shash_digestsize(data->ctx->shash);
        union {
                struct sha1_state sha1;
                struct sha256_state sha256;
                u8 k_pad[SHA1_BLOCK_SIZE];
        } sha;

        shash->tfm = data->ctx->shash;

        prepare_kipad(sha.k_pad, key, key_sz);

        crypto_shash_init(shash);
        crypto_shash_update(shash, sha.k_pad, block_size);
        sa_export_shash(&sha, shash, digest_size, ipad);

        prepare_kopad(sha.k_pad, key, key_sz);

        crypto_shash_init(shash);
        crypto_shash_update(shash, sha.k_pad, block_size);

        sa_export_shash(&sha, shash, digest_size, opad);

        memzero_explicit(&sha, sizeof(sha));
}
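
/*
 * The values exported above are the hash state after absorbing exactly
 * one block (the XOR-ed key pad), i.e. the precomputed inner and outer
 * HMAC digests that the hardware then continues hashing from.
 */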

/* Derive the inverse key used in AES-CBC decryption operation */
static inline int sa_aes_inv_key(u8 *inv_key, const u8 *key, u16 key_sz)
{
        struct crypto_aes_ctx ctx;
        int key_pos;

        if (aes_expandkey(&ctx, key, key_sz)) {
                dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
                return -EINVAL;
        }

        /* Workaround to get the right inverse for AES_KEYSIZE_192 keys */
        if (key_sz == AES_KEYSIZE_192) {
                ctx.key_enc[52] = ctx.key_enc[51] ^ ctx.key_enc[46];
                ctx.key_enc[53] = ctx.key_enc[52] ^ ctx.key_enc[47];
        }

        /* Based on the crypto_aes_expand_key() logic */
        switch (key_sz) {
        case AES_KEYSIZE_128:
        case AES_KEYSIZE_192:
                key_pos = key_sz + 24;
                break;

        case AES_KEYSIZE_256:
                key_pos = key_sz + 24 - 4;
                break;

        default:
                dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
                return -EINVAL;
        }

        memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);
        return 0;
}
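
/*
 * Note that key_pos indexes u32 words of ctx.key_enc, so the "inverse
 * key" is the tail of the expanded key schedule: for AES-128 this is
 * words 40-43, the last round key, which the engine presumably uses to
 * run the decryption rounds from the end of the schedule backwards.
 */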

/* Set Security context for the encryption engine */
static int sa_set_sc_enc(struct algo_data *ad, const u8 *key, u16 key_sz,
                         u8 enc, u8 *sc_buf)
{
        const u8 *mci = NULL;

        /* Set Encryption mode selector to crypto processing */
        sc_buf[0] = SA_CRYPTO_PROCESSING;

        if (enc)
                mci = ad->mci_enc;
        else
                mci = ad->mci_dec;

        /* Set the mode control instructions in security context */
        if (mci)
                memcpy(&sc_buf[1], mci, MODE_CONTROL_BYTES);

        /* For AES-CBC decryption get the inverse key */
        if (ad->inv_key && !enc) {
                if (sa_aes_inv_key(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz))
                        return -EINVAL;
        /* For all other cases: key is used */
        } else {
                memcpy(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz);
        }

        return 0;
}

/* Set Security context for the authentication engine */
static void sa_set_sc_auth(struct algo_data *ad, const u8 *key, u16 key_sz,
                           u8 *sc_buf)
{
        __be32 *ipad = (void *)(sc_buf + 32);
        __be32 *opad = (void *)(sc_buf + 64);

        /* Set Authentication mode selector to hash processing */
        sc_buf[0] = SA_HASH_PROCESSING;
        /* Auth SW ctrl word: bit[6]=1 (upload computed hash to TLR section) */
        sc_buf[1] = SA_UPLOAD_HASH_TO_TLR;
        sc_buf[1] |= ad->auth_ctrl;

        /* Copy the keys or ipad/opad */
        if (ad->keyed_mac) {
                ad->prep_iopad(ad, key, key_sz, ipad, opad);
        } else {
                /* basic hash */
                sc_buf[1] |= SA_BASIC_HASH;
        }
}
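
/*
 * Layout note: within the authentication security context the
 * precomputed ipad digest is stored at byte offset 32 and the opad
 * digest at byte offset 64, matching the pointers set up above.
 */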

static inline void sa_copy_iv(__be32 *out, const u8 *iv, bool size16)
{
        int j;

        for (j = 0; j < ((size16) ? 4 : 2); j++) {
                *out = cpu_to_be32(*((u32 *)iv));
                iv += 4;
                out++;
        }
}

/* Format general command label */
static int sa_format_cmdl_gen(struct sa_cmdl_cfg *cfg, u8 *cmdl,
                              struct sa_cmdl_upd_info *upd_info)
{
        u8 enc_offset = 0, auth_offset = 0, total = 0;
        u8 enc_next_eng = SA_ENG_ID_OUTPORT2;
        u8 auth_next_eng = SA_ENG_ID_OUTPORT2;
        u32 *word_ptr = (u32 *)cmdl;
        int i;

        /* Clear the command label */
        memzero_explicit(cmdl, (SA_MAX_CMDL_WORDS * sizeof(u32)));

        /* Initialize the command update structure */
        memzero_explicit(upd_info, sizeof(*upd_info));

        if (cfg->enc_eng_id && cfg->auth_eng_id) {
                if (cfg->enc) {
                        auth_offset = SA_CMDL_HEADER_SIZE_BYTES;
                        enc_next_eng = cfg->auth_eng_id;

                        if (cfg->iv_size)
                                auth_offset += cfg->iv_size;
                } else {
                        enc_offset = SA_CMDL_HEADER_SIZE_BYTES;
                        auth_next_eng = cfg->enc_eng_id;
                }
        }

        if (cfg->enc_eng_id) {
                upd_info->flags |= SA_CMDL_UPD_ENC;
                upd_info->enc_size.index = enc_offset >> 2;
                upd_info->enc_offset.index = upd_info->enc_size.index + 1;
                /* Encryption command label */
                cmdl[enc_offset + SA_CMDL_OFFSET_NESC] = enc_next_eng;

                /* Encryption modes requiring IV */
                if (cfg->iv_size) {
                        upd_info->flags |= SA_CMDL_UPD_ENC_IV;
                        upd_info->enc_iv.index =
                                (enc_offset + SA_CMDL_HEADER_SIZE_BYTES) >> 2;
                        upd_info->enc_iv.size = cfg->iv_size;

                        cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
                                SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;

                        cmdl[enc_offset + SA_CMDL_OFFSET_OPTION_CTRL1] =
                                (SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
                        total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
                } else {
                        cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
                                                SA_CMDL_HEADER_SIZE_BYTES;
                        total += SA_CMDL_HEADER_SIZE_BYTES;
                }
        }

        if (cfg->auth_eng_id) {
                upd_info->flags |= SA_CMDL_UPD_AUTH;
                upd_info->auth_size.index = auth_offset >> 2;
                upd_info->auth_offset.index = upd_info->auth_size.index + 1;
                cmdl[auth_offset + SA_CMDL_OFFSET_NESC] = auth_next_eng;
                cmdl[auth_offset + SA_CMDL_OFFSET_LABEL_LEN] =
                        SA_CMDL_HEADER_SIZE_BYTES;
                total += SA_CMDL_HEADER_SIZE_BYTES;
        }

        total = roundup(total, 8);

        for (i = 0; i < total / 4; i++)
                word_ptr[i] = swab32(word_ptr[i]);

        return total;
}
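
/*
 * The result is one SA_CMDL_HEADER_SIZE_BYTES command label per active
 * engine, plus the IV appended to the encryption label when one is
 * needed. For encryption the cipher label comes first and chains to
 * the authentication engine; for decryption the order is reversed. The
 * total length is rounded up to 8 bytes and byte-swapped per 32-bit
 * word for the hardware.
 */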

/* Update Command label */
static inline void sa_update_cmdl(struct sa_req *req, u32 *cmdl,
                                  struct sa_cmdl_upd_info *upd_info)
{
        int i = 0, j;

        if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
                cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
                cmdl[upd_info->enc_size.index] |= req->enc_size;
                cmdl[upd_info->enc_offset.index] &=
                                                ~SA_CMDL_SOP_BYPASS_LEN_MASK;
                cmdl[upd_info->enc_offset.index] |=
                        ((u32)req->enc_offset <<
                         __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));

                if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
                        __be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
                        u32 *enc_iv = (u32 *)req->enc_iv;

                        for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
                                data[j] = cpu_to_be32(*enc_iv);
                                enc_iv++;
                        }
                }
        }

        if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
                cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
                cmdl[upd_info->auth_size.index] |= req->auth_size;
                cmdl[upd_info->auth_offset.index] &=
                        ~SA_CMDL_SOP_BYPASS_LEN_MASK;
                cmdl[upd_info->auth_offset.index] |=
                        ((u32)req->auth_offset <<
                         __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));
                if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
                        sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
                                   req->auth_iv,
                                   (upd_info->auth_iv.size > 8));
                }
                if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
                        int offset = (req->auth_size & 0xF) ? 4 : 0;

                        memcpy(&cmdl[upd_info->aux_key_info.index],
                               &upd_info->aux_key[offset], 16);
                }
        }
}

/* Format SWINFO words to be sent to SA */
static
void sa_set_swinfo(u8 eng_id, u16 sc_id, dma_addr_t sc_phys,
                   u8 cmdl_present, u8 cmdl_offset, u8 flags,
                   u8 hash_size, u32 *swinfo)
{
        swinfo[0] = sc_id;
        swinfo[0] |= (flags << __ffs(SA_SW0_FLAGS_MASK));
        if (likely(cmdl_present))
                swinfo[0] |= ((cmdl_offset | SA_SW0_CMDL_PRESENT) <<
                                                __ffs(SA_SW0_CMDL_INFO_MASK));
        swinfo[0] |= (eng_id << __ffs(SA_SW0_ENG_ID_MASK));

        swinfo[0] |= SA_SW0_DEST_INFO_PRESENT;
        swinfo[1] = (u32)(sc_phys & 0xFFFFFFFFULL);
        swinfo[2] = (u32)((sc_phys & 0xFFFFFFFF00000000ULL) >> 32);
        swinfo[2] |= (hash_size << __ffs(SA_SW2_EGRESS_LENGTH));
}
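
/*
 * swinfo[0] packs the security context ID, flags, command label info
 * and engine ID into a single word; swinfo[1] and swinfo[2] carry the
 * low and high halves of the 64-bit security context address, with the
 * egress hash length folded into the top byte of swinfo[2].
 */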

/* Dump the security context */
static void sa_dump_sc(u8 *buf, dma_addr_t dma_addr)
{
#ifdef DEBUG
        dev_info(sa_k3_dev, "Security context dump:: 0x%pad\n", &dma_addr);
        print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
                       16, 1, buf, SA_CTX_MAX_SZ, false);
#endif
}

static
int sa_init_sc(struct sa_ctx_info *ctx, const struct sa_match_data *match_data,
               const u8 *enc_key, u16 enc_key_sz,
               const u8 *auth_key, u16 auth_key_sz,
               struct algo_data *ad, u8 enc, u32 *swinfo)
{
        int enc_sc_offset = 0;
        int auth_sc_offset = 0;
        u8 *sc_buf = ctx->sc;
        u16 sc_id = ctx->sc_id;
        u8 first_engine = 0;

        memzero_explicit(sc_buf, SA_CTX_MAX_SZ);

        if (ad->auth_eng.eng_id) {
                if (enc)
                        first_engine = ad->enc_eng.eng_id;
                else
                        first_engine = ad->auth_eng.eng_id;

                enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
                auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
                sc_buf[1] = SA_SCCTL_FE_AUTH_ENC;
                if (!ad->hash_size)
                        return -EINVAL;
                ad->hash_size = roundup(ad->hash_size, 8);

        } else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
                enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
                first_engine = ad->enc_eng.eng_id;
                sc_buf[1] = SA_SCCTL_FE_ENC;
                ad->hash_size = ad->iv_out_size;
        }

        /* SCCTL Owner info: 0=host, 1=CP_ACE */
        sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0;
        memcpy(&sc_buf[2], &sc_id, 2);
        sc_buf[4] = 0x0;
        sc_buf[5] = match_data->priv_id;
        sc_buf[6] = match_data->priv;
        sc_buf[7] = 0x0;

        /* Prepare context for encryption engine */
        if (ad->enc_eng.sc_size) {
                if (sa_set_sc_enc(ad, enc_key, enc_key_sz, enc,
                                  &sc_buf[enc_sc_offset]))
                        return -EINVAL;
        }

        /* Prepare context for authentication engine */
        if (ad->auth_eng.sc_size)
                sa_set_sc_auth(ad, auth_key, auth_key_sz,
                               &sc_buf[auth_sc_offset]);

        /* Set the ownership of context to CP_ACE */
        sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0x80;

        /* swizzle the security context */
        sa_swiz_128(sc_buf, SA_CTX_MAX_SZ);

        sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
                      SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);

        sa_dump_sc(sc_buf, ctx->sc_phys);

        return 0;
}

/* Free the per direction context memory */
static void sa_free_ctx_info(struct sa_ctx_info *ctx,
                             struct sa_crypto_data *data)
{
        unsigned long bn;

        bn = ctx->sc_id - data->sc_id_start;
        spin_lock(&data->scid_lock);
        __clear_bit(bn, data->ctx_bm);
        data->sc_id--;
        spin_unlock(&data->scid_lock);

        if (ctx->sc) {
                dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
                ctx->sc = NULL;
        }
}

static int sa_init_ctx_info(struct sa_ctx_info *ctx,
                            struct sa_crypto_data *data)
{
        unsigned long bn;
        int err;

        spin_lock(&data->scid_lock);
        bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
        __set_bit(bn, data->ctx_bm);
        data->sc_id++;
        spin_unlock(&data->scid_lock);

        ctx->sc_id = (u16)(data->sc_id_start + bn);

        ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
        if (!ctx->sc) {
                dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
                err = -ENOMEM;
                goto scid_rollback;
        }

        return 0;

scid_rollback:
        spin_lock(&data->scid_lock);
        __clear_bit(bn, data->ctx_bm);
        data->sc_id--;
        spin_unlock(&data->scid_lock);

        return err;
}
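
/*
 * Security context IDs are allocated from the ctx_bm bitmap under
 * scid_lock, with data->sc_id tracking how many are in use; on
 * allocation failure the bitmap update is rolled back so the ID can
 * be reused.
 */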

static void sa_cipher_cra_exit(struct crypto_skcipher *tfm)
{
        struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);

        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
                ctx->dec.sc_id, &ctx->dec.sc_phys);

        sa_free_ctx_info(&ctx->enc, data);
        sa_free_ctx_info(&ctx->dec, data);

        crypto_free_skcipher(ctx->fallback.skcipher);
}

static int sa_cipher_cra_init(struct crypto_skcipher *tfm)
{
        struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
        const char *name = crypto_tfm_alg_name(&tfm->base);
        struct crypto_skcipher *child;
        int ret;

        memzero_explicit(ctx, sizeof(*ctx));
        ctx->dev_data = data;

        ret = sa_init_ctx_info(&ctx->enc, data);
        if (ret)
                return ret;
        ret = sa_init_ctx_info(&ctx->dec, data);
        if (ret) {
                sa_free_ctx_info(&ctx->enc, data);
                return ret;
        }

        child = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(child)) {
                dev_err(sa_k3_dev, "Error allocating fallback algo %s\n", name);
                return PTR_ERR(child);
        }

        ctx->fallback.skcipher = child;
        crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
                                         sizeof(struct skcipher_request));

        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
                ctx->dec.sc_id, &ctx->dec.sc_phys);
        return 0;
}

static int sa_cipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                            unsigned int keylen, struct algo_data *ad)
{
        struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *child = ctx->fallback.skcipher;
        int cmdl_len;
        struct sa_cmdl_cfg cfg;
        int ret;

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;

        ad->enc_eng.eng_id = SA_ENG_ID_EM1;
        ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;

        memzero_explicit(&cfg, sizeof(cfg));
        cfg.enc_eng_id = ad->enc_eng.eng_id;
        cfg.iv_size = crypto_skcipher_ivsize(tfm);

        crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(child, tfm->base.crt_flags &
                                         CRYPTO_TFM_REQ_MASK);
        ret = crypto_skcipher_setkey(child, key, keylen);
        if (ret)
                return ret;

        /* Setup Encryption Security Context & Command label template */
        if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, key, keylen, NULL, 0,
                       ad, 1, &ctx->enc.epib[1]))
                goto badkey;

        cmdl_len = sa_format_cmdl_gen(&cfg,
                                      (u8 *)ctx->enc.cmdl,
                                      &ctx->enc.cmdl_upd_info);
        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
                goto badkey;

        ctx->enc.cmdl_size = cmdl_len;

        /* Setup Decryption Security Context & Command label template */
        if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, key, keylen, NULL, 0,
                       ad, 0, &ctx->dec.epib[1]))
                goto badkey;

        cfg.enc_eng_id = ad->enc_eng.eng_id;
        cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
                                      &ctx->dec.cmdl_upd_info);

        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
                goto badkey;

        ctx->dec.cmdl_size = cmdl_len;
        ctx->iv_idx = ad->iv_idx;

        return 0;

badkey:
        dev_err(sa_k3_dev, "%s: badkey\n", __func__);
        return -EINVAL;
}

static int sa_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct algo_data ad = { 0 };
        /* Convert the key size (16/24/32) to the key size index (0/1/2) */
        int key_idx = (keylen >> 3) - 2;

        if (key_idx >= 3)
                return -EINVAL;

        ad.mci_enc = mci_cbc_enc_array[key_idx];
        ad.mci_dec = mci_cbc_dec_array[key_idx];
        ad.inv_key = true;
        ad.ealg_id = SA_EALG_ID_AES_CBC;
        ad.iv_idx = 4;
        ad.iv_out_size = 16;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct algo_data ad = { 0 };
        /* Convert the key size (16/24/32) to the key size index (0/1/2) */
        int key_idx = (keylen >> 3) - 2;

        if (key_idx >= 3)
                return -EINVAL;

        ad.mci_enc = mci_ecb_enc_array[key_idx];
        ad.mci_dec = mci_ecb_dec_array[key_idx];
        ad.inv_key = true;
        ad.ealg_id = SA_EALG_ID_AES_ECB;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_3des_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct algo_data ad = { 0 };

        ad.mci_enc = mci_cbc_3des_enc_array;
        ad.mci_dec = mci_cbc_3des_dec_array;
        ad.ealg_id = SA_EALG_ID_3DES_CBC;
        ad.iv_idx = 6;
        ad.iv_out_size = 8;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_3des_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct algo_data ad = { 0 };

        ad.mci_enc = mci_ecb_3des_enc_array;
        ad.mci_dec = mci_ecb_3des_dec_array;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static void sa_sync_from_device(struct sa_rx_data *rxd)
{
        struct sg_table *sgt;

        if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
                sgt = &rxd->mapped_sg[0].sgt;
        else
                sgt = &rxd->mapped_sg[1].sgt;

        dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
}

static void sa_free_sa_rx_data(struct sa_rx_data *rxd)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
                struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];

                if (mapped_sg->mapped) {
                        dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
                                          mapped_sg->dir, 0);
                        kfree(mapped_sg->split_sg);
                }
        }

        kfree(rxd);
}

static void sa_aes_dma_in_callback(void *data)
{
        struct sa_rx_data *rxd = (struct sa_rx_data *)data;
        struct skcipher_request *req;
        u32 *result;
        __be32 *mdptr;
        size_t ml, pl;
        int i;

        sa_sync_from_device(rxd);
        req = container_of(rxd->req, struct skcipher_request, base);

        if (req->iv) {
                mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in,
                                                                  &pl, &ml);
                result = (u32 *)req->iv;

                for (i = 0; i < (rxd->enc_iv_size / 4); i++)
                        result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
        }

        sa_free_sa_rx_data(rxd);

        skcipher_request_complete(req, 0);
}

static void
sa_prepare_tx_desc(u32 *mdptr, u32 pslen, u32 *psdata, u32 epiblen, u32 *epib)
{
        u32 *out, *in;
        int i;

        for (out = mdptr, in = epib, i = 0; i < epiblen / sizeof(u32); i++)
                *out++ = *in++;

        mdptr[4] = (0xFFFF << 16);
        for (out = &mdptr[5], in = psdata, i = 0;
             i < pslen / sizeof(u32); i++)
                *out++ = *in++;
}
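
/*
 * Metadata layout assembled above, as inferred from this helper: words
 * 0-3 carry the EPIB, word 4 holds a 0xFFFF marker in its upper half,
 * and the command label (psdata) follows from word 5 onwards.
 */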

static int sa_run(struct sa_req *req)
{
        struct sa_rx_data *rxd;
        gfp_t gfp_flags;
        u32 cmdl[SA_MAX_CMDL_WORDS];
        struct sa_crypto_data *pdata = dev_get_drvdata(sa_k3_dev);
        struct device *ddev;
        struct dma_chan *dma_rx;
        int sg_nents, src_nents, dst_nents;
        struct scatterlist *src, *dst;
        size_t pl, ml, split_size;
        struct sa_ctx_info *sa_ctx = req->enc ? &req->ctx->enc : &req->ctx->dec;
        int ret;
        struct dma_async_tx_descriptor *tx_out;
        u32 *mdptr;
        bool diff_dst;
        enum dma_data_direction dir_src;
        struct sa_mapped_sg *mapped_sg;

        gfp_flags = req->base->flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
                GFP_KERNEL : GFP_ATOMIC;

        rxd = kzalloc(sizeof(*rxd), gfp_flags);
        if (!rxd)
                return -ENOMEM;

        if (req->src != req->dst) {
                diff_dst = true;
                dir_src = DMA_TO_DEVICE;
        } else {
                diff_dst = false;
                dir_src = DMA_BIDIRECTIONAL;
        }

        /*
         * SA2UL has an interesting feature where the receive DMA channel
         * is selected based on the data passed to the engine. Within the
         * transition range, there is also a space where it is impossible
         * to determine where the data will end up, and this should be
         * avoided. This will be handled by the SW fallback mechanism by
         * the individual algorithm implementations.
         */
        if (req->size >= 256)
                dma_rx = pdata->dma_rx2;
        else
                dma_rx = pdata->dma_rx1;

        ddev = dmaengine_get_dma_device(pdata->dma_tx);
        rxd->ddev = ddev;

        memcpy(cmdl, sa_ctx->cmdl, sa_ctx->cmdl_size);

        sa_update_cmdl(req, cmdl, &sa_ctx->cmdl_upd_info);

        if (req->type != CRYPTO_ALG_TYPE_AHASH) {
                if (req->enc)
                        req->type |=
                                (SA_REQ_SUBTYPE_ENC << SA_REQ_SUBTYPE_SHIFT);
                else
                        req->type |=
                                (SA_REQ_SUBTYPE_DEC << SA_REQ_SUBTYPE_SHIFT);
        }

        cmdl[sa_ctx->cmdl_size / sizeof(u32)] = req->type;

        /*
         * Map the packets, first we check if the data fits into a single
         * sg entry and use that if possible. If it does not fit, we check
         * if we need to do sg_split to align the scatterlist data on the
         * actual data size being processed by the crypto engine.
         */
        src = req->src;
        sg_nents = sg_nents_for_len(src, req->size);

        split_size = req->size;

        mapped_sg = &rxd->mapped_sg[0];
        if (sg_nents == 1 && split_size <= req->src->length) {
                src = &mapped_sg->static_sg;
                src_nents = 1;
                sg_init_table(src, 1);
                sg_set_page(src, sg_page(req->src), split_size,
                            req->src->offset);

                mapped_sg->sgt.sgl = src;
                mapped_sg->sgt.orig_nents = src_nents;
                ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
                if (ret) {
                        kfree(rxd);
                        return ret;
                }

                mapped_sg->dir = dir_src;
                mapped_sg->mapped = true;
        } else {
                mapped_sg->sgt.sgl = req->src;
                mapped_sg->sgt.orig_nents = sg_nents;
                ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
                if (ret) {
                        kfree(rxd);
                        return ret;
                }

                mapped_sg->dir = dir_src;
                mapped_sg->mapped = true;

                ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents, 0, 1,
                               &split_size, &src, &src_nents, gfp_flags);
                if (ret) {
                        src_nents = mapped_sg->sgt.nents;
                        src = mapped_sg->sgt.sgl;
                } else {
                        mapped_sg->split_sg = src;
                }
        }

        dma_sync_sgtable_for_device(ddev, &mapped_sg->sgt, DMA_TO_DEVICE);

        if (!diff_dst) {
                dst_nents = src_nents;
                dst = src;
        } else {
                dst_nents = sg_nents_for_len(req->dst, req->size);
                mapped_sg = &rxd->mapped_sg[1];

                if (dst_nents == 1 && split_size <= req->dst->length) {
                        dst = &mapped_sg->static_sg;
                        dst_nents = 1;
                        sg_init_table(dst, 1);
                        sg_set_page(dst, sg_page(req->dst), split_size,
                                    req->dst->offset);

                        mapped_sg->sgt.sgl = dst;
                        mapped_sg->sgt.orig_nents = dst_nents;
                        ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
                                              DMA_FROM_DEVICE, 0);
                        if (ret)
                                goto err_cleanup;

                        mapped_sg->dir = DMA_FROM_DEVICE;
                        mapped_sg->mapped = true;
                } else {
                        mapped_sg->sgt.sgl = req->dst;
                        mapped_sg->sgt.orig_nents = dst_nents;
                        ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
                                              DMA_FROM_DEVICE, 0);
                        if (ret)
                                goto err_cleanup;

                        mapped_sg->dir = DMA_FROM_DEVICE;
                        mapped_sg->mapped = true;

                        ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents,
                                       0, 1, &split_size, &dst, &dst_nents,
                                       gfp_flags);
                        if (ret) {
                                dst_nents = mapped_sg->sgt.nents;
                                dst = mapped_sg->sgt.sgl;
                        } else {
                                mapped_sg->split_sg = dst;
                        }
                }
        }

        rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
                                             DMA_DEV_TO_MEM,
                                             DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!rxd->tx_in) {
                dev_err(pdata->dev, "IN prep_slave_sg() failed\n");
                ret = -EINVAL;
                goto err_cleanup;
        }

        rxd->req = (void *)req->base;
        rxd->enc = req->enc;
        rxd->iv_idx = req->ctx->iv_idx;
        rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
        rxd->tx_in->callback = req->callback;
        rxd->tx_in->callback_param = rxd;

        tx_out = dmaengine_prep_slave_sg(pdata->dma_tx, src,
                                         src_nents, DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);

        if (!tx_out) {
                dev_err(pdata->dev, "OUT prep_slave_sg() failed\n");
                ret = -EINVAL;
                goto err_cleanup;
        }

        /*
         * Prepare metadata for DMA engine. This essentially describes the
         * crypto algorithm to be used, data sizes, different keys etc.
         */
        mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(tx_out, &pl, &ml);

        sa_prepare_tx_desc(mdptr, (sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS *
                                   sizeof(u32))), cmdl, sizeof(sa_ctx->epib),
                           sa_ctx->epib);

        ml = sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS * sizeof(u32));
        dmaengine_desc_set_metadata_len(tx_out, req->mdata_size);

        dmaengine_submit(tx_out);
        dmaengine_submit(rxd->tx_in);

        dma_async_issue_pending(dma_rx);
        dma_async_issue_pending(pdata->dma_tx);

        return -EINPROGRESS;

err_cleanup:
        sa_free_sa_rx_data(rxd);

        return ret;
}
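
/*
 * Note that -EINPROGRESS is sa_run()'s success return, following the
 * async crypto convention: completion is signalled later from the RX
 * DMA callback installed on rxd->tx_in.
 */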

static int sa_cipher_run(struct skcipher_request *req, u8 *iv, int enc)
{
        struct sa_tfm_ctx *ctx =
            crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        struct crypto_alg *alg = req->base.tfm->__crt_alg;
        struct sa_req sa_req = { 0 };

        if (!req->cryptlen)
                return 0;

        if (req->cryptlen % alg->cra_blocksize)
                return -EINVAL;

        /* Use SW fallback if the data size is not supported */
        if (req->cryptlen > SA_MAX_DATA_SZ ||
            (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
             req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                skcipher_request_set_tfm(subreq, ctx->fallback.skcipher);
                skcipher_request_set_callback(subreq, req->base.flags,
                                              req->base.complete,
                                              req->base.data);
                skcipher_request_set_crypt(subreq, req->src, req->dst,
                                           req->cryptlen, req->iv);
                if (enc)
                        return crypto_skcipher_encrypt(subreq);
                else
                        return crypto_skcipher_decrypt(subreq);
        }

        sa_req.size = req->cryptlen;
        sa_req.enc_size = req->cryptlen;
        sa_req.src = req->src;
        sa_req.dst = req->dst;
        sa_req.enc_iv = iv;
        sa_req.type = CRYPTO_ALG_TYPE_SKCIPHER;
        sa_req.enc = enc;
        sa_req.callback = sa_aes_dma_in_callback;
        sa_req.mdata_size = 44;
        sa_req.base = &req->base;
        sa_req.ctx = ctx;

        return sa_run(&sa_req);
}

static int sa_encrypt(struct skcipher_request *req)
{
        return sa_cipher_run(req, req->iv, 1);
}

static int sa_decrypt(struct skcipher_request *req)
{
        return sa_cipher_run(req, req->iv, 0);
}

static void sa_sha_dma_in_callback(void *data)
{
        struct sa_rx_data *rxd = (struct sa_rx_data *)data;
        struct ahash_request *req;
        struct crypto_ahash *tfm;
        unsigned int authsize;
        int i;
        size_t ml, pl;
        u32 *result;
        __be32 *mdptr;

        sa_sync_from_device(rxd);
        req = container_of(rxd->req, struct ahash_request, base);
        tfm = crypto_ahash_reqtfm(req);
        authsize = crypto_ahash_digestsize(tfm);

        mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
        result = (u32 *)req->result;

        for (i = 0; i < (authsize / 4); i++)
                result[i] = be32_to_cpu(mdptr[i + 4]);

        sa_free_sa_rx_data(rxd);

        ahash_request_complete(req, 0);
}

static int zero_message_process(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        int sa_digest_size = crypto_ahash_digestsize(tfm);

        switch (sa_digest_size) {
        case SHA1_DIGEST_SIZE:
                memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
                break;
        case SHA256_DIGEST_SIZE:
                memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
                break;
        case SHA512_DIGEST_SIZE:
                memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
                break;
        default:
                return -EINVAL;
        }

        return 0;
}
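
/*
 * Zero-length messages thus never touch the hardware: the digest of
 * the empty message is copied from the kernel's precomputed
 * sha*_zero_message_hash constants instead.
 */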
1400
1401static int sa_sha_run(struct ahash_request *req)
1402{
1403        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
1404        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1405        struct sa_req sa_req = { 0 };
1406        size_t auth_len;
1407
1408        auth_len = req->nbytes;
1409
1410        if (!auth_len)
1411                return zero_message_process(req);
1412
1413        if (auth_len > SA_MAX_DATA_SZ ||
1414            (auth_len >= SA_UNSAFE_DATA_SZ_MIN &&
1415             auth_len <= SA_UNSAFE_DATA_SZ_MAX)) {
1416                struct ahash_request *subreq = &rctx->fallback_req;
1417                int ret = 0;
1418
1419                ahash_request_set_tfm(subreq, ctx->fallback.ahash);
1420                subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1421
1422                crypto_ahash_init(subreq);
1423
1424                subreq->nbytes = auth_len;
1425                subreq->src = req->src;
1426                subreq->result = req->result;
1427
1428                ret |= crypto_ahash_update(subreq);
1429
1430                subreq->nbytes = 0;
1431
1432                ret |= crypto_ahash_final(subreq);
1433
1434                return ret;
1435        }
1436
1437        sa_req.size = auth_len;
1438        sa_req.auth_size = auth_len;
1439        sa_req.src = req->src;
1440        sa_req.dst = req->src;
1441        sa_req.enc = true;
1442        sa_req.type = CRYPTO_ALG_TYPE_AHASH;
1443        sa_req.callback = sa_sha_dma_in_callback;
1444        sa_req.mdata_size = 28;
1445        sa_req.ctx = ctx;
1446        sa_req.base = &req->base;
1447
1448        return sa_run(&sa_req);
1449}
1450
1451static int sa_sha_setup(struct sa_tfm_ctx *ctx, struct  algo_data *ad)
1452{
1453        int bs = crypto_shash_blocksize(ctx->shash);
1454        int cmdl_len;
1455        struct sa_cmdl_cfg cfg;
1456
1457        ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
1458        ad->auth_eng.eng_id = SA_ENG_ID_AM1;
1459        ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
1460
1461        memset(ctx->authkey, 0, bs);
1462        memset(&cfg, 0, sizeof(cfg));
1463        cfg.aalg = ad->aalg_id;
1464        cfg.enc_eng_id = ad->enc_eng.eng_id;
1465        cfg.auth_eng_id = ad->auth_eng.eng_id;
1466        cfg.iv_size = 0;
1467        cfg.akey = NULL;
1468        cfg.akey_len = 0;
1469
1470        ctx->dev_data = dev_get_drvdata(sa_k3_dev);
1471        /* Setup Encryption Security Context & Command label template */
1472        if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, NULL, 0, NULL, 0,
1473                       ad, 0, &ctx->enc.epib[1]))
1474                goto badkey;
1475
1476        cmdl_len = sa_format_cmdl_gen(&cfg,
1477                                      (u8 *)ctx->enc.cmdl,
1478                                      &ctx->enc.cmdl_upd_info);
1479        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1480                goto badkey;
1481
1482        ctx->enc.cmdl_size = cmdl_len;
1483
1484        return 0;
1485
1486badkey:
1487        dev_err(sa_k3_dev, "%s: badkey\n", __func__);
1488        return -EINVAL;
1489}
1490
1491static int sa_sha_cra_init_alg(struct crypto_tfm *tfm, const char *alg_base)
1492{
1493        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1494        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1495        int ret;
1496
1497        memset(ctx, 0, sizeof(*ctx));
1498        ctx->dev_data = data;
1499        ret = sa_init_ctx_info(&ctx->enc, data);
1500        if (ret)
1501                return ret;
1502
1503        if (alg_base) {
1504                ctx->shash = crypto_alloc_shash(alg_base, 0,
1505                                                CRYPTO_ALG_NEED_FALLBACK);
                if (IS_ERR(ctx->shash)) {
                        dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n",
                                alg_base);
                        sa_free_ctx_info(&ctx->enc, data);
                        return PTR_ERR(ctx->shash);
                }
                /* for fallback */
                ctx->fallback.ahash =
                        crypto_alloc_ahash(alg_base, 0,
                                           CRYPTO_ALG_NEED_FALLBACK);
                if (IS_ERR(ctx->fallback.ahash)) {
                        dev_err(ctx->dev_data->dev,
                                "Could not load fallback driver\n");
                        crypto_free_shash(ctx->shash);
                        sa_free_ctx_info(&ctx->enc, data);
                        return PTR_ERR(ctx->fallback.ahash);
                }
1520        }
1521
1522        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1523                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1524                ctx->dec.sc_id, &ctx->dec.sc_phys);
1525
1526        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
1527                                 sizeof(struct sa_sha_req_ctx) +
1528                                 crypto_ahash_reqsize(ctx->fallback.ahash));
1529
1530        return 0;
1531}
1532
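/*
 * sa_sha_digest() runs one-shot requests on the accelerator; the
 * init/update/final/finup/export/import handlers below delegate to
 * the software fallback, since the driver keeps no partial hash state
 * in hardware.
 */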
1533static int sa_sha_digest(struct ahash_request *req)
1534{
1535        return sa_sha_run(req);
1536}
1537
1538static int sa_sha_init(struct ahash_request *req)
1539{
1540        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1541        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1542        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1543
1544        dev_dbg(sa_k3_dev, "init: digest size: %u, rctx=%p\n",
1545                crypto_ahash_digestsize(tfm), rctx);
1546
1547        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1548        rctx->fallback_req.base.flags =
1549                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1550
1551        return crypto_ahash_init(&rctx->fallback_req);
1552}
1553
1554static int sa_sha_update(struct ahash_request *req)
1555{
1556        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1557        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1558        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1559
1560        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1561        rctx->fallback_req.base.flags =
1562                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1563        rctx->fallback_req.nbytes = req->nbytes;
1564        rctx->fallback_req.src = req->src;
1565
1566        return crypto_ahash_update(&rctx->fallback_req);
1567}
1568
1569static int sa_sha_final(struct ahash_request *req)
1570{
1571        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1572        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1573        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1574
1575        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1576        rctx->fallback_req.base.flags =
1577                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1578        rctx->fallback_req.result = req->result;
1579
1580        return crypto_ahash_final(&rctx->fallback_req);
1581}
1582
1583static int sa_sha_finup(struct ahash_request *req)
1584{
1585        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1586        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1587        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1588
1589        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1590        rctx->fallback_req.base.flags =
1591                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1592
1593        rctx->fallback_req.nbytes = req->nbytes;
1594        rctx->fallback_req.src = req->src;
1595        rctx->fallback_req.result = req->result;
1596
1597        return crypto_ahash_finup(&rctx->fallback_req);
1598}
1599
1600static int sa_sha_import(struct ahash_request *req, const void *in)
1601{
1602        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1603        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1604        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1605
1606        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1607        rctx->fallback_req.base.flags = req->base.flags &
1608                CRYPTO_TFM_REQ_MAY_SLEEP;
1609
1610        return crypto_ahash_import(&rctx->fallback_req, in);
1611}
1612
1613static int sa_sha_export(struct ahash_request *req, void *out)
1614{
1615        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1616        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1617        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1618        struct ahash_request *subreq = &rctx->fallback_req;
1619
1620        ahash_request_set_tfm(subreq, ctx->fallback.ahash);
1621        subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1622
1623        return crypto_ahash_export(subreq, out);
1624}
1625
static int sa_sha1_cra_init(struct crypto_tfm *tfm)
{
        struct algo_data ad = { 0 };
        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = sa_sha_cra_init_alg(tfm, "sha1");
        if (ret)
                return ret;

        ad.aalg_id = SA_AALG_ID_SHA1;
        ad.hash_size = SHA1_DIGEST_SIZE;
        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;

        return sa_sha_setup(ctx, &ad);
}
1641
static int sa_sha256_cra_init(struct crypto_tfm *tfm)
{
        struct algo_data ad = { 0 };
        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = sa_sha_cra_init_alg(tfm, "sha256");
        if (ret)
                return ret;

        ad.aalg_id = SA_AALG_ID_SHA2_256;
        ad.hash_size = SHA256_DIGEST_SIZE;
        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;

        return sa_sha_setup(ctx, &ad);
}
1657
static int sa_sha512_cra_init(struct crypto_tfm *tfm)
{
        struct algo_data ad = { 0 };
        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = sa_sha_cra_init_alg(tfm, "sha512");
        if (ret)
                return ret;

        ad.aalg_id = SA_AALG_ID_SHA2_512;
        ad.hash_size = SHA512_DIGEST_SIZE;
        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA512;

        return sa_sha_setup(ctx, &ad);
}
1673
1674static void sa_sha_cra_exit(struct crypto_tfm *tfm)
1675{
1676        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1677        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1678
1679        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1680                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1681                ctx->dec.sc_id, &ctx->dec.sc_phys);
1682
1683        if (crypto_tfm_alg_type(tfm) == CRYPTO_ALG_TYPE_AHASH)
1684                sa_free_ctx_info(&ctx->enc, data);
1685
1686        crypto_free_shash(ctx->shash);
1687        crypto_free_ahash(ctx->fallback.ahash);
1688}
1689
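/*
 * AEAD completion callback: the engine returns the authentication tag
 * in the descriptor metadata (mdptr[4..]). For encryption the tag is
 * appended to the destination scatterlist; for decryption it is
 * compared against the tag carried at the end of the source data and
 * a mismatch is reported as -EBADMSG.
 */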
1690static void sa_aead_dma_in_callback(void *data)
1691{
1692        struct sa_rx_data *rxd = (struct sa_rx_data *)data;
1693        struct aead_request *req;
1694        struct crypto_aead *tfm;
1695        unsigned int start;
1696        unsigned int authsize;
1697        u8 auth_tag[SA_MAX_AUTH_TAG_SZ];
1698        size_t pl, ml;
1699        int i;
1700        int err = 0;
1702        u32 *mdptr;
1703
1704        sa_sync_from_device(rxd);
1705        req = container_of(rxd->req, struct aead_request, base);
1706        tfm = crypto_aead_reqtfm(req);
1707        start = req->assoclen + req->cryptlen;
1708        authsize = crypto_aead_authsize(tfm);
1709
1710        mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
1711        for (i = 0; i < (authsize / 4); i++)
1712                mdptr[i + 4] = swab32(mdptr[i + 4]);
1713
1716        if (rxd->enc) {
1717                scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
1718                                         1);
1719        } else {
1721                start -= authsize;
1722                scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
1723                                         0);
1724
1725                err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
1726        }
1727
1728        sa_free_sa_rx_data(rxd);
1729
1730        aead_request_complete(req, err);
1731}
1732
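/*
 * Common AEAD tfm setup: allocates the shash backing HMAC ipad/opad
 * precomputation, the software fallback AEAD for request sizes the
 * engine cannot handle, and one security-context slot for each
 * direction.
 */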
1733static int sa_cra_init_aead(struct crypto_aead *tfm, const char *hash,
1734                            const char *fallback)
1735{
1736        struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1737        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1738        int ret;
1739
1740        memzero_explicit(ctx, sizeof(*ctx));
1741        ctx->dev_data = data;
1742
1743        ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
1744        if (IS_ERR(ctx->shash)) {
1745                dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n", hash);
1746                return PTR_ERR(ctx->shash);
1747        }
1748
        ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
                                               CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ctx->fallback.aead)) {
                dev_err(sa_k3_dev, "fallback driver %s couldn't be loaded\n",
                        fallback);
                crypto_free_shash(ctx->shash);
                return PTR_ERR(ctx->fallback.aead);
        }
1757
1758        crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
1759                                crypto_aead_reqsize(ctx->fallback.aead));
1760
1761        ret = sa_init_ctx_info(&ctx->enc, data);
1762        if (ret)
1763                return ret;
1764
1765        ret = sa_init_ctx_info(&ctx->dec, data);
1766        if (ret) {
1767                sa_free_ctx_info(&ctx->enc, data);
1768                return ret;
1769        }
1770
1771        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1772                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1773                ctx->dec.sc_id, &ctx->dec.sc_phys);
1774
1775        return ret;
1776}
1777
1778static int sa_cra_init_aead_sha1(struct crypto_aead *tfm)
1779{
1780        return sa_cra_init_aead(tfm, "sha1",
1781                                "authenc(hmac(sha1-ce),cbc(aes-ce))");
1782}
1783
1784static int sa_cra_init_aead_sha256(struct crypto_aead *tfm)
1785{
1786        return sa_cra_init_aead(tfm, "sha256",
1787                                "authenc(hmac(sha256-ce),cbc(aes-ce))");
1788}
1789
1790static void sa_exit_tfm_aead(struct crypto_aead *tfm)
1791{
1792        struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1793        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1794
1795        crypto_free_shash(ctx->shash);
1796        crypto_free_aead(ctx->fallback.aead);
1797
1798        sa_free_ctx_info(&ctx->enc, data);
1799        sa_free_ctx_info(&ctx->dec, data);
1800}
1801
1802/* AEAD algorithm configuration interface function */
1803static int sa_aead_setkey(struct crypto_aead *authenc,
1804                          const u8 *key, unsigned int keylen,
1805                          struct algo_data *ad)
1806{
1807        struct sa_tfm_ctx *ctx = crypto_aead_ctx(authenc);
1808        struct crypto_authenc_keys keys;
1809        int cmdl_len;
1810        struct sa_cmdl_cfg cfg;
1811        int key_idx;
1812
1813        if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
1814                return -EINVAL;
1815
        /* Convert the key size (16/24/32) to the key size index (0/1/2) */
        key_idx = (keys.enckeylen >> 3) - 2;
        if (key_idx < 0 || key_idx >= 3)
                return -EINVAL;
1820
1821        ad->ctx = ctx;
1822        ad->enc_eng.eng_id = SA_ENG_ID_EM1;
1823        ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
1824        ad->auth_eng.eng_id = SA_ENG_ID_AM1;
1825        ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
1826        ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
1827        ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
1828        ad->inv_key = true;
1829        ad->keyed_mac = true;
1830        ad->ealg_id = SA_EALG_ID_AES_CBC;
1831        ad->prep_iopad = sa_prepare_iopads;
1832
1833        memset(&cfg, 0, sizeof(cfg));
1834        cfg.enc = true;
1835        cfg.aalg = ad->aalg_id;
1836        cfg.enc_eng_id = ad->enc_eng.eng_id;
1837        cfg.auth_eng_id = ad->auth_eng.eng_id;
1838        cfg.iv_size = crypto_aead_ivsize(authenc);
1839        cfg.akey = keys.authkey;
1840        cfg.akey_len = keys.authkeylen;
1841
1842        /* Setup Encryption Security Context & Command label template */
1843        if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, keys.enckey,
1844                       keys.enckeylen, keys.authkey, keys.authkeylen,
1845                       ad, 1, &ctx->enc.epib[1]))
1846                return -EINVAL;
1847
1848        cmdl_len = sa_format_cmdl_gen(&cfg,
1849                                      (u8 *)ctx->enc.cmdl,
1850                                      &ctx->enc.cmdl_upd_info);
1851        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1852                return -EINVAL;
1853
1854        ctx->enc.cmdl_size = cmdl_len;
1855
1856        /* Setup Decryption Security Context & Command label template */
1857        if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, keys.enckey,
1858                       keys.enckeylen, keys.authkey, keys.authkeylen,
1859                       ad, 0, &ctx->dec.epib[1]))
1860                return -EINVAL;
1861
1862        cfg.enc = false;
1863        cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
1864                                      &ctx->dec.cmdl_upd_info);
1865
1866        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1867                return -EINVAL;
1868
1869        ctx->dec.cmdl_size = cmdl_len;
1870
        crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(ctx->fallback.aead,
                              crypto_aead_get_flags(authenc) &
                              CRYPTO_TFM_REQ_MASK);

        return crypto_aead_setkey(ctx->fallback.aead, key, keylen);
1878}
1879
1880static int sa_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
1881{
1882        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(crypto_aead_tfm(tfm));
1883
1884        return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
1885}
1886
1887static int sa_aead_cbc_sha1_setkey(struct crypto_aead *authenc,
1888                                   const u8 *key, unsigned int keylen)
1889{
1890        struct algo_data ad = { 0 };
1891
1892        ad.ealg_id = SA_EALG_ID_AES_CBC;
1893        ad.aalg_id = SA_AALG_ID_HMAC_SHA1;
1894        ad.hash_size = SHA1_DIGEST_SIZE;
1895        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
1896
1897        return sa_aead_setkey(authenc, key, keylen, &ad);
1898}
1899
1900static int sa_aead_cbc_sha256_setkey(struct crypto_aead *authenc,
1901                                     const u8 *key, unsigned int keylen)
1902{
1903        struct algo_data ad = { 0 };
1904
1905        ad.ealg_id = SA_EALG_ID_AES_CBC;
1906        ad.aalg_id = SA_AALG_ID_HMAC_SHA2_256;
1907        ad.hash_size = SHA256_DIGEST_SIZE;
1908        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
1909
1910        return sa_aead_setkey(authenc, key, keylen, &ad);
1911}
1912
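/*
 * Dispatch one AEAD request. As with hashes, sizes the engine cannot
 * transfer safely are bounced to the fallback AEAD; otherwise the
 * associated data, payload and IV are described in a struct sa_req
 * and queued through sa_run().
 */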
1913static int sa_aead_run(struct aead_request *req, u8 *iv, int enc)
1914{
1915        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1916        struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1917        struct sa_req sa_req = { 0 };
1918        size_t auth_size, enc_size;
1919
1920        enc_size = req->cryptlen;
1921        auth_size = req->assoclen + req->cryptlen;
1922
1923        if (!enc) {
1924                enc_size -= crypto_aead_authsize(tfm);
1925                auth_size -= crypto_aead_authsize(tfm);
1926        }
1927
1928        if (auth_size > SA_MAX_DATA_SZ ||
1929            (auth_size >= SA_UNSAFE_DATA_SZ_MIN &&
1930             auth_size <= SA_UNSAFE_DATA_SZ_MAX)) {
1931                struct aead_request *subreq = aead_request_ctx(req);
1932                int ret;
1933
1934                aead_request_set_tfm(subreq, ctx->fallback.aead);
1935                aead_request_set_callback(subreq, req->base.flags,
1936                                          req->base.complete, req->base.data);
1937                aead_request_set_crypt(subreq, req->src, req->dst,
1938                                       req->cryptlen, req->iv);
1939                aead_request_set_ad(subreq, req->assoclen);
1940
1941                ret = enc ? crypto_aead_encrypt(subreq) :
1942                        crypto_aead_decrypt(subreq);
1943                return ret;
1944        }
1945
1946        sa_req.enc_offset = req->assoclen;
1947        sa_req.enc_size = enc_size;
1948        sa_req.auth_size = auth_size;
1949        sa_req.size = auth_size;
1950        sa_req.enc_iv = iv;
1951        sa_req.type = CRYPTO_ALG_TYPE_AEAD;
1952        sa_req.enc = enc;
1953        sa_req.callback = sa_aead_dma_in_callback;
1954        sa_req.mdata_size = 52;
1955        sa_req.base = &req->base;
1956        sa_req.ctx = ctx;
1957        sa_req.src = req->src;
1958        sa_req.dst = req->dst;
1959
1960        return sa_run(&sa_req);
1961}
1962
1963/* AEAD algorithm encrypt interface function */
1964static int sa_aead_encrypt(struct aead_request *req)
1965{
1966        return sa_aead_run(req, req->iv, 1);
1967}
1968
1969/* AEAD algorithm decrypt interface function */
1970static int sa_aead_decrypt(struct aead_request *req)
1971{
1972        return sa_aead_run(req, req->iv, 0);
1973}
1974
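/*
 * Algorithm templates, indexed by enum sa_algo_id. Only the entries
 * set in a device's supported_algos match-data bitmask get registered.
 */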
1975static struct sa_alg_tmpl sa_algs[] = {
1976        [SA_ALG_CBC_AES] = {
1977                .type = CRYPTO_ALG_TYPE_SKCIPHER,
1978                .alg.skcipher = {
1979                        .base.cra_name          = "cbc(aes)",
1980                        .base.cra_driver_name   = "cbc-aes-sa2ul",
1981                        .base.cra_priority      = 30000,
1982                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
1983                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
1984                                                  CRYPTO_ALG_ASYNC |
1985                                                  CRYPTO_ALG_NEED_FALLBACK,
1986                        .base.cra_blocksize     = AES_BLOCK_SIZE,
1987                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
1988                        .base.cra_module        = THIS_MODULE,
1989                        .init                   = sa_cipher_cra_init,
1990                        .exit                   = sa_cipher_cra_exit,
1991                        .min_keysize            = AES_MIN_KEY_SIZE,
1992                        .max_keysize            = AES_MAX_KEY_SIZE,
1993                        .ivsize                 = AES_BLOCK_SIZE,
1994                        .setkey                 = sa_aes_cbc_setkey,
1995                        .encrypt                = sa_encrypt,
1996                        .decrypt                = sa_decrypt,
1997                }
1998        },
1999        [SA_ALG_EBC_AES] = {
2000                .type = CRYPTO_ALG_TYPE_SKCIPHER,
2001                .alg.skcipher = {
2002                        .base.cra_name          = "ecb(aes)",
2003                        .base.cra_driver_name   = "ecb-aes-sa2ul",
2004                        .base.cra_priority      = 30000,
2005                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
2006                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2007                                                  CRYPTO_ALG_ASYNC |
2008                                                  CRYPTO_ALG_NEED_FALLBACK,
2009                        .base.cra_blocksize     = AES_BLOCK_SIZE,
2010                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
2011                        .base.cra_module        = THIS_MODULE,
2012                        .init                   = sa_cipher_cra_init,
2013                        .exit                   = sa_cipher_cra_exit,
2014                        .min_keysize            = AES_MIN_KEY_SIZE,
2015                        .max_keysize            = AES_MAX_KEY_SIZE,
2016                        .setkey                 = sa_aes_ecb_setkey,
2017                        .encrypt                = sa_encrypt,
2018                        .decrypt                = sa_decrypt,
2019                }
2020        },
2021        [SA_ALG_CBC_DES3] = {
2022                .type = CRYPTO_ALG_TYPE_SKCIPHER,
2023                .alg.skcipher = {
2024                        .base.cra_name          = "cbc(des3_ede)",
2025                        .base.cra_driver_name   = "cbc-des3-sa2ul",
2026                        .base.cra_priority      = 30000,
2027                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
2028                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2029                                                  CRYPTO_ALG_ASYNC |
2030                                                  CRYPTO_ALG_NEED_FALLBACK,
2031                        .base.cra_blocksize     = DES_BLOCK_SIZE,
2032                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
2033                        .base.cra_module        = THIS_MODULE,
2034                        .init                   = sa_cipher_cra_init,
2035                        .exit                   = sa_cipher_cra_exit,
2036                        .min_keysize            = 3 * DES_KEY_SIZE,
2037                        .max_keysize            = 3 * DES_KEY_SIZE,
2038                        .ivsize                 = DES_BLOCK_SIZE,
2039                        .setkey                 = sa_3des_cbc_setkey,
2040                        .encrypt                = sa_encrypt,
2041                        .decrypt                = sa_decrypt,
2042                }
2043        },
2044        [SA_ALG_ECB_DES3] = {
2045                .type = CRYPTO_ALG_TYPE_SKCIPHER,
2046                .alg.skcipher = {
2047                        .base.cra_name          = "ecb(des3_ede)",
2048                        .base.cra_driver_name   = "ecb-des3-sa2ul",
2049                        .base.cra_priority      = 30000,
2050                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
2051                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2052                                                  CRYPTO_ALG_ASYNC |
2053                                                  CRYPTO_ALG_NEED_FALLBACK,
2054                        .base.cra_blocksize     = DES_BLOCK_SIZE,
2055                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
2056                        .base.cra_module        = THIS_MODULE,
2057                        .init                   = sa_cipher_cra_init,
2058                        .exit                   = sa_cipher_cra_exit,
2059                        .min_keysize            = 3 * DES_KEY_SIZE,
2060                        .max_keysize            = 3 * DES_KEY_SIZE,
2061                        .setkey                 = sa_3des_ecb_setkey,
2062                        .encrypt                = sa_encrypt,
2063                        .decrypt                = sa_decrypt,
2064                }
2065        },
2066        [SA_ALG_SHA1] = {
2067                .type = CRYPTO_ALG_TYPE_AHASH,
2068                .alg.ahash = {
2069                        .halg.base = {
2070                                .cra_name       = "sha1",
2071                                .cra_driver_name        = "sha1-sa2ul",
2072                                .cra_priority   = 400,
2073                                .cra_flags      = CRYPTO_ALG_TYPE_AHASH |
2074                                                  CRYPTO_ALG_ASYNC |
2075                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2076                                                  CRYPTO_ALG_NEED_FALLBACK,
2077                                .cra_blocksize  = SHA1_BLOCK_SIZE,
2078                                .cra_ctxsize    = sizeof(struct sa_tfm_ctx),
2079                                .cra_module     = THIS_MODULE,
2080                                .cra_init       = sa_sha1_cra_init,
2081                                .cra_exit       = sa_sha_cra_exit,
2082                        },
2083                        .halg.digestsize        = SHA1_DIGEST_SIZE,
2084                        .halg.statesize         = sizeof(struct sa_sha_req_ctx) +
2085                                                  sizeof(struct sha1_state),
2086                        .init                   = sa_sha_init,
2087                        .update                 = sa_sha_update,
2088                        .final                  = sa_sha_final,
2089                        .finup                  = sa_sha_finup,
2090                        .digest                 = sa_sha_digest,
2091                        .export                 = sa_sha_export,
2092                        .import                 = sa_sha_import,
2093                },
2094        },
2095        [SA_ALG_SHA256] = {
2096                .type = CRYPTO_ALG_TYPE_AHASH,
2097                .alg.ahash = {
2098                        .halg.base = {
2099                                .cra_name       = "sha256",
2100                                .cra_driver_name        = "sha256-sa2ul",
2101                                .cra_priority   = 400,
2102                                .cra_flags      = CRYPTO_ALG_TYPE_AHASH |
2103                                                  CRYPTO_ALG_ASYNC |
2104                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2105                                                  CRYPTO_ALG_NEED_FALLBACK,
2106                                .cra_blocksize  = SHA256_BLOCK_SIZE,
2107                                .cra_ctxsize    = sizeof(struct sa_tfm_ctx),
2108                                .cra_module     = THIS_MODULE,
2109                                .cra_init       = sa_sha256_cra_init,
2110                                .cra_exit       = sa_sha_cra_exit,
2111                        },
2112                        .halg.digestsize        = SHA256_DIGEST_SIZE,
2113                        .halg.statesize         = sizeof(struct sa_sha_req_ctx) +
2114                                                  sizeof(struct sha256_state),
2115                        .init                   = sa_sha_init,
2116                        .update                 = sa_sha_update,
2117                        .final                  = sa_sha_final,
2118                        .finup                  = sa_sha_finup,
2119                        .digest                 = sa_sha_digest,
2120                        .export                 = sa_sha_export,
2121                        .import                 = sa_sha_import,
2122                },
2123        },
2124        [SA_ALG_SHA512] = {
2125                .type = CRYPTO_ALG_TYPE_AHASH,
2126                .alg.ahash = {
2127                        .halg.base = {
2128                                .cra_name       = "sha512",
2129                                .cra_driver_name        = "sha512-sa2ul",
2130                                .cra_priority   = 400,
2131                                .cra_flags      = CRYPTO_ALG_TYPE_AHASH |
2132                                                  CRYPTO_ALG_ASYNC |
2133                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2134                                                  CRYPTO_ALG_NEED_FALLBACK,
2135                                .cra_blocksize  = SHA512_BLOCK_SIZE,
2136                                .cra_ctxsize    = sizeof(struct sa_tfm_ctx),
2137                                .cra_module     = THIS_MODULE,
2138                                .cra_init       = sa_sha512_cra_init,
2139                                .cra_exit       = sa_sha_cra_exit,
2140                        },
2141                        .halg.digestsize        = SHA512_DIGEST_SIZE,
2142                        .halg.statesize         = sizeof(struct sa_sha_req_ctx) +
2143                                                  sizeof(struct sha512_state),
2144                        .init                   = sa_sha_init,
2145                        .update                 = sa_sha_update,
2146                        .final                  = sa_sha_final,
2147                        .finup                  = sa_sha_finup,
2148                        .digest                 = sa_sha_digest,
2149                        .export                 = sa_sha_export,
2150                        .import                 = sa_sha_import,
2151                },
2152        },
2153        [SA_ALG_AUTHENC_SHA1_AES] = {
2154                .type   = CRYPTO_ALG_TYPE_AEAD,
2155                .alg.aead = {
2156                        .base = {
2157                                .cra_name = "authenc(hmac(sha1),cbc(aes))",
2158                                .cra_driver_name =
2159                                        "authenc(hmac(sha1),cbc(aes))-sa2ul",
2160                                .cra_blocksize = AES_BLOCK_SIZE,
2161                                .cra_flags = CRYPTO_ALG_TYPE_AEAD |
2162                                        CRYPTO_ALG_KERN_DRIVER_ONLY |
2163                                        CRYPTO_ALG_ASYNC |
2164                                        CRYPTO_ALG_NEED_FALLBACK,
2165                                .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2166                                .cra_module = THIS_MODULE,
2167                                .cra_priority = 3000,
2168                        },
2169                        .ivsize = AES_BLOCK_SIZE,
2170                        .maxauthsize = SHA1_DIGEST_SIZE,
2171
2172                        .init = sa_cra_init_aead_sha1,
2173                        .exit = sa_exit_tfm_aead,
2174                        .setkey = sa_aead_cbc_sha1_setkey,
2175                        .setauthsize = sa_aead_setauthsize,
2176                        .encrypt = sa_aead_encrypt,
2177                        .decrypt = sa_aead_decrypt,
2178                },
2179        },
2180        [SA_ALG_AUTHENC_SHA256_AES] = {
2181                .type   = CRYPTO_ALG_TYPE_AEAD,
2182                .alg.aead = {
2183                        .base = {
2184                                .cra_name = "authenc(hmac(sha256),cbc(aes))",
2185                                .cra_driver_name =
2186                                        "authenc(hmac(sha256),cbc(aes))-sa2ul",
2187                                .cra_blocksize = AES_BLOCK_SIZE,
2188                                .cra_flags = CRYPTO_ALG_TYPE_AEAD |
2189                                        CRYPTO_ALG_KERN_DRIVER_ONLY |
2190                                        CRYPTO_ALG_ASYNC |
2191                                        CRYPTO_ALG_NEED_FALLBACK,
2192                                .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2193                                .cra_module = THIS_MODULE,
2195                                .cra_priority = 3000,
2196                        },
2197                        .ivsize = AES_BLOCK_SIZE,
2198                        .maxauthsize = SHA256_DIGEST_SIZE,
2199
2200                        .init = sa_cra_init_aead_sha256,
2201                        .exit = sa_exit_tfm_aead,
2202                        .setkey = sa_aead_cbc_sha256_setkey,
2203                        .setauthsize = sa_aead_setauthsize,
2204                        .encrypt = sa_aead_encrypt,
2205                        .decrypt = sa_aead_decrypt,
2206                },
2207        },
2208};
2209
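/*
 * A minimal sketch (not part of the driver) of how a kernel consumer
 * reaches one of the algorithms above through the generic crypto API;
 * the core selects this driver when its cra_priority wins:
 *
 *      struct crypto_skcipher *tfm;
 *
 *      tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *      ...
 *      crypto_free_skcipher(tfm);
 */
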
2210/* Register the algorithms in crypto framework */
2211static void sa_register_algos(struct sa_crypto_data *dev_data)
2212{
2213        const struct sa_match_data *match_data = dev_data->match_data;
2214        struct device *dev = dev_data->dev;
2215        char *alg_name;
2216        u32 type;
2217        int i, err;
2218
2219        for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
2220                /* Skip unsupported algos */
2221                if (!(match_data->supported_algos & BIT(i)))
2222                        continue;
2223
2224                type = sa_algs[i].type;
2225                if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
2226                        alg_name = sa_algs[i].alg.skcipher.base.cra_name;
2227                        err = crypto_register_skcipher(&sa_algs[i].alg.skcipher);
2228                } else if (type == CRYPTO_ALG_TYPE_AHASH) {
2229                        alg_name = sa_algs[i].alg.ahash.halg.base.cra_name;
2230                        err = crypto_register_ahash(&sa_algs[i].alg.ahash);
2231                } else if (type == CRYPTO_ALG_TYPE_AEAD) {
2232                        alg_name = sa_algs[i].alg.aead.base.cra_name;
2233                        err = crypto_register_aead(&sa_algs[i].alg.aead);
2234                } else {
2235                        dev_err(dev,
                                "unsupported crypto algorithm (%d)\n",
2237                                sa_algs[i].type);
2238                        continue;
2239                }
2240
2241                if (err)
2242                        dev_err(dev, "Failed to register '%s'\n", alg_name);
2243                else
2244                        sa_algs[i].registered = true;
2245        }
2246}
2247
2248/* Unregister the algorithms in crypto framework */
2249static void sa_unregister_algos(const struct device *dev)
2250{
2251        u32 type;
2252        int i;
2253
2254        for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
2255                type = sa_algs[i].type;
2256                if (!sa_algs[i].registered)
2257                        continue;
2258                if (type == CRYPTO_ALG_TYPE_SKCIPHER)
2259                        crypto_unregister_skcipher(&sa_algs[i].alg.skcipher);
2260                else if (type == CRYPTO_ALG_TYPE_AHASH)
2261                        crypto_unregister_ahash(&sa_algs[i].alg.ahash);
2262                else if (type == CRYPTO_ALG_TYPE_AEAD)
2263                        crypto_unregister_aead(&sa_algs[i].alg.aead);
2264
2265                sa_algs[i].registered = false;
2266        }
2267}
2268
2269static int sa_init_mem(struct sa_crypto_data *dev_data)
2270{
2271        struct device *dev = &dev_data->pdev->dev;
2272        /* Setup dma pool for security context buffers */
2273        dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
2274                                            SA_CTX_MAX_SZ, 64, 0);
2275        if (!dev_data->sc_pool) {
                dev_err(dev, "Failed to create dma pool\n");
2277                return -ENOMEM;
2278        }
2279
2280        return 0;
2281}
2282
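/*
 * Acquire the rx1/rx2/tx DMA channels and apply a common slave
 * configuration (4-byte bus width, burst of 4). The device uses a
 * 48-bit DMA address mask.
 */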
2283static int sa_dma_init(struct sa_crypto_data *dd)
2284{
2285        int ret;
2286        struct dma_slave_config cfg;
2287
2288        dd->dma_rx1 = NULL;
2289        dd->dma_tx = NULL;
2290        dd->dma_rx2 = NULL;
2291
2292        ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
2293        if (ret)
2294                return ret;
2295
2296        dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
2297        if (IS_ERR(dd->dma_rx1))
2298                return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
2299                                     "Unable to request rx1 DMA channel\n");
2300
2301        dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
2302        if (IS_ERR(dd->dma_rx2)) {
2303                ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
2304                                    "Unable to request rx2 DMA channel\n");
2305                goto err_dma_rx2;
2306        }
2307
2308        dd->dma_tx = dma_request_chan(dd->dev, "tx");
2309        if (IS_ERR(dd->dma_tx)) {
2310                ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
2311                                    "Unable to request tx DMA channel\n");
2312                goto err_dma_tx;
2313        }
2314
2315        memzero_explicit(&cfg, sizeof(cfg));
2316
2317        cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2318        cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2319        cfg.src_maxburst = 4;
2320        cfg.dst_maxburst = 4;
2321
2322        ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
2323        if (ret) {
2324                dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
2325                        ret);
2326                goto err_dma_config;
2327        }
2328
2329        ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
2330        if (ret) {
2331                dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
2332                        ret);
2333                goto err_dma_config;
2334        }
2335
2336        ret = dmaengine_slave_config(dd->dma_tx, &cfg);
2337        if (ret) {
2338                dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n",
2339                        ret);
2340                goto err_dma_config;
2341        }
2342
2343        return 0;
2344
2345err_dma_config:
2346        dma_release_channel(dd->dma_tx);
2347err_dma_tx:
2348        dma_release_channel(dd->dma_rx2);
2349err_dma_rx2:
2350        dma_release_channel(dd->dma_rx1);
2351
2352        return ret;
2353}
2354
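/*
 * Make each child device (populated from the DT node in probe, e.g.
 * the optional RNG) a consumer of the SA2UL parent so it probes and
 * suspends in the right order.
 */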
2355static int sa_link_child(struct device *dev, void *data)
2356{
2357        struct device *parent = data;
2358
2359        device_link_add(dev, parent, DL_FLAG_AUTOPROBE_CONSUMER);
2360
2361        return 0;
2362}
2363
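/*
 * Per-SoC match data: AM65x/J721E expose the full algorithm set,
 * while AM64x supports a reduced list and leaves the engine-enable
 * register untouched (skip_engine_control).
 */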
2364static struct sa_match_data am654_match_data = {
2365        .priv = 1,
2366        .priv_id = 1,
2367        .supported_algos = GENMASK(SA_ALG_AUTHENC_SHA256_AES, 0),
2368};
2369
2370static struct sa_match_data am64_match_data = {
2371        .priv = 0,
2372        .priv_id = 0,
2373        .supported_algos = BIT(SA_ALG_CBC_AES) |
2374                           BIT(SA_ALG_EBC_AES) |
2375                           BIT(SA_ALG_SHA256) |
2376                           BIT(SA_ALG_SHA512) |
2377                           BIT(SA_ALG_AUTHENC_SHA256_AES),
2378        .skip_engine_control = true,
2379};
2380
2381static const struct of_device_id of_match[] = {
2382        { .compatible = "ti,j721e-sa2ul", .data = &am654_match_data, },
2383        { .compatible = "ti,am654-sa2ul", .data = &am654_match_data, },
2384        { .compatible = "ti,am64-sa2ul", .data = &am64_match_data, },
2385        {},
2386};
2387MODULE_DEVICE_TABLE(of, of_match);
2388
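/*
 * Probe: enable runtime PM, create the security-context DMA pool,
 * request the DMA channels, map the MMIO region, enable the engine
 * sub-blocks unless skip_engine_control is set, then register the
 * supported algorithms and populate child devices from the DT node.
 */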
2389static int sa_ul_probe(struct platform_device *pdev)
2390{
2391        struct device *dev = &pdev->dev;
2392        struct device_node *node = dev->of_node;
2393        struct resource *res;
        void __iomem *saul_base;
2395        struct sa_crypto_data *dev_data;
2396        int ret;
2397
2398        dev_data = devm_kzalloc(dev, sizeof(*dev_data), GFP_KERNEL);
2399        if (!dev_data)
2400                return -ENOMEM;
2401
2402        dev_data->match_data = of_device_get_match_data(dev);
2403        if (!dev_data->match_data)
2404                return -ENODEV;
2405
2406        sa_k3_dev = dev;
2407        dev_data->dev = dev;
2408        dev_data->pdev = pdev;
2409        platform_set_drvdata(pdev, dev_data);
2410        dev_set_drvdata(sa_k3_dev, dev_data);
2411
2412        pm_runtime_enable(dev);
2413        ret = pm_runtime_resume_and_get(dev);
2414        if (ret < 0) {
2415                dev_err(&pdev->dev, "%s: failed to get sync: %d\n", __func__,
2416                        ret);
2417                pm_runtime_disable(dev);
2418                return ret;
2419        }
2420
        ret = sa_init_mem(dev_data);
        if (ret)
                goto destroy_dma_pool;

        ret = sa_dma_init(dev_data);
        if (ret)
                goto destroy_dma_pool;
2425
2426        spin_lock_init(&dev_data->scid_lock);
        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        saul_base = devm_ioremap_resource(dev, res);
        if (IS_ERR(saul_base)) {
                ret = PTR_ERR(saul_base);
                goto release_dma;
        }

        dev_data->base = saul_base;
2431
2432        if (!dev_data->match_data->skip_engine_control) {
2433                u32 val = SA_EEC_ENCSS_EN | SA_EEC_AUTHSS_EN | SA_EEC_CTXCACH_EN |
2434                          SA_EEC_CPPI_PORT_IN_EN | SA_EEC_CPPI_PORT_OUT_EN |
2435                          SA_EEC_TRNG_EN;
2436
2437                writel_relaxed(val, saul_base + SA_ENGINE_ENABLE_CONTROL);
2438        }
2439
2440        sa_register_algos(dev_data);
2441
2442        ret = of_platform_populate(node, NULL, NULL, &pdev->dev);
2443        if (ret)
2444                goto release_dma;
2445
2446        device_for_each_child(&pdev->dev, &pdev->dev, sa_link_child);
2447
2448        return 0;
2449
2450release_dma:
2451        sa_unregister_algos(&pdev->dev);
2452
2453        dma_release_channel(dev_data->dma_rx2);
2454        dma_release_channel(dev_data->dma_rx1);
2455        dma_release_channel(dev_data->dma_tx);
2456
2457destroy_dma_pool:
2458        dma_pool_destroy(dev_data->sc_pool);
2459
2460        pm_runtime_put_sync(&pdev->dev);
2461        pm_runtime_disable(&pdev->dev);
2462
2463        return ret;
2464}
2465
2466static int sa_ul_remove(struct platform_device *pdev)
2467{
2468        struct sa_crypto_data *dev_data = platform_get_drvdata(pdev);
2469
2470        sa_unregister_algos(&pdev->dev);
2471
2472        dma_release_channel(dev_data->dma_rx2);
2473        dma_release_channel(dev_data->dma_rx1);
2474        dma_release_channel(dev_data->dma_tx);
2475
2476        dma_pool_destroy(dev_data->sc_pool);
2477
2478        platform_set_drvdata(pdev, NULL);
2479
2480        pm_runtime_put_sync(&pdev->dev);
2481        pm_runtime_disable(&pdev->dev);
2482
2483        return 0;
2484}
2485
2486static struct platform_driver sa_ul_driver = {
2487        .probe = sa_ul_probe,
2488        .remove = sa_ul_remove,
2489        .driver = {
2490                   .name = "saul-crypto",
2491                   .of_match_table = of_match,
2492                   },
2493};
2494module_platform_driver(sa_ul_driver);
2495MODULE_LICENSE("GPL v2");
2496