linux/crypto/sha256_generic.c
/*
 * Cryptographic API.
 *
 * SHA-256, as specified in
 * http://csrc.nist.gov/groups/STM/cavp/documents/shs/sha256-384-512.pdf
 *
 * SHA-256 code by Jean-Luc Cooke <jlcooke@certainkey.com>.
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <asm/byteorder.h>

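/*
 * Ch and Maj are the "choice" and "majority" functions from FIPS 180-2:
 * Ch(x,y,z) selects bits from y where x is set and from z where it is
 * clear; Maj(x,y,z) takes the majority vote of each bit position.  Both
 * are written in reduced-operation forms that are bit-for-bit equivalent
 * to the spec's definitions.
 */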
static inline u32 Ch(u32 x, u32 y, u32 z)
{
        return z ^ (x & (y ^ z));
}

static inline u32 Maj(u32 x, u32 y, u32 z)
{
        return (x & y) | (z & (x | y));
}

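/*
 * e0/e1 are the big-sigma functions (Sigma0, Sigma1) applied to the
 * working variables each round; s0/s1 are the small-sigma functions
 * (sigma0, sigma1) used to expand the message schedule.  Each mixes a
 * word through two rotates plus a rotate or shift, per FIPS 180-2
 * section 4.1.2.
 */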
#define e0(x)       (ror32(x, 2) ^ ror32(x, 13) ^ ror32(x, 22))
#define e1(x)       (ror32(x, 6) ^ ror32(x, 11) ^ ror32(x, 25))
#define s0(x)       (ror32(x, 7) ^ ror32(x, 18) ^ ((x) >> 3))
#define s1(x)       (ror32(x, 17) ^ ror32(x, 19) ^ ((x) >> 10))

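/*
 * Load input word I into the schedule, converting from the big-endian
 * byte order mandated by the spec to host order.
 */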
static inline void LOAD_OP(int I, u32 *W, const u8 *input)
{
        W[I] = __be32_to_cpu(((const __be32 *)input)[I]);
}

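/*
 * Message schedule expansion: each word past the first 16 is derived
 * from four earlier words, so a one-bit change in the input block
 * diffuses through the whole 64-entry schedule.
 */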
static inline void BLEND_OP(int I, u32 *W)
{
        W[I] = s1(W[I-2]) + W[I-7] + s0(W[I-15]) + W[I-16];
}

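/*
 * One compression-function invocation over a single 64-byte block.
 * The 64 rounds are fully unrolled with the round constants K[0..63]
 * inlined as literals; rather than rotating the eight working
 * variables at the end of each round, successive rounds simply rename
 * them (a..h shift one position per line pair), saving the register
 * shuffle.
 */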
static void sha256_transform(u32 *state, const u8 *input)
{
        u32 a, b, c, d, e, f, g, h, t1, t2;
        u32 W[64];
        int i;

        /* load the input */
        for (i = 0; i < 16; i++)
                LOAD_OP(i, W, input);

        /* now blend */
        for (i = 16; i < 64; i++)
                BLEND_OP(i, W);

        /* load the state into our registers */
        a=state[0];  b=state[1];  c=state[2];  d=state[3];
        e=state[4];  f=state[5];  g=state[6];  h=state[7];

        /* now iterate */
        t1 = h + e1(e) + Ch(e,f,g) + 0x428a2f98 + W[ 0];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x71374491 + W[ 1];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0xb5c0fbcf + W[ 2];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0xe9b5dba5 + W[ 3];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x3956c25b + W[ 4];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x59f111f1 + W[ 5];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x923f82a4 + W[ 6];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0xab1c5ed5 + W[ 7];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0xd807aa98 + W[ 8];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x12835b01 + W[ 9];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x243185be + W[10];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x550c7dc3 + W[11];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x72be5d74 + W[12];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x80deb1fe + W[13];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x9bdc06a7 + W[14];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0xc19bf174 + W[15];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0xe49b69c1 + W[16];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0xefbe4786 + W[17];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x0fc19dc6 + W[18];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x240ca1cc + W[19];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x2de92c6f + W[20];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x4a7484aa + W[21];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x5cb0a9dc + W[22];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x76f988da + W[23];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x983e5152 + W[24];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0xa831c66d + W[25];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0xb00327c8 + W[26];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0xbf597fc7 + W[27];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0xc6e00bf3 + W[28];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0xd5a79147 + W[29];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x06ca6351 + W[30];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x14292967 + W[31];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x27b70a85 + W[32];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x2e1b2138 + W[33];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x4d2c6dfc + W[34];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x53380d13 + W[35];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x650a7354 + W[36];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x766a0abb + W[37];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x81c2c92e + W[38];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x92722c85 + W[39];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0xa2bfe8a1 + W[40];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0xa81a664b + W[41];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0xc24b8b70 + W[42];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0xc76c51a3 + W[43];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0xd192e819 + W[44];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0xd6990624 + W[45];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0xf40e3585 + W[46];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x106aa070 + W[47];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x19a4c116 + W[48];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x1e376c08 + W[49];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x2748774c + W[50];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x34b0bcb5 + W[51];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x391c0cb3 + W[52];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x4ed8aa4a + W[53];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x5b9cca4f + W[54];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x682e6ff3 + W[55];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x748f82ee + W[56];
        t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x78a5636f + W[57];
        t2 = e0(h) + Maj(h,a,b);    c+=t1;    g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x84c87814 + W[58];
        t2 = e0(g) + Maj(g,h,a);    b+=t1;    f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x8cc70208 + W[59];
        t2 = e0(f) + Maj(f,g,h);    a+=t1;    e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x90befffa + W[60];
        t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0xa4506ceb + W[61];
        t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0xbef9a3f7 + W[62];
        t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0xc67178f2 + W[63];
        t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;

        state[0] += a; state[1] += b; state[2] += c; state[3] += d;
        state[4] += e; state[5] += f; state[6] += g; state[7] += h;

        /* clear any sensitive info... */
        a = b = c = d = e = f = g = h = t1 = t2 = 0;
        memset(W, 0, 64 * sizeof(u32));
}

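/*
 * SHA-224 is SHA-256 with a different initial hash value and the final
 * digest truncated to 28 bytes, so it shares the update and transform
 * paths with SHA-256 and only needs its own init and final.
 */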
static int sha224_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        sctx->state[0] = SHA224_H0;
        sctx->state[1] = SHA224_H1;
        sctx->state[2] = SHA224_H2;
        sctx->state[3] = SHA224_H3;
        sctx->state[4] = SHA224_H4;
        sctx->state[5] = SHA224_H5;
        sctx->state[6] = SHA224_H6;
        sctx->state[7] = SHA224_H7;
        sctx->count = 0;

        return 0;
}

static int sha256_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        sctx->state[0] = SHA256_H0;
        sctx->state[1] = SHA256_H1;
        sctx->state[2] = SHA256_H2;
        sctx->state[3] = SHA256_H3;
        sctx->state[4] = SHA256_H4;
        sctx->state[5] = SHA256_H5;
        sctx->state[6] = SHA256_H6;
        sctx->state[7] = SHA256_H7;
        sctx->count = 0;

        return 0;
}

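/*
 * Feed arbitrary-length data into the hash.  Partial blocks are
 * buffered in sctx->buf until 64 bytes accumulate.  Note the
 * "done = -partial" idiom below: done is unsigned, so done + 64 wraps
 * to (64 - partial), the number of bytes needed to top up the buffered
 * block before it is hashed.
 */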
int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
                          unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial, done;
        const u8 *src;

        partial = sctx->count & 0x3f;
        sctx->count += len;
        done = 0;
        src = data;

        if ((partial + len) > 63) {
                if (partial) {
                        done = -partial;
                        memcpy(sctx->buf + partial, data, done + 64);
                        src = sctx->buf;
                }

                do {
                        sha256_transform(sctx->state, src);
                        done += 64;
                        src = data + done;
                } while (done + 63 < len);

                partial = 0;
        }
        memcpy(sctx->buf + partial, src, len - done);

        return 0;
}
EXPORT_SYMBOL(crypto_sha256_update);

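/*
 * Finalize: append the 0x80 terminator and enough zero padding to land
 * 8 bytes short of a block boundary, then the 64-bit big-endian bit
 * count, run the padded tail through update, and emit the state as
 * eight big-endian words.
 */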
static int sha256_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        __be64 bits;
        unsigned int index, pad_len;
        int i;
        static const u8 padding[64] = { 0x80, };

        /* Save number of bits */
        bits = cpu_to_be64(sctx->count << 3);

        /* Pad out to 56 mod 64. */
        index = sctx->count & 0x3f;
        pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
        crypto_sha256_update(desc, padding, pad_len);

        /* Append length (before padding) */
        crypto_sha256_update(desc, (const u8 *)&bits, sizeof(bits));

        /* Store state in digest */
        for (i = 0; i < 8; i++)
                dst[i] = cpu_to_be32(sctx->state[i]);

        /* Zeroize sensitive information. */
        memset(sctx, 0, sizeof(*sctx));

        return 0;
}

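/*
 * SHA-224 finalization: produce the full 32-byte digest (over the
 * SHA-224 initial values) into a stack buffer, copy out only the first
 * 28 bytes, and wipe the remainder.
 */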
static int sha224_final(struct shash_desc *desc, u8 *hash)
{
        u8 D[SHA256_DIGEST_SIZE];

        sha256_final(desc, D);

        memcpy(hash, D, SHA224_DIGEST_SIZE);
        memset(D, 0, SHA256_DIGEST_SIZE);

        return 0;
}

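/*
 * export/import copy the raw sha256_state, letting a partially hashed
 * stream be checkpointed and later resumed, possibly by a different
 * SHA-256 driver that uses the same state layout.
 */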
static int sha256_export(struct shash_desc *desc, void *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        memcpy(out, sctx, sizeof(*sctx));
        return 0;
}

static int sha256_import(struct shash_desc *desc, const void *in)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        memcpy(sctx, in, sizeof(*sctx));
        return 0;
}

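/*
 * Both algorithms register from one array.  cra_name is the generic
 * name users request ("sha256"/"sha224"); cra_driver_name identifies
 * this particular unaccelerated implementation so it can be selected
 * explicitly alongside arch-optimized drivers.
 */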
static struct shash_alg sha256_algs[2] = { {
        .digestsize     =       SHA256_DIGEST_SIZE,
        .init           =       sha256_init,
        .update         =       crypto_sha256_update,
        .final          =       sha256_final,
        .export         =       sha256_export,
        .import         =       sha256_import,
        .descsize       =       sizeof(struct sha256_state),
        .statesize      =       sizeof(struct sha256_state),
        .base           =       {
                .cra_name       =       "sha256",
                .cra_driver_name=       "sha256-generic",
                .cra_flags      =       CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize  =       SHA256_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
}, {
        .digestsize     =       SHA224_DIGEST_SIZE,
        .init           =       sha224_init,
        .update         =       crypto_sha256_update,
        .final          =       sha224_final,
        .descsize       =       sizeof(struct sha256_state),
        .base           =       {
                .cra_name       =       "sha224",
                .cra_driver_name=       "sha224-generic",
                .cra_flags      =       CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize  =       SHA224_BLOCK_SIZE,
                .cra_module     =       THIS_MODULE,
        }
} };

static int __init sha256_generic_mod_init(void)
{
        return crypto_register_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}

static void __exit sha256_generic_mod_fini(void)
{
        crypto_unregister_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}

module_init(sha256_generic_mod_init);
module_exit(sha256_generic_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm");

MODULE_ALIAS("sha224");
MODULE_ALIAS("sha256");