/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *	Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/fpu/api.h>
#include <linux/string.h>

asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
				       u64 rounds);

/*
 * Common glue, shared by every variant below: each helper takes the
 * arch-specific block transform as a parameter.
 */
typedef void (sha256_transform_fn)(u32 *digest, const char *data, u64 rounds);

static int sha256_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len, sha256_transform_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	/*
	 * Fall back to the generic C implementation when the FPU is not
	 * usable in this context, or when the buffered bytes plus the new
	 * data still do not fill a complete block (so no transform would
	 * run anyway).
	 */
	if (!irq_fpu_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/* make sure casting to sha256_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len,
			      (sha256_block_fn *)sha256_xform);
	kernel_fpu_end();

	return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out,
			sha256_transform_fn *sha256_xform)
{
	if (!irq_fpu_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len,
				      (sha256_block_fn *)sha256_xform);
	sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_xform);
	kernel_fpu_end();

	return sha256_base_finish(desc, out);
}

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	return sha256_update(desc, data, len, sha256_transform_ssse3);
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}
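/*
 * Usage sketch (illustrative only, not part of this driver): callers do
 * not invoke these routines directly but go through the generic shash
 * API, which transparently selects the highest-priority "sha256"
 * implementation that registered successfully.  A minimal caller might
 * look roughly like this:
 *
 *	int err;
 *	struct crypto_shash *tfm;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		err = crypto_shash_digest(desc, data, len, out);
 *	}
 *	crypto_free_shash(tfm);
 *	return err;
 *
 * Requesting a driver name such as "sha256-avx" instead of "sha256"
 * would pin the allocation to one specific variant.
 */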
/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ssse3",
		.cra_priority	 = 150,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ssse3",
		.cra_priority	 = 150,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
	return 0;
}

static void unregister_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha256_transform_avx(u32 *digest, const char *data,
				     u64 rounds);

static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha256_update(desc, data, len, sha256_transform_avx);
}

static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_avx);
}

static int sha256_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx",
		.cra_priority	 = 160,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx",
		.cra_priority	 = 160,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx_usable(void)
{
	/*
	 * The CPUID AVX bit alone is not sufficient: the OS must also
	 * have enabled the SSE and YMM xsave states, or the YMM
	 * registers will not be preserved across context switches.
	 */
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (cpu_has_avx)
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha256_avx(void)
{
	if (avx_usable())
		return crypto_register_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
	return 0;
}

static void unregister_sha256_avx(void)
{
	if (avx_usable())
		crypto_unregister_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
}

#else
static inline int register_sha256_avx(void) { return 0; }
static inline void unregister_sha256_avx(void) { }
#endif
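/*
 * The AVX2 variant below is named after the BMI2 RORX instruction
 * (rotate right without affecting flags) that it uses for the SHA-256
 * rotate operations, which is why avx2_usable() checks
 * X86_FEATURE_BMI2 in addition to X86_FEATURE_AVX2.
 */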
#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
asmlinkage void sha256_transform_rorx(u32 *digest, const char *data,
				      u64 rounds);

static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	return sha256_update(desc, data, len, sha256_transform_rorx);
}

static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}

static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx2",
		.cra_priority	 = 170,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx2",
		.cra_priority	 = 170,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
	    boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static int register_sha256_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
	return 0;
}

static void unregister_sha256_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
}

#else
static inline int register_sha256_avx2(void) { return 0; }
static inline void unregister_sha256_avx2(void) { }
#endif
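/*
 * The variant below uses the Intel SHA Extensions (SHA-NI), which
 * provide dedicated SHA-256 round instructions (sha256rnds2,
 * sha256msg1, sha256msg2).  It registers with the highest priority
 * (250), so it is preferred over all of the SIMD variants whenever the
 * CPU supports it.
 */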
#ifdef CONFIG_AS_SHA256_NI
asmlinkage void sha256_ni_transform(u32 *digest, const char *data,
				    u64 rounds);

static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	return sha256_update(desc, data, len, sha256_ni_transform);
}

static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_ni_transform);
}

static int sha256_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ni_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ni_update,
	.final		= sha256_ni_final,
	.finup		= sha256_ni_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ni",
		.cra_priority	 = 250,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ni_update,
	.final		= sha256_ni_final,
	.finup		= sha256_ni_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ni",
		.cra_priority	 = 250,
		.cra_flags	 = CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
	return 0;
}

static void unregister_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
}

#else
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
#endif

/*
 * Register every variant the CPU and toolchain support; on failure,
 * unwind whatever was already registered.
 */
static int __init sha256_ssse3_mod_init(void)
{
	if (register_sha256_ssse3())
		goto fail;

	if (register_sha256_avx()) {
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_avx2()) {
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_ni()) {
		unregister_sha256_avx2();
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha256_ssse3_mod_fini(void)
{
	unregister_sha256_ni();
	unregister_sha256_avx2();
	unregister_sha256_avx();
	unregister_sha256_ssse3();
}

module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha224");
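/*
 * Note: the MODULE_ALIAS_CRYPTO() lines above let the crypto core
 * autoload this module when an "sha256" or "sha224" transform is first
 * requested.  Which variant actually won the priority race on a given
 * machine can be inspected at runtime via /proc/crypto: look for the
 * "driver" field ("sha256-ni", "sha256-avx2", ...) in the entry whose
 * name is "sha256".
 */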