root/drivers/crypto/geode-aes.c


DEFINITIONS

This source file includes the following definitions:
  1. _writefield
  2. _readfield
  3. do_crypt
  4. geode_aes_crypt
  5. geode_setkey_cip
  6. geode_setkey_skcipher
  7. geode_encrypt
  8. geode_decrypt
  9. fallback_init_cip
  10. fallback_exit_cip
  11. geode_init_skcipher
  12. geode_exit_skcipher
  13. geode_skcipher_crypt
  14. geode_cbc_encrypt
  15. geode_cbc_decrypt
  16. geode_ecb_encrypt
  17. geode_ecb_decrypt
  18. geode_aes_remove
  19. geode_aes_probe

// SPDX-License-Identifier: GPL-2.0-or-later
/* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/crypto.h>
#include <linux/spinlock.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <linux/io.h>
#include <linux/delay.h>

#include "geode-aes.h"

/* Static structures */

static void __iomem *_iobase;
static spinlock_t lock;

/* Write a 128-bit field (either a writable key or IV) */
static inline void
_writefield(u32 offset, const void *value)
{
        int i;

        for (i = 0; i < 4; i++)
                iowrite32(((const u32 *) value)[i], _iobase + offset + (i * 4));
}

/* Read a 128-bit field (either a writable key or IV) */
static inline void
_readfield(u32 offset, void *value)
{
        int i;

        for (i = 0; i < 4; i++)
                ((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
}
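
/*
 * The AES engine exposes the key and IV as four consecutive 32-bit
 * MMIO registers, so a 128-bit value is transferred one word at a
 * time. As a rough sketch of what a key load amounts to (register
 * names from geode-aes.h):
 *
 *	u32 key[4] = { ... };
 *	_writefield(AES_WRITEKEY0_REG, key);	// four iowrite32() calls
 *
 * Both helpers assume 'value' points at 16 accessible bytes; callers
 * pass the 16-byte key or IV buffers from the tfm context.
 */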

static int
do_crypt(const void *src, void *dst, u32 len, u32 flags)
{
        u32 status;
        u32 counter = AES_OP_TIMEOUT;

        iowrite32(virt_to_phys((void *)src), _iobase + AES_SOURCEA_REG);
        iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
        iowrite32(len, _iobase + AES_LENA_REG);

        /* Start the operation */
        iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);

        do {
                status = ioread32(_iobase + AES_INTR_REG);
                cpu_relax();
        } while (!(status & AES_INTRA_PENDING) && --counter);

        /* Clear the event */
        iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
        return counter ? 0 : 1;
}
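
/*
 * do_crypt() is a busy-wait primitive: it programs the source and
 * destination addresses, kicks off the operation, and polls the
 * interrupt status register (up to AES_OP_TIMEOUT iterations) until
 * the engine reports completion. It returns 0 on success and 1 if the
 * poll counter expired. Because the engine is given physical addresses
 * obtained with virt_to_phys(), src and dst must be addresses for
 * which that translation is valid, i.e. linear-mapped kernel memory;
 * the skcipher walk used below hands out such chunks.
 */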

static void
geode_aes_crypt(const struct geode_aes_tfm_ctx *tctx, const void *src,
                void *dst, u32 len, u8 *iv, int mode, int dir)
{
        u32 flags = 0;
        unsigned long iflags;
        int ret;

        /* If the source and destination are the same, then
         * we need to turn on the coherent flags, otherwise
         * we don't need to worry
         */

        flags |= (AES_CTRL_DCA | AES_CTRL_SCA);

        if (dir == AES_DIR_ENCRYPT)
                flags |= AES_CTRL_ENCRYPT;

        /* Start the critical section */

        spin_lock_irqsave(&lock, iflags);

        if (mode == AES_MODE_CBC) {
                flags |= AES_CTRL_CBC;
                _writefield(AES_WRITEIV0_REG, iv);
        }

        flags |= AES_CTRL_WRKEY;
        _writefield(AES_WRITEKEY0_REG, tctx->key);

        ret = do_crypt(src, dst, len, flags);
        BUG_ON(ret);

        if (mode == AES_MODE_CBC)
                _readfield(AES_WRITEIV0_REG, iv);

        spin_unlock_irqrestore(&lock, iflags);
}
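
/*
 * There is a single AES engine with a single set of key/IV registers,
 * so every operation takes the global spinlock, reprograms the key
 * (and, for CBC, the IV) and only then starts the transfer. For CBC
 * the updated IV is read back afterwards so that chaining continues
 * correctly across the per-chunk calls of a larger request. A caller
 * sequence looks roughly like:
 *
 *	// one 16-byte ECB encryption, assuming a 128-bit key is set
 *	geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
 *			AES_MODE_ECB, AES_DIR_ENCRYPT);
 */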

/* CRYPTO-API Functions */

static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
                unsigned int len)
{
        struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);
        int ret;

        tctx->keylen = len;

        if (len == AES_KEYSIZE_128) {
                memcpy(tctx->key, key, len);
                return 0;
        }

        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
                /* not supported at all */
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        /*
         * The requested key size is not supported by HW, do a fallback
         */
        tctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        tctx->fallback.cip->base.crt_flags |=
                (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(tctx->fallback.cip, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (tctx->fallback.cip->base.crt_flags &
                                   CRYPTO_TFM_RES_MASK);
        }
        return ret;
}
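
/*
 * Only 128-bit keys are handled in hardware; 192- and 256-bit keys are
 * delegated to the software fallback cipher allocated in
 * fallback_init_cip(). The flag dance above forwards the caller's
 * request flags (CRYPTO_TFM_REQ_MASK) to the fallback and copies any
 * result flags (CRYPTO_TFM_RES_MASK) back on failure, so error
 * reporting behaves as if the fallback were the primary cipher.
 */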

static int geode_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int len)
{
        struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        int ret;

        tctx->keylen = len;

        if (len == AES_KEYSIZE_128) {
                memcpy(tctx->key, key, len);
                return 0;
        }

        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
                /* not supported at all */
                crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        /*
         * The requested key size is not supported by HW, do a fallback
         */
        crypto_skcipher_clear_flags(tctx->fallback.skcipher,
                                    CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(tctx->fallback.skcipher,
                                  crypto_skcipher_get_flags(tfm) &
                                  CRYPTO_TFM_REQ_MASK);
        ret = crypto_skcipher_setkey(tctx->fallback.skcipher, key, len);
        crypto_skcipher_set_flags(tfm,
                                  crypto_skcipher_get_flags(tctx->fallback.skcipher) &
                                  CRYPTO_TFM_RES_MASK);
        return ret;
}

static void
geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
                crypto_cipher_encrypt_one(tctx->fallback.cip, out, in);
                return;
        }

        geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
                        AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static void
geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
                crypto_cipher_decrypt_one(tctx->fallback.cip, out, in);
                return;
        }

        geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
                        AES_MODE_ECB, AES_DIR_DECRYPT);
}
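
/*
 * The single-block cipher entry points mirror the setkey logic: if the
 * stored key length is anything other than 128 bits, the block is
 * handed to the software fallback; otherwise one AES_BLOCK_SIZE ECB
 * operation is run on the engine. The key length was validated in
 * setkey, so the fallback path here is only reached with a key the
 * fallback already accepted.
 */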

static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = crypto_tfm_alg_name(tfm);
        struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        tctx->fallback.cip = crypto_alloc_cipher(name, 0,
                                                 CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(tctx->fallback.cip)) {
                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
                return PTR_ERR(tctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(tctx->fallback.cip);
}

static struct crypto_alg geode_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "geode-aes",
        .cra_priority           = 300,
        .cra_alignmask          = 15,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER |
                                  CRYPTO_ALG_NEED_FALLBACK,
        .cra_init               = fallback_init_cip,
        .cra_exit               = fallback_exit_cip,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct geode_aes_tfm_ctx),
        .cra_module             = THIS_MODULE,
        .cra_u                  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = geode_setkey_cip,
                        .cia_encrypt            = geode_encrypt,
                        .cia_decrypt            = geode_decrypt
                }
        }
};
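
/*
 * cra_alignmask of 15 asks the crypto API for 16-byte aligned buffers,
 * matching the engine's requirements. Flagging this algorithm with
 * CRYPTO_ALG_NEED_FALLBACK, and passing the same bit as the allocation
 * mask in fallback_init_cip(), makes the fallback lookup skip drivers
 * that themselves need a fallback, so this driver cannot end up
 * selecting itself as its own fallback.
 */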

static int geode_init_skcipher(struct crypto_skcipher *tfm)
{
        const char *name = crypto_tfm_alg_name(&tfm->base);
        struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

        tctx->fallback.skcipher =
                crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK |
                                      CRYPTO_ALG_ASYNC);
        if (IS_ERR(tctx->fallback.skcipher)) {
                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
                return PTR_ERR(tctx->fallback.skcipher);
        }

        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
                                    crypto_skcipher_reqsize(tctx->fallback.skcipher));
        return 0;
}
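
/*
 * The request size is grown by enough room for a complete fallback
 * skcipher_request (header plus the fallback's own reqsize), so that
 * geode_skcipher_crypt() can carve a subrequest out of the original
 * request's context instead of allocating one at runtime. Including
 * CRYPTO_ALG_ASYNC in the mask restricts the lookup to synchronous
 * fallbacks, which the copy-and-call pattern below relies on.
 */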

static void geode_exit_skcipher(struct crypto_skcipher *tfm)
{
        struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(tctx->fallback.skcipher);
}

static int geode_skcipher_crypt(struct skcipher_request *req, int mode, int dir)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                *subreq = *req;
                skcipher_request_set_tfm(subreq, tctx->fallback.skcipher);
                if (dir == AES_DIR_DECRYPT)
                        return crypto_skcipher_decrypt(subreq);
                else
                        return crypto_skcipher_encrypt(subreq);
        }

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) != 0) {
                geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr,
                                round_down(nbytes, AES_BLOCK_SIZE),
                                walk.iv, mode, dir);
                err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
        }

        return err;
}
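
/*
 * skcipher_walk_virt() flattens the request's scatterlists into
 * virtually contiguous chunks. Each pass processes the largest
 * whole-block span (round_down to AES_BLOCK_SIZE) and hands the
 * remainder back via skcipher_walk_done(), which advances the walk so
 * the leftover bytes reappear in the next iteration. Since CBC state
 * is read back into walk.iv after every chunk in geode_aes_crypt(),
 * chaining is preserved across chunk boundaries.
 */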

static int geode_cbc_encrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_ENCRYPT);
}

static int geode_cbc_decrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_DECRYPT);
}

static int geode_ecb_encrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static int geode_ecb_decrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_DECRYPT);
}

static struct skcipher_alg geode_skcipher_algs[] = {
        {
                .base.cra_name          = "cbc(aes)",
                .base.cra_driver_name   = "cbc-aes-geode",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_KERN_DRIVER_ONLY |
                                          CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct geode_aes_tfm_ctx),
                .base.cra_alignmask     = 15,
                .base.cra_module        = THIS_MODULE,
                .init                   = geode_init_skcipher,
                .exit                   = geode_exit_skcipher,
                .setkey                 = geode_setkey_skcipher,
                .encrypt                = geode_cbc_encrypt,
                .decrypt                = geode_cbc_decrypt,
                .min_keysize            = AES_MIN_KEY_SIZE,
                .max_keysize            = AES_MAX_KEY_SIZE,
                .ivsize                 = AES_BLOCK_SIZE,
        }, {
                .base.cra_name          = "ecb(aes)",
                .base.cra_driver_name   = "ecb-aes-geode",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_KERN_DRIVER_ONLY |
                                          CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct geode_aes_tfm_ctx),
                .base.cra_alignmask     = 15,
                .base.cra_module        = THIS_MODULE,
                .init                   = geode_init_skcipher,
                .exit                   = geode_exit_skcipher,
                .setkey                 = geode_setkey_skcipher,
                .encrypt                = geode_ecb_encrypt,
                .decrypt                = geode_ecb_decrypt,
                .min_keysize            = AES_MIN_KEY_SIZE,
                .max_keysize            = AES_MAX_KEY_SIZE,
        },
};

static void geode_aes_remove(struct pci_dev *dev)
{
        crypto_unregister_alg(&geode_alg);
        crypto_unregister_skciphers(geode_skcipher_algs,
                                    ARRAY_SIZE(geode_skcipher_algs));

        pci_iounmap(dev, _iobase);
        _iobase = NULL;

        pci_release_regions(dev);
        pci_disable_device(dev);
}

static int geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
        int ret;

        ret = pci_enable_device(dev);
        if (ret)
                return ret;

        ret = pci_request_regions(dev, "geode-aes");
        if (ret)
                goto eenable;

        _iobase = pci_iomap(dev, 0, 0);

        if (_iobase == NULL) {
                ret = -ENOMEM;
                goto erequest;
        }

        spin_lock_init(&lock);

        /* Clear any pending activity */
        iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);

        ret = crypto_register_alg(&geode_alg);
        if (ret)
                goto eiomap;

        ret = crypto_register_skciphers(geode_skcipher_algs,
                                        ARRAY_SIZE(geode_skcipher_algs));
        if (ret)
                goto ealg;

        dev_notice(&dev->dev, "GEODE AES engine enabled.\n");
        return 0;

 ealg:
        crypto_unregister_alg(&geode_alg);

 eiomap:
        pci_iounmap(dev, _iobase);

 erequest:
        pci_release_regions(dev);

 eenable:
        pci_disable_device(dev);

        dev_err(&dev->dev, "GEODE AES initialization failed.\n");
        return ret;
}
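
/*
 * The error labels unwind in exact reverse order of acquisition: the
 * registered skciphers depend on the registered cipher alg, which
 * depends on the iomap, which depends on the requested regions, which
 * depend on the enabled device. Jumping to the label just below the
 * failing step therefore releases everything acquired so far and
 * nothing more.
 */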

static struct pci_device_id geode_aes_tbl[] = {
        { PCI_VDEVICE(AMD, PCI_DEVICE_ID_AMD_LX_AES), },
        { 0, }
};

MODULE_DEVICE_TABLE(pci, geode_aes_tbl);

static struct pci_driver geode_aes_driver = {
        .name = "Geode LX AES",
        .id_table = geode_aes_tbl,
        .probe = geode_aes_probe,
        .remove = geode_aes_remove,
};

module_pci_driver(geode_aes_driver);
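
/*
 * module_pci_driver() replaces the old hand-written init/exit
 * boilerplate; it expands to roughly the following (a sketch, the
 * actual macro comes from <linux/pci.h> via module_driver()):
 *
 *	static int __init geode_aes_driver_init(void)
 *	{
 *		return pci_register_driver(&geode_aes_driver);
 *	}
 *	module_init(geode_aes_driver_init);
 *
 *	static void __exit geode_aes_driver_exit(void)
 *	{
 *		pci_unregister_driver(&geode_aes_driver);
 *	}
 *	module_exit(geode_aes_driver_exit);
 */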

MODULE_AUTHOR("Advanced Micro Devices, Inc.");
MODULE_DESCRIPTION("Geode LX Hardware AES driver");
MODULE_LICENSE("GPL");
