/linux-4.1.27/arch/x86/crypto/ |
D | glue_helper.c | 41 unsigned int nbytes, i, func_bytes; in __glue_ecb_crypt_128bit() local 47 while ((nbytes = walk->nbytes)) { in __glue_ecb_crypt_128bit() 52 desc, fpu_enabled, nbytes); in __glue_ecb_crypt_128bit() 58 if (nbytes >= func_bytes) { in __glue_ecb_crypt_128bit() 65 nbytes -= func_bytes; in __glue_ecb_crypt_128bit() 66 } while (nbytes >= func_bytes); in __glue_ecb_crypt_128bit() 68 if (nbytes < bsize) in __glue_ecb_crypt_128bit() 74 err = blkcipher_walk_done(desc, walk, nbytes); in __glue_ecb_crypt_128bit() 83 struct scatterlist *src, unsigned int nbytes) in glue_ecb_crypt_128bit() argument 87 blkcipher_walk_init(&walk, dst, src, nbytes); in glue_ecb_crypt_128bit() [all …]
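The glue_helper.c hits above are the common blkcipher walk loop shared by the x86 SIMD glue code. A minimal sketch of that pattern follows, assuming a hypothetical 128-bit block cipher with a single-block helper my_cipher_encrypt(); the FPU handling and the multi-block fast paths of the real __glue_ecb_crypt_128bit() are omitted.

#include <crypto/algapi.h>
#include <linux/crypto.h>

/* hypothetical single-block encrypt helper, not a kernel API */
void my_cipher_encrypt(void *ctx, u8 *dst, const u8 *src);

static int my_ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                          struct scatterlist *src, unsigned int nbytes)
{
        const unsigned int bsize = 16;          /* 128-bit block size */
        void *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                u8 *wsrc = walk.src.virt.addr;
                u8 *wdst = walk.dst.virt.addr;

                /* process every full block mapped in this chunk */
                do {
                        my_cipher_encrypt(ctx, wdst, wsrc);
                        wsrc += bsize;
                        wdst += bsize;
                } while ((nbytes -= bsize) >= bsize);

                /* hand the sub-block remainder back to the walk */
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}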
|
D | cast5_avx_glue.c | 49 static inline bool cast5_fpu_begin(bool fpu_enabled, unsigned int nbytes) in cast5_fpu_begin() argument 52 NULL, fpu_enabled, nbytes); in cast5_fpu_begin() 66 unsigned int nbytes; in ecb_crypt() local 75 while ((nbytes = walk->nbytes)) { in ecb_crypt() 79 fpu_enabled = cast5_fpu_begin(fpu_enabled, nbytes); in ecb_crypt() 82 if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) { in ecb_crypt() 88 nbytes -= bsize * CAST5_PARALLEL_BLOCKS; in ecb_crypt() 89 } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS); in ecb_crypt() 91 if (nbytes < bsize) in ecb_crypt() 103 nbytes -= bsize; in ecb_crypt() [all …]
|
D | blowfish_glue.c | 86 unsigned int nbytes; in ecb_crypt() local 91 while ((nbytes = walk->nbytes)) { in ecb_crypt() 96 if (nbytes >= bsize * 4) { in ecb_crypt() 102 nbytes -= bsize * 4; in ecb_crypt() 103 } while (nbytes >= bsize * 4); in ecb_crypt() 105 if (nbytes < bsize) in ecb_crypt() 115 nbytes -= bsize; in ecb_crypt() 116 } while (nbytes >= bsize); in ecb_crypt() 119 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt() 126 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument [all …]
|
D | des3_ede_glue.c | 90 unsigned int nbytes; in ecb_crypt() local 95 while ((nbytes = walk->nbytes)) { in ecb_crypt() 100 if (nbytes >= bsize * 3) { in ecb_crypt() 107 nbytes -= bsize * 3; in ecb_crypt() 108 } while (nbytes >= bsize * 3); in ecb_crypt() 110 if (nbytes < bsize) in ecb_crypt() 120 nbytes -= bsize; in ecb_crypt() 121 } while (nbytes >= bsize); in ecb_crypt() 124 err = blkcipher_walk_done(desc, walk, nbytes); in ecb_crypt() 131 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument [all …]
|
D | camellia_aesni_avx2_glue.c | 155 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 157 return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes); in ecb_encrypt() 161 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 163 return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes); in ecb_decrypt() 167 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 170 dst, src, nbytes); in cbc_encrypt() 174 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 177 nbytes); in cbc_decrypt() 181 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 183 return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | serpent_avx2_glue.c | 141 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 143 return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes); in ecb_encrypt() 147 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 149 return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes); in ecb_decrypt() 153 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 156 dst, src, nbytes); in cbc_encrypt() 160 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 163 nbytes); in cbc_decrypt() 167 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 169 return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | camellia_aesni_avx_glue.c | 159 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 161 return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes); in ecb_encrypt() 165 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 167 return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes); in ecb_decrypt() 171 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 174 dst, src, nbytes); in cbc_encrypt() 178 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 181 nbytes); in cbc_decrypt() 185 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 187 return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | twofish_avx_glue.c | 176 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 178 return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes); in ecb_encrypt() 182 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 184 return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes); in ecb_decrypt() 188 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 191 dst, src, nbytes); in cbc_encrypt() 195 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 198 nbytes); in cbc_decrypt() 202 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 204 return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | salsa20_glue.c | 50 unsigned int nbytes) in encrypt() argument 57 blkcipher_walk_init(&walk, dst, src, nbytes); in encrypt() 62 if (likely(walk.nbytes == nbytes)) in encrypt() 65 walk.dst.virt.addr, nbytes); in encrypt() 69 while (walk.nbytes >= 64) { in encrypt() 72 walk.nbytes - (walk.nbytes % 64)); in encrypt() 73 err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64); in encrypt() 76 if (walk.nbytes) { in encrypt() 78 walk.dst.virt.addr, walk.nbytes); in encrypt()
|
D | twofish_glue_3way.c | 155 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 157 return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes); in ecb_encrypt() 161 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 163 return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes); in ecb_decrypt() 167 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 170 dst, src, nbytes); in cbc_encrypt() 174 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 177 nbytes); in cbc_decrypt() 181 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 183 return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | cast6_avx_glue.c | 162 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 164 return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes); in ecb_encrypt() 168 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 170 return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes); in ecb_decrypt() 174 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 177 dst, src, nbytes); in cbc_encrypt() 181 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 184 nbytes); in cbc_decrypt() 188 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 190 return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | serpent_avx_glue.c | 175 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 177 return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes); in ecb_encrypt() 181 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 183 return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes); in ecb_decrypt() 187 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 190 dst, src, nbytes); in cbc_encrypt() 194 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 197 nbytes); in cbc_decrypt() 201 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 203 return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | serpent_sse2_glue.c | 143 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 145 return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes); in ecb_encrypt() 149 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 151 return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes); in ecb_decrypt() 155 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 158 dst, src, nbytes); in cbc_encrypt() 162 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 165 nbytes); in cbc_decrypt() 169 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 171 return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | aesni-intel_glue.c | 375 unsigned int nbytes) in ecb_encrypt() argument 381 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt() 386 while ((nbytes = walk.nbytes)) { in ecb_encrypt() 388 nbytes & AES_BLOCK_MASK); in ecb_encrypt() 389 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt() 390 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt() 399 unsigned int nbytes) in ecb_decrypt() argument 405 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt() 410 while ((nbytes = walk.nbytes)) { in ecb_decrypt() 412 nbytes & AES_BLOCK_MASK); in ecb_decrypt() [all …]
|
D | camellia_glue.c | 1377 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 1379 return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes); in ecb_encrypt() 1383 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 1385 return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes); in ecb_decrypt() 1389 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 1392 dst, src, nbytes); in cbc_encrypt() 1396 struct scatterlist *src, unsigned int nbytes) in cbc_decrypt() argument 1399 nbytes); in cbc_decrypt() 1403 struct scatterlist *src, unsigned int nbytes) in ctr_crypt() argument 1405 return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes); in ctr_crypt() [all …]
|
D | fpu.c | 45 unsigned int nbytes) in crypto_fpu_encrypt() argument 57 err = crypto_blkcipher_crt(desc.tfm)->encrypt(&desc, dst, src, nbytes); in crypto_fpu_encrypt() 64 unsigned int nbytes) in crypto_fpu_decrypt() argument 76 err = crypto_blkcipher_crt(desc.tfm)->decrypt(&desc, dst, src, nbytes); in crypto_fpu_decrypt()
|
/linux-4.1.27/drivers/staging/unisys/visorutil/ |
D | memregion_direct.c | 30 ulong nbytes; member 40 visor_memregion_create(HOSTADDRESS physaddr, ulong nbytes) in visor_memregion_create() argument 50 memregion->nbytes = nbytes; in visor_memregion_create() 68 ulong nbytes) in visor_memregion_create_overlapped() argument 78 if ((offset >= parent->nbytes) || in visor_memregion_create_overlapped() 79 ((offset + nbytes) >= parent->nbytes)) in visor_memregion_create_overlapped() 87 memregion->nbytes = nbytes; in visor_memregion_create_overlapped() 99 ulong nbytes = memregion->nbytes; in mapit() local 102 if (request_mem_region(physaddr, nbytes, MYDRVNAME)) in mapit() 104 memregion->mapped = ioremap_cache(physaddr, nbytes); in mapit() [all …]
|
D | memregion.h | 28 struct memregion *visor_memregion_create(HOSTADDRESS physaddr, ulong nbytes); 30 ulong offset, ulong nbytes); 33 ulong offset, void *dest, ulong nbytes); 35 ulong offset, void *src, ulong nbytes);
|
/linux-4.1.27/arch/powerpc/crypto/ |
D | aes-spe-glue.c | 180 struct scatterlist *src, unsigned int nbytes) in ppc_ecb_encrypt() argument 188 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_encrypt() 191 while ((nbytes = walk.nbytes)) { in ppc_ecb_encrypt() 192 ubytes = nbytes > MAX_BYTES ? in ppc_ecb_encrypt() 193 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_ecb_encrypt() 194 nbytes -= ubytes; in ppc_ecb_encrypt() 198 ctx->key_enc, ctx->rounds, nbytes); in ppc_ecb_encrypt() 208 struct scatterlist *src, unsigned int nbytes) in ppc_ecb_decrypt() argument 216 blkcipher_walk_init(&walk, dst, src, nbytes); in ppc_ecb_decrypt() 219 while ((nbytes = walk.nbytes)) { in ppc_ecb_decrypt() [all …]
|
/linux-4.1.27/crypto/ |
D | cbc.c | 49 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_segment() local 61 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_segment() 63 return nbytes; in crypto_cbc_encrypt_segment() 73 unsigned int nbytes = walk->nbytes; in crypto_cbc_encrypt_inplace() local 83 } while ((nbytes -= bsize) >= bsize); in crypto_cbc_encrypt_inplace() 87 return nbytes; in crypto_cbc_encrypt_inplace() 92 unsigned int nbytes) in crypto_cbc_encrypt() argument 100 blkcipher_walk_init(&walk, dst, src, nbytes); in crypto_cbc_encrypt() 103 while ((nbytes = walk.nbytes)) { in crypto_cbc_encrypt() 105 nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child); in crypto_cbc_encrypt() [all …]
|
D | pcbc.c | 52 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_segment() local 65 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_segment() 67 return nbytes; in crypto_pcbc_encrypt_segment() 77 unsigned int nbytes = walk->nbytes; in crypto_pcbc_encrypt_inplace() local 90 } while ((nbytes -= bsize) >= bsize); in crypto_pcbc_encrypt_inplace() 94 return nbytes; in crypto_pcbc_encrypt_inplace() 99 unsigned int nbytes) in crypto_pcbc_encrypt() argument 107 blkcipher_walk_init(&walk, dst, src, nbytes); in crypto_pcbc_encrypt() 110 while ((nbytes = walk.nbytes)) { in crypto_pcbc_encrypt() 112 nbytes = crypto_pcbc_encrypt_inplace(desc, &walk, in crypto_pcbc_encrypt() [all …]
|
D | xts.c | 109 if (!w->nbytes) in crypt() 113 avail = w->nbytes; in crypt() 135 if (!w->nbytes) in crypt() 138 avail = w->nbytes; in crypt() 148 struct scatterlist *src, unsigned int nbytes) in encrypt() argument 153 blkcipher_walk_init(&w, dst, src, nbytes); in encrypt() 159 struct scatterlist *src, unsigned int nbytes) in decrypt() argument 164 blkcipher_walk_init(&w, dst, src, nbytes); in decrypt() 170 struct scatterlist *ssrc, unsigned int nbytes, in xts_crypt() argument 183 blkcipher_walk_init(&walk, sdst, ssrc, nbytes); in xts_crypt() [all …]
|
D | scatterwalk.c | 25 static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) in memcpy_dir() argument 30 memcpy(dst, src, nbytes); in memcpy_dir() 77 size_t nbytes, int out) in scatterwalk_copychunks() argument 83 if (len_this_page > nbytes) in scatterwalk_copychunks() 84 len_this_page = nbytes; in scatterwalk_copychunks() 92 if (nbytes == len_this_page) in scatterwalk_copychunks() 96 nbytes -= len_this_page; in scatterwalk_copychunks() 104 unsigned int start, unsigned int nbytes, int out) in scatterwalk_map_and_copy() argument 109 if (!nbytes) in scatterwalk_map_and_copy() 123 scatterwalk_copychunks(buf, &walk, nbytes, out); in scatterwalk_map_and_copy()
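scatterwalk_copychunks() and scatterwalk_map_and_copy() are the low-level helpers the cipher walks above are built on. A small usage sketch with a hypothetical caller (the helper itself is the real API):

#include <crypto/scatterwalk.h>

/* Copy taglen bytes out of a scatterlist at the given offset into a flat
 * buffer. The final argument selects direction: 0 = sg -> buf, 1 = buf -> sg. */
static void read_tag(struct scatterlist *sg, unsigned int offset,
                     u8 *tag, unsigned int taglen)
{
        scatterwalk_map_and_copy(tag, sg, offset, taglen, 0);
}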
|
D | shash.c | 223 int nbytes; in shash_ahash_update() local 225 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; in shash_ahash_update() 226 nbytes = crypto_hash_walk_done(&walk, nbytes)) in shash_ahash_update() 227 nbytes = crypto_shash_update(desc, walk.data, nbytes); in shash_ahash_update() 229 return nbytes; in shash_ahash_update() 246 int nbytes; in shash_ahash_finup() local 248 nbytes = crypto_hash_walk_first(req, &walk); in shash_ahash_finup() 249 if (!nbytes) in shash_ahash_finup() 253 nbytes = crypto_hash_walk_last(&walk) ? in shash_ahash_finup() 254 crypto_shash_finup(desc, walk.data, nbytes, in shash_ahash_finup() [all …]
|
D | cts.c | 78 unsigned int nbytes) in cts_cbc_encrypt() argument 84 int lastn = nbytes - bsize; in cts_cbc_encrypt() 96 scatterwalk_map_and_copy(s, src, offset, nbytes, 0); in cts_cbc_encrypt() 118 scatterwalk_map_and_copy(d, dst, offset, nbytes, 1); in cts_cbc_encrypt() 127 unsigned int nbytes) in crypto_cts_encrypt() argument 131 int tot_blocks = (nbytes + bsize - 1) / bsize; in crypto_cts_encrypt() 142 } else if (nbytes <= bsize * 2) { in crypto_cts_encrypt() 143 err = cts_cbc_encrypt(ctx, desc, dst, src, 0, nbytes); in crypto_cts_encrypt() 152 nbytes - (cbc_blocks * bsize)); in crypto_cts_encrypt() 164 unsigned int nbytes) in cts_cbc_decrypt() argument [all …]
|
D | lrw.c | 154 if (!(avail = w->nbytes)) in crypt() 185 if (!(avail = w->nbytes)) in crypt() 196 struct scatterlist *src, unsigned int nbytes) in encrypt() argument 201 blkcipher_walk_init(&w, dst, src, nbytes); in encrypt() 207 struct scatterlist *src, unsigned int nbytes) in decrypt() argument 212 blkcipher_walk_init(&w, dst, src, nbytes); in decrypt() 218 struct scatterlist *ssrc, unsigned int nbytes, in lrw_crypt() argument 232 blkcipher_walk_init(&walk, sdst, ssrc, nbytes); in lrw_crypt() 235 nbytes = walk.nbytes; in lrw_crypt() 236 if (!nbytes) in lrw_crypt() [all …]
|
D | ctr.c | 65 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_final() local 68 crypto_xor(keystream, src, nbytes); in crypto_ctr_crypt_final() 69 memcpy(dst, keystream, nbytes); in crypto_ctr_crypt_final() 83 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_segment() local 95 } while ((nbytes -= bsize) >= bsize); in crypto_ctr_crypt_segment() 97 return nbytes; in crypto_ctr_crypt_segment() 107 unsigned int nbytes = walk->nbytes; in crypto_ctr_crypt_inplace() local 122 } while ((nbytes -= bsize) >= bsize); in crypto_ctr_crypt_inplace() 124 return nbytes; in crypto_ctr_crypt_inplace() 129 unsigned int nbytes) in crypto_ctr_crypt() argument [all …]
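The crypto_ctr_crypt_final() lines show how CTR mode finishes a request whose length is not a multiple of the block size: the counter block is encrypted once and only nbytes of keystream are consumed. A simplified sketch, not the exact ctr.c code, with the block size assumed to be 16:

#include <linux/crypto.h>
#include <crypto/algapi.h>      /* crypto_xor() */
#include <linux/string.h>

static void ctr_final_block(struct crypto_cipher *cipher, u8 *ctrblk,
                            const u8 *src, u8 *dst, unsigned int nbytes)
{
        u8 keystream[16];

        /* one keystream block, of which only nbytes are actually used */
        crypto_cipher_encrypt_one(cipher, keystream, ctrblk);
        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);
}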
|
D | ecb.c | 47 unsigned int nbytes; in crypto_ecb_crypt() local 52 while ((nbytes = walk->nbytes)) { in crypto_ecb_crypt() 61 } while ((nbytes -= bsize) >= bsize); in crypto_ecb_crypt() 63 err = blkcipher_walk_done(desc, walk, nbytes); in crypto_ecb_crypt() 71 unsigned int nbytes) in crypto_ecb_encrypt() argument 78 blkcipher_walk_init(&walk, dst, src, nbytes); in crypto_ecb_encrypt() 85 unsigned int nbytes) in crypto_ecb_decrypt() argument 92 blkcipher_walk_init(&walk, dst, src, nbytes); in crypto_ecb_decrypt()
|
D | salsa20_generic.c | 179 unsigned int nbytes) in encrypt() argument 186 blkcipher_walk_init(&walk, dst, src, nbytes); in encrypt() 191 if (likely(walk.nbytes == nbytes)) in encrypt() 194 walk.src.virt.addr, nbytes); in encrypt() 198 while (walk.nbytes >= 64) { in encrypt() 201 walk.nbytes - (walk.nbytes % 64)); in encrypt() 202 err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64); in encrypt() 205 if (walk.nbytes) { in encrypt() 207 walk.src.virt.addr, walk.nbytes); in encrypt()
|
D | ahash.c | 47 unsigned int nbytes = min(walk->entrylen, in hash_walk_next() local 59 if (nbytes > unaligned) in hash_walk_next() 60 nbytes = unaligned; in hash_walk_next() 63 walk->entrylen -= nbytes; in hash_walk_next() 64 return nbytes; in hash_walk_next() 87 unsigned int nbytes = walk->entrylen; in crypto_hash_walk_done() local 91 if (nbytes && walk->offset & alignmask && !err) { in crypto_hash_walk_done() 95 nbytes = min(nbytes, in crypto_hash_walk_done() 97 walk->entrylen -= nbytes; in crypto_hash_walk_done() 99 return nbytes; in crypto_hash_walk_done() [all …]
|
D | crypto_null.c | 75 struct scatterlist *src, unsigned int nbytes) in skcipher_null_crypt() argument 80 blkcipher_walk_init(&walk, dst, src, nbytes); in skcipher_null_crypt() 83 while (walk.nbytes) { in skcipher_null_crypt() 86 walk.nbytes); in skcipher_null_crypt()
|
D | blkcipher.c | 106 unsigned int nbytes = 0; in blkcipher_walk_done() local 109 unsigned int n = walk->nbytes - err; in blkcipher_walk_done() 119 nbytes = walk->total - n; in blkcipher_walk_done() 123 scatterwalk_done(&walk->in, 0, nbytes); in blkcipher_walk_done() 124 scatterwalk_done(&walk->out, 1, nbytes); in blkcipher_walk_done() 127 walk->total = nbytes; in blkcipher_walk_done() 128 walk->nbytes = nbytes; in blkcipher_walk_done() 130 if (nbytes) { in blkcipher_walk_done() 176 walk->nbytes = bsize; in blkcipher_next_slow() 187 memcpy(tmp, walk->src.virt.addr, walk->nbytes); in blkcipher_next_copy() [all …]
|
D | arc4.c | 96 struct scatterlist *src, unsigned int nbytes) in ecb_arc4_crypt() argument 102 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_arc4_crypt() 106 while (walk.nbytes > 0) { in ecb_arc4_crypt() 110 arc4_crypt(ctx, wdst, wsrc, walk.nbytes); in ecb_arc4_crypt()
|
D | ablkcipher.c | 112 unsigned int nbytes = 0; in ablkcipher_walk_done() local 115 unsigned int n = walk->nbytes - err; in ablkcipher_walk_done() 125 nbytes = walk->total - n; in ablkcipher_walk_done() 129 scatterwalk_done(&walk->in, 0, nbytes); in ablkcipher_walk_done() 130 scatterwalk_done(&walk->out, 1, nbytes); in ablkcipher_walk_done() 133 walk->total = nbytes; in ablkcipher_walk_done() 134 walk->nbytes = nbytes; in ablkcipher_walk_done() 136 if (nbytes) { in ablkcipher_walk_done() 180 walk->nbytes = bsize; in ablkcipher_next_slow() 255 walk->nbytes = n; in ablkcipher_walk_next() [all …]
|
D | ansi_cprng.c | 188 static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx, in get_prng_bytes() argument 192 unsigned int byte_count = (unsigned int)nbytes; in get_prng_bytes() 208 if (nbytes < DEFAULT_BLK_SZ) in get_prng_bytes() 226 memset(buf, 0, nbytes); in get_prng_bytes() 253 memset(buf, 0, nbytes); in get_prng_bytes()
|
D | hmac.c | 115 const u8 *data, unsigned int nbytes) in hmac_update() argument 121 return crypto_shash_update(desc, data, nbytes); in hmac_update() 140 unsigned int nbytes, u8 *out) in hmac_finup() argument 151 return crypto_shash_finup(desc, data, nbytes, out) ?: in hmac_finup()
|
D | ablk_helper.c | 65 &desc, req->dst, req->src, req->nbytes); in __ablk_encrypt() 109 &desc, req->dst, req->src, req->nbytes); in ablk_decrypt()
|
/linux-4.1.27/arch/s390/crypto/ |
D | des_s390.c | 89 unsigned int nbytes; in ecb_desall_crypt() local 91 while ((nbytes = walk->nbytes)) { in ecb_desall_crypt() 93 unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1); in ecb_desall_crypt() 101 nbytes &= DES_BLOCK_SIZE - 1; in ecb_desall_crypt() 102 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_desall_crypt() 113 unsigned int nbytes = walk->nbytes; in cbc_desall_crypt() local 119 if (!nbytes) in cbc_desall_crypt() 126 unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1); in cbc_desall_crypt() 134 nbytes &= DES_BLOCK_SIZE - 1; in cbc_desall_crypt() 135 ret = blkcipher_walk_done(desc, walk, nbytes); in cbc_desall_crypt() [all …]
|
D | aes_s390.c | 253 unsigned int nbytes) in fallback_blk_dec() argument 262 ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes); in fallback_blk_dec() 270 unsigned int nbytes) in fallback_blk_enc() argument 279 ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes); in fallback_blk_enc() 319 unsigned int nbytes; in ecb_aes_crypt() local 321 while ((nbytes = walk->nbytes)) { in ecb_aes_crypt() 323 unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_aes_crypt() 331 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_crypt() 332 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_aes_crypt() 340 unsigned int nbytes) in ecb_aes_encrypt() argument [all …]
|
D | prng.c | 113 static int generate_entropy(u8 *ebuf, size_t nbytes) in generate_entropy() argument 124 while (nbytes) { in generate_entropy() 132 n = (nbytes < sizeof(hash)) ? nbytes : sizeof(hash); in generate_entropy() 148 nbytes -= n; in generate_entropy() 175 static void prng_tdes_seed(int nbytes) in prng_tdes_seed() argument 180 BUG_ON(nbytes > sizeof(buf)); in prng_tdes_seed() 182 get_random_bytes(buf, nbytes); in prng_tdes_seed() 185 while (nbytes >= 8) { in prng_tdes_seed() 189 nbytes -= 8; in prng_tdes_seed() 461 static int prng_sha512_generate(u8 *buf, size_t nbytes) in prng_sha512_generate() argument [all …]
|
/linux-4.1.27/arch/sparc/crypto/ |
D | des_glue.c | 95 unsigned int nbytes, bool encrypt) in __ecb_crypt() argument 101 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt() 109 while ((nbytes = walk.nbytes)) { in __ecb_crypt() 110 unsigned int block_len = nbytes & DES_BLOCK_MASK; in __ecb_crypt() 117 nbytes &= DES_BLOCK_SIZE - 1; in __ecb_crypt() 118 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt() 126 unsigned int nbytes) in ecb_encrypt() argument 128 return __ecb_crypt(desc, dst, src, nbytes, true); in ecb_encrypt() 133 unsigned int nbytes) in ecb_decrypt() argument 135 return __ecb_crypt(desc, dst, src, nbytes, false); in ecb_decrypt() [all …]
|
D | camellia_glue.c | 87 unsigned int nbytes, bool encrypt) in __ecb_crypt() argument 99 blkcipher_walk_init(&walk, dst, src, nbytes); in __ecb_crypt() 108 while ((nbytes = walk.nbytes)) { in __ecb_crypt() 109 unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK; in __ecb_crypt() 119 nbytes &= CAMELLIA_BLOCK_SIZE - 1; in __ecb_crypt() 120 err = blkcipher_walk_done(desc, &walk, nbytes); in __ecb_crypt() 128 unsigned int nbytes) in ecb_encrypt() argument 130 return __ecb_crypt(desc, dst, src, nbytes, true); in ecb_encrypt() 135 unsigned int nbytes) in ecb_decrypt() argument 137 return __ecb_crypt(desc, dst, src, nbytes, false); in ecb_decrypt() [all …]
|
D | aes_glue.c | 217 unsigned int nbytes) in ecb_encrypt() argument 223 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt() 228 while ((nbytes = walk.nbytes)) { in ecb_encrypt() 229 unsigned int block_len = nbytes & AES_BLOCK_MASK; in ecb_encrypt() 237 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt() 238 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_encrypt() 246 unsigned int nbytes) in ecb_decrypt() argument 253 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt() 259 while ((nbytes = walk.nbytes)) { in ecb_decrypt() 260 unsigned int block_len = nbytes & AES_BLOCK_MASK; in ecb_decrypt() [all …]
|
/linux-4.1.27/drivers/staging/comedi/ |
D | comedi_buf.c | 258 unsigned int nbytes) in comedi_buf_write_alloc() argument 263 if (nbytes > available) in comedi_buf_write_alloc() 264 nbytes = available; in comedi_buf_write_alloc() 266 async->buf_write_alloc_count += nbytes; in comedi_buf_write_alloc() 274 return nbytes; in comedi_buf_write_alloc() 334 unsigned int nbytes) in comedi_buf_write_free() argument 339 if (nbytes > allocated) in comedi_buf_write_free() 340 nbytes = allocated; in comedi_buf_write_free() 342 async->buf_write_count += nbytes; in comedi_buf_write_free() 343 async->buf_write_ptr += nbytes; in comedi_buf_write_free() [all …]
|
/linux-4.1.27/drivers/crypto/vmx/ |
D | aes_cbc.c | 95 unsigned int nbytes) in p8_aes_cbc_encrypt() argument 108 ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes); in p8_aes_cbc_encrypt() 114 blkcipher_walk_init(&walk, dst, src, nbytes); in p8_aes_cbc_encrypt() 116 while ((nbytes = walk.nbytes)) { in p8_aes_cbc_encrypt() 118 nbytes & AES_BLOCK_MASK, &ctx->enc_key, walk.iv, 1); in p8_aes_cbc_encrypt() 119 nbytes &= AES_BLOCK_SIZE - 1; in p8_aes_cbc_encrypt() 120 ret = blkcipher_walk_done(desc, &walk, nbytes); in p8_aes_cbc_encrypt() 131 unsigned int nbytes) in p8_aes_cbc_decrypt() argument 144 ret = crypto_blkcipher_decrypt(&fallback_desc, dst, src, nbytes); in p8_aes_cbc_decrypt() 150 blkcipher_walk_init(&walk, dst, src, nbytes); in p8_aes_cbc_decrypt() [all …]
|
D | aes_ctr.c | 97 unsigned int nbytes = walk->nbytes; in p8_aes_ctr_final() local 105 crypto_xor(keystream, src, nbytes); in p8_aes_ctr_final() 106 memcpy(dst, keystream, nbytes); in p8_aes_ctr_final() 112 unsigned int nbytes) in p8_aes_ctr_crypt() argument 125 ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes); in p8_aes_ctr_crypt() 127 blkcipher_walk_init(&walk, dst, src, nbytes); in p8_aes_ctr_crypt() 129 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt() 134 (nbytes & AES_BLOCK_MASK)/AES_BLOCK_SIZE, &ctx->enc_key, walk.iv); in p8_aes_ctr_crypt() 138 nbytes &= AES_BLOCK_SIZE - 1; in p8_aes_ctr_crypt() 139 ret = blkcipher_walk_done(desc, &walk, nbytes); in p8_aes_ctr_crypt() [all …]
|
/linux-4.1.27/drivers/crypto/ |
D | geode-aes.c | 184 unsigned int nbytes) in fallback_blk_dec() argument 193 ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes); in fallback_blk_dec() 200 unsigned int nbytes) in fallback_blk_enc() argument 209 ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes); in fallback_blk_enc() 306 unsigned int nbytes) in geode_cbc_decrypt() argument 313 return fallback_blk_dec(desc, dst, src, nbytes); in geode_cbc_decrypt() 315 blkcipher_walk_init(&walk, dst, src, nbytes); in geode_cbc_decrypt() 319 while ((nbytes = walk.nbytes)) { in geode_cbc_decrypt() 323 op->len = nbytes - (nbytes % AES_BLOCK_SIZE); in geode_cbc_decrypt() 328 nbytes -= ret; in geode_cbc_decrypt() [all …]
|
D | padlock-aes.c | 344 unsigned int nbytes) in ecb_aes_encrypt() argument 353 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_encrypt() 357 while ((nbytes = walk.nbytes)) { in ecb_aes_encrypt() 360 nbytes / AES_BLOCK_SIZE); in ecb_aes_encrypt() 361 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_encrypt() 362 err = blkcipher_walk_done(desc, &walk, nbytes); in ecb_aes_encrypt() 373 unsigned int nbytes) in ecb_aes_decrypt() argument 382 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_decrypt() 386 while ((nbytes = walk.nbytes)) { in ecb_aes_decrypt() 389 nbytes / AES_BLOCK_SIZE); in ecb_aes_decrypt() [all …]
|
D | hifn_795x.c | 1167 void *priv, unsigned int nbytes) in hifn_setup_cmd_desc() argument 1192 buf_pos += hifn_setup_base_command(dev, buf_pos, nbytes, in hifn_setup_cmd_desc() 1193 nbytes, mask, dev->snum); in hifn_setup_cmd_desc() 1254 nbytes, nbytes, ctx->key, ctx->keysize, in hifn_setup_cmd_desc() 1373 unsigned int nbytes, void *priv) in hifn_setup_dma() argument 1380 n = nbytes; in hifn_setup_dma() 1393 n = nbytes; in hifn_setup_dma() 1415 hifn_setup_cmd_desc(dev, ctx, rctx, priv, nbytes); in hifn_setup_dma() 1463 unsigned int copy, drest = *drestp, nbytes = *nbytesp; in ablkcipher_add() local 1466 if (drest < size || size > nbytes) in ablkcipher_add() [all …]
|
D | img-hash.c | 257 ctx->buffer, hdev->req->nbytes); in img_hash_write_via_cpu() 259 ctx->total = hdev->req->nbytes; in img_hash_write_via_cpu() 361 size_t nbytes, bleft, wsend, len, tbc; in img_hash_dma_task() local 368 nbytes = ctx->sg->length - ctx->offset; in img_hash_dma_task() 380 bleft = nbytes % 4; in img_hash_dma_task() 381 wsend = (nbytes / 4); in img_hash_dma_task() 446 if (req->nbytes >= IMG_HASH_DMA_THRESHOLD) { in img_hash_process_data() 448 req->nbytes); in img_hash_process_data() 452 req->nbytes); in img_hash_process_data() 467 nbits = (u64)hdev->req->nbytes << 3; in img_hash_hw_init() [all …]
|
D | sahara.c | 588 req->nbytes, req->src, req->dst); in sahara_aes_process() 591 dev->total = req->nbytes; in sahara_aes_process() 671 req->nbytes, !!(mode & FLAGS_ENCRYPT), !!(mode & FLAGS_CBC)); in sahara_aes_crypt() 673 if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) { in sahara_aes_crypt() 938 static int sahara_walk_and_recalc(struct scatterlist *sg, unsigned int nbytes) in sahara_walk_and_recalc() argument 941 return nbytes; in sahara_walk_and_recalc() 943 while (nbytes && sg) { in sahara_walk_and_recalc() 944 if (nbytes <= sg->length) { in sahara_walk_and_recalc() 945 sg->length = nbytes; in sahara_walk_and_recalc() 949 nbytes -= sg->length; in sahara_walk_and_recalc() [all …]
|
D | n2_core.c | 323 rctx->fallback_req.nbytes = req->nbytes; in n2_hash_async_update() 350 rctx->fallback_req.nbytes = req->nbytes; in n2_hash_async_finup() 517 int nbytes, cpu; in n2_do_async_digest() local 522 if (unlikely(req->nbytes > (1 << 16))) { in n2_do_async_digest() 529 rctx->fallback_req.nbytes = req->nbytes; in n2_do_async_digest() 536 nbytes = crypto_hash_walk_first(req, &walk); in n2_do_async_digest() 550 ent->control = control_word_base(nbytes, auth_key_len, 0, in n2_do_async_digest() 563 nbytes = crypto_hash_walk_done(&walk, 0); in n2_do_async_digest() 564 while (nbytes > 0) { in n2_do_async_digest() 567 ent->control = (nbytes - 1); in n2_do_async_digest() [all …]
|
D | bfin_crc.c | 338 ctx->flag, req->nbytes); in bfin_crypto_crc_handle_queue() 351 if (ctx->bufnext_len + req->nbytes < CHKSUM_DIGEST_SIZE) { in bfin_crypto_crc_handle_queue() 353 sg_virt(req->src), req->nbytes); in bfin_crypto_crc_handle_queue() 354 ctx->bufnext_len += req->nbytes; in bfin_crypto_crc_handle_queue() 381 ctx->sg_buflen = ctx->buflast_len + req->nbytes; in bfin_crypto_crc_handle_queue() 425 if (!req->nbytes) in bfin_crypto_crc_update() 429 ctx->total += req->nbytes; in bfin_crypto_crc_update() 455 ctx->total += req->nbytes; in bfin_crypto_crc_finup()
|
D | picoxcell_crypto.c | 264 static int sg_count(struct scatterlist *sg_list, int nbytes) in sg_count() argument 269 while (nbytes > 0) { in sg_count() 271 nbytes -= sg->length; in sg_count() 291 unsigned nbytes, in spacc_sg_to_ddt() argument 300 nents = sg_count(payload, nbytes); in spacc_sg_to_ddt() 433 unsigned nbytes, enum dma_data_direction dir) in spacc_free_ddt() argument 435 unsigned nents = sg_count(payload, nbytes); in spacc_free_ddt() 932 ablk_req->nbytes, DMA_TO_DEVICE); in spacc_ablk_complete() 934 ablk_req->nbytes, DMA_FROM_DEVICE); in spacc_ablk_complete() 937 ablk_req->nbytes, DMA_BIDIRECTIONAL); in spacc_ablk_complete() [all …]
|
D | ixp4xx_crypto.c | 783 struct scatterlist *sg, unsigned nbytes, in chainup_buffers() argument 787 for (; nbytes > 0; sg = sg_next(sg)) { in chainup_buffers() 788 unsigned len = min(nbytes, sg->length); in chainup_buffers() 793 nbytes -= len; in chainup_buffers() 873 unsigned int nbytes = req->nbytes; in ablk_perform() local 898 crypt->crypt_len = nbytes; in ablk_perform() 908 if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook, in ablk_perform() 918 if (!chainup_buffers(dev, req->src, nbytes, &src_hook, in ablk_perform() 972 unsigned int nbytes) in hmac_inconsistent() argument 976 if (!nbytes) in hmac_inconsistent() [all …]
|
D | mv_cesa.c | 532 p->hw_nbytes = req->nbytes; in mv_start_new_crypt_req() 537 num_sgs = count_sgs(req->src, req->nbytes); in mv_start_new_crypt_req() 540 num_sgs = count_sgs(req->dst, req->nbytes); in mv_start_new_crypt_req() 553 hw_bytes = req->nbytes + ctx->extra_bytes; in mv_start_new_hash_req() 563 num_sgs = count_sgs(req->src, req->nbytes); in mv_start_new_hash_req() 732 if (!req->nbytes) in mv_hash_update() 735 mv_update_hash_req_ctx(ahash_request_ctx(req), 0, req->nbytes); in mv_hash_update() 750 mv_update_hash_req_ctx(ahash_request_ctx(req), 1, req->nbytes); in mv_hash_finup() 758 req->nbytes, tfm_ctx->count_add); in mv_hash_digest()
|
D | talitos.c | 1093 static int sg_count(struct scatterlist *sg_list, int nbytes, bool *chained) in sg_count() argument 1099 while (nbytes > 0) { in sg_count() 1101 nbytes -= sg->length; in sg_count() 1372 unsigned int cryptlen = areq->nbytes; in common_nonsnoop() 1469 areq->info, 0, areq->nbytes, 0, ivsize, 0, in ablkcipher_edesc_alloc() 1642 unsigned int nbytes) in ahash_edesc_alloc() argument 1649 nbytes, 0, 0, 0, areq->base.flags, false); in ahash_edesc_alloc() 1696 static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes) in ahash_process_req() argument 1709 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) { in ahash_process_req() 1712 sg_count(areq->src, nbytes, &chained), in ahash_process_req() [all …]
|
D | omap-des.c | 387 unsigned int start, unsigned int nbytes, int out) in sg_copy_buf() argument 391 if (!nbytes) in sg_copy_buf() 396 scatterwalk_copychunks(buf, &walk, nbytes, out); in sg_copy_buf() 621 dd->total = req->nbytes; in omap_des_handle_queue() 622 dd->total_save = req->nbytes; in omap_des_handle_queue() 707 pr_debug("nbytes: %d, enc: %d, cbc: %d\n", req->nbytes, in omap_des_crypt() 711 if (!IS_ALIGNED(req->nbytes, DES_BLOCK_SIZE)) { in omap_des_crypt()
|
/linux-4.1.27/arch/x86/lib/ |
D | insn.c | 116 prefixes->nbytes++; in insn_get_prefixes() 140 insn->rex_prefix.nbytes = 1; in insn_get_prefixes() 168 insn->vex_prefix.nbytes = 3; in insn_get_prefixes() 180 insn->vex_prefix.nbytes = 2; in insn_get_prefixes() 216 opcode->nbytes = 1; in insn_get_opcode() 233 opcode->bytes[opcode->nbytes++] = op; in insn_get_opcode() 266 modrm->nbytes = 1; in insn_get_modrm() 304 return (modrm->nbytes && (modrm->value & 0xc7) == 0x5); in insn_rip_relative() 322 if (insn->modrm.nbytes) { in insn_get_sib() 327 insn->sib.nbytes = 1; in insn_get_sib() [all …]
|
/linux-4.1.27/arch/arm/crypto/ |
D | aesbs-glue.c | 106 struct scatterlist *src, unsigned int nbytes) in aesbs_cbc_encrypt() argument 112 blkcipher_walk_init(&walk, dst, src, nbytes); in aesbs_cbc_encrypt() 115 while (walk.nbytes) { in aesbs_cbc_encrypt() 116 u32 blocks = walk.nbytes / AES_BLOCK_SIZE; in aesbs_cbc_encrypt() 140 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in aesbs_cbc_encrypt() 147 struct scatterlist *src, unsigned int nbytes) in aesbs_cbc_decrypt() argument 153 blkcipher_walk_init(&walk, dst, src, nbytes); in aesbs_cbc_decrypt() 156 while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) { in aesbs_cbc_decrypt() 159 walk.nbytes, &ctx->dec, walk.iv); in aesbs_cbc_decrypt() 161 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in aesbs_cbc_decrypt() [all …]
|
D | aes-ce-glue.c | 167 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 175 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt() 179 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt() 183 walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt() 190 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 198 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt() 202 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt() 206 walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt() 213 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 221 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_encrypt() [all …]
|
/linux-4.1.27/lib/mpi/ |
D | mpicoder.c | 32 MPI mpi_read_raw_data(const void *xbuffer, size_t nbytes) in mpi_read_raw_data() argument 40 while (nbytes > 0 && buffer[0] == 0) { in mpi_read_raw_data() 42 nbytes--; in mpi_read_raw_data() 45 nbits = nbytes * 8; in mpi_read_raw_data() 50 if (nbytes > 0) in mpi_read_raw_data() 55 nlimbs = DIV_ROUND_UP(nbytes, BYTES_PER_MPI_LIMB); in mpi_read_raw_data() 63 if (nbytes > 0) { in mpi_read_raw_data() 64 i = BYTES_PER_MPI_LIMB - nbytes % BYTES_PER_MPI_LIMB; in mpi_read_raw_data() 84 unsigned nbits, nbytes, nlimbs, nread = 0; in mpi_read_from_buffer() local 99 nbytes = DIV_ROUND_UP(nbits, 8); in mpi_read_from_buffer() [all …]
|
/linux-4.1.27/arch/arm64/crypto/ |
D | aes-glue.c | 100 struct scatterlist *src, unsigned int nbytes) in ecb_encrypt() argument 108 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_encrypt() 112 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_encrypt() 115 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt() 122 struct scatterlist *src, unsigned int nbytes) in ecb_decrypt() argument 130 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_decrypt() 134 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_decrypt() 137 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt() 144 struct scatterlist *src, unsigned int nbytes) in cbc_encrypt() argument 152 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_encrypt() [all …]
|
D | aes-ce-ccm-glue.c | 175 while (walk.nbytes) { in ccm_encrypt() 176 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_encrypt() 178 if (walk.nbytes == len) in ccm_encrypt() 182 walk.nbytes - tail, ctx->key_enc, in ccm_encrypt() 185 len -= walk.nbytes - tail; in ccm_encrypt() 231 while (walk.nbytes) { in ccm_decrypt() 232 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_decrypt() 234 if (walk.nbytes == len) in ccm_decrypt() 238 walk.nbytes - tail, ctx->key_enc, in ccm_decrypt() 241 len -= walk.nbytes - tail; in ccm_decrypt()
|
/linux-4.1.27/drivers/pci/hotplug/ |
D | rpadlpar_sysfs.c | 33 const char *buf, size_t nbytes) in add_slot_store() argument 39 if (nbytes >= MAX_DRC_NAME_LEN) in add_slot_store() 42 memcpy(drc_name, buf, nbytes); in add_slot_store() 46 end = &drc_name[nbytes]; in add_slot_store() 53 return nbytes; in add_slot_store() 64 const char *buf, size_t nbytes) in remove_slot_store() argument 70 if (nbytes >= MAX_DRC_NAME_LEN) in remove_slot_store() 73 memcpy(drc_name, buf, nbytes); in remove_slot_store() 77 end = &drc_name[nbytes]; in remove_slot_store() 84 return nbytes; in remove_slot_store()
|
D | cpqphp_sysfs.c | 175 size_t nbytes, loff_t *ppos) in read() argument 178 return simple_read_from_buffer(buf, nbytes, ppos, dbg->data, dbg->size); in read()
|
/linux-4.1.27/include/trace/events/ |
D | random.h | 196 TP_PROTO(int nbytes, unsigned long IP), 198 TP_ARGS(nbytes, IP), 201 __field( int, nbytes ) 206 __entry->nbytes = nbytes; 210 TP_printk("nbytes %d caller %pS", __entry->nbytes, (void *)__entry->IP) 214 TP_PROTO(int nbytes, unsigned long IP), 216 TP_ARGS(nbytes, IP) 220 TP_PROTO(int nbytes, unsigned long IP), 222 TP_ARGS(nbytes, IP) 226 TP_PROTO(const char *pool_name, int nbytes, int entropy_count, [all …]
|
/linux-4.1.27/drivers/char/ |
D | random.c | 490 int nbytes) in _mix_pool_bytes() argument 508 while (nbytes--) { in _mix_pool_bytes() 537 int nbytes) in __mix_pool_bytes() argument 539 trace_mix_pool_bytes_nolock(r->name, nbytes, _RET_IP_); in __mix_pool_bytes() 540 _mix_pool_bytes(r, in, nbytes); in __mix_pool_bytes() 544 int nbytes) in mix_pool_bytes() argument 548 trace_mix_pool_bytes(r->name, nbytes, _RET_IP_); in mix_pool_bytes() 550 _mix_pool_bytes(r, in, nbytes); in mix_pool_bytes() 948 size_t nbytes, int min, int rsvd); 955 static void _xfer_secondary_pool(struct entropy_store *r, size_t nbytes); [all …]
|
/linux-4.1.27/fs/coda/ |
D | psdev.c | 98 size_t nbytes, loff_t *off) in coda_psdev_write() argument 116 if ( nbytes < sizeof(struct coda_out_hdr) ) { in coda_psdev_write() 119 count = nbytes; in coda_psdev_write() 122 if ( nbytes > size ) { in coda_psdev_write() 125 nbytes = size; in coda_psdev_write() 127 CODA_ALLOC(dcbuf, union outputArgs *, nbytes); in coda_psdev_write() 128 if (copy_from_user(dcbuf, buf, nbytes)) { in coda_psdev_write() 129 CODA_FREE(dcbuf, nbytes); in coda_psdev_write() 137 CODA_FREE(dcbuf, nbytes); in coda_psdev_write() 144 count = nbytes; in coda_psdev_write() [all …]
|
/linux-4.1.27/drivers/macintosh/ |
D | via-pmu68k.c | 286 for (i = 0; i < req->nbytes - 1; ++i) in pmu_send_request() 288 --req->nbytes; in pmu_send_request() 299 if (req->nbytes != 2) in pmu_send_request() 302 req->nbytes = 1; in pmu_send_request() 310 if (req->nbytes != 6) in pmu_send_request() 313 req->nbytes = 5; in pmu_send_request() 323 if (req->nbytes != 4) in pmu_send_request() 328 req->nbytes = 3; in pmu_send_request() 336 if (req->nbytes != 5) in pmu_send_request() 342 req->nbytes = 4; in pmu_send_request() [all …]
|
D | adb-iop.c | 177 printk("adb_iop_start %p: sending packet, %d bytes:", req, req->nbytes); in adb_iop_start() 178 for (i = 0 ; i < req->nbytes ; i++) in adb_iop_start() 187 amsg.count = req->nbytes - 2; in adb_iop_start() 191 memcpy(&amsg.cmd, req->data + 1, req->nbytes - 1); in adb_iop_start() 234 if ((req->nbytes < 2) || (req->data[0] != ADB_PACKET)) { in adb_iop_write() 275 .nbytes = 2, in adb_iop_reset_bus()
|
D | via-maciisi.c | 245 for (i = 0; i < req->nbytes; i++) { in maciisi_send_request() 297 int nbytes, ...) in maciisi_request() argument 302 req->nbytes = nbytes; in maciisi_request() 305 va_start(list, nbytes); in maciisi_request() 306 for (i = 0; i < nbytes; i++) in maciisi_request() 322 if (req->nbytes < 2 || req->data[0] > CUDA_PACKET) { in maciisi_write() 512 if (data_index >= req->nbytes) { in maciisi_interrupt()
|
D | via-cuda.c | 104 void (*done)(struct adb_request *), int nbytes, ...); 363 int nbytes, ...) in cuda_request() argument 373 req->nbytes = nbytes; in cuda_request() 375 va_start(list, nbytes); in cuda_request() 376 for (i = 0; i < nbytes; ++i) in cuda_request() 389 if (req->nbytes < 2 || req->data[0] > CUDA_PACKET) { in cuda_write() 521 if (data_index >= req->nbytes) { in cuda_interrupt()
|
D | via-pmu.c | 975 for (i = 0; i < req->nbytes - 1; ++i) in pmu_send_request() 977 --req->nbytes; in pmu_send_request() 988 if (req->nbytes != 2) in pmu_send_request() 991 req->nbytes = 1; in pmu_send_request() 999 if (req->nbytes != 6) in pmu_send_request() 1002 req->nbytes = 5; in pmu_send_request() 1016 for (i = req->nbytes - 1; i > 1; --i) in pmu_send_request() 1018 req->data[3] = req->nbytes - 2; in pmu_send_request() 1022 req->nbytes += 2; in pmu_send_request() 1079 req.nbytes = 4; in pmu_adb_reset_bus() [all …]
|
D | adb.c | 425 int flags, int nbytes, ...) in adb_request() argument 434 if (nbytes < 1) in adb_request() 437 req->nbytes = nbytes+1; in adb_request() 441 va_start(list, nbytes); in adb_request() 442 for (i = 0; i < nbytes; ++i) in adb_request() 653 if (req->nbytes < 3) in do_adb_query() 796 req->nbytes = count; in adb_write()
|
D | macio-adb.c | 174 for (i = 0; i < req->nbytes - 1; ++i) in macio_send_request() 176 --req->nbytes; in macio_send_request() 216 for (i = 0; i < req->nbytes; ++i) in macio_adb_interrupt() 218 out_8(&adb->dcount.r, req->nbytes & HMB); in macio_adb_interrupt()
|
D | via-macii.c | 258 if (req->nbytes < 2 || req->data[0] != ADB_PACKET || req->nbytes > 15) { in macii_write() 435 if (data_index >= req->nbytes) { in macii_interrupt()
|
/linux-4.1.27/arch/x86/include/asm/ |
D | insn.h | 33 unsigned char nbytes; member 145 if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */ in insn_vex_m_bits() 153 if (insn->vex_prefix.nbytes == 2) /* 2 bytes VEX */ in insn_vex_p_bits() 174 return insn->prefixes.nbytes; in insn_offset_rex_prefix() 178 return insn_offset_rex_prefix(insn) + insn->rex_prefix.nbytes; in insn_offset_vex_prefix() 182 return insn_offset_vex_prefix(insn) + insn->vex_prefix.nbytes; in insn_offset_opcode() 186 return insn_offset_opcode(insn) + insn->opcode.nbytes; in insn_offset_modrm() 190 return insn_offset_modrm(insn) + insn->modrm.nbytes; in insn_offset_sib() 194 return insn_offset_sib(insn) + insn->sib.nbytes; in insn_offset_displacement() 198 return insn_offset_displacement(insn) + insn->displacement.nbytes; in insn_offset_immediate()
|
/linux-4.1.27/drivers/pnp/isapnp/ |
D | proc.c | 36 size_t nbytes, loff_t * ppos) in isapnp_proc_bus_read() argument 44 if (nbytes >= size) in isapnp_proc_bus_read() 45 nbytes = size; in isapnp_proc_bus_read() 46 if (pos + nbytes > size) in isapnp_proc_bus_read() 47 nbytes = size - pos; in isapnp_proc_bus_read() 48 cnt = nbytes; in isapnp_proc_bus_read() 62 return nbytes; in isapnp_proc_bus_read()
|
/linux-4.1.27/include/crypto/ |
D | scatterwalk.h | 66 unsigned int nbytes) in scatterwalk_clamp() argument 69 return nbytes > len_this_page ? len_this_page : nbytes; in scatterwalk_clamp() 73 unsigned int nbytes) in scatterwalk_advance() argument 75 walk->offset += nbytes; in scatterwalk_advance() 96 size_t nbytes, int out); 101 unsigned int start, unsigned int nbytes, int out);
|
D | xts.h | 18 void (*crypt_fn)(void *ctx, u8 *blks, unsigned int nbytes); 24 struct scatterlist *src, unsigned int nbytes,
|
D | lrw.h | 36 void (*crypt_fn)(void *ctx, u8 *blks, unsigned int nbytes); 40 struct scatterlist *src, unsigned int nbytes,
|
D | algapi.h | 95 unsigned int nbytes; member 118 unsigned int nbytes; member 312 unsigned int nbytes) in blkcipher_walk_init() argument 316 walk->total = nbytes; in blkcipher_walk_init() 322 unsigned int nbytes) in ablkcipher_walk_init() argument 326 walk->total = nbytes; in ablkcipher_walk_init()
|
D | skcipher.h | 97 unsigned int nbytes, void *iv) in skcipher_givcrypt_set_crypt() argument 99 ablkcipher_request_set_crypt(&req->creq, src, dst, nbytes, iv); in skcipher_givcrypt_set_crypt()
|
D | aead.h | 86 unsigned int nbytes, void *iv) in aead_givcrypt_set_crypt() argument 88 aead_request_set_crypt(&req->areq, src, dst, nbytes, iv); in aead_givcrypt_set_crypt()
|
D | hash.h | 56 unsigned int nbytes; member 590 unsigned int nbytes) in ahash_request_set_crypt() argument 593 req->nbytes = nbytes; in ahash_request_set_crypt()
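ahash_request_set_crypt() is where nbytes enters an async hash request (it becomes req->nbytes, which the drivers listed above then walk). A minimal synchronous-style usage sketch with a hypothetical caller; -EINPROGRESS/-EBUSY completion handling for truly asynchronous tfms is omitted:

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/scatterlist.h>

static int sha256_of_sg(struct scatterlist *sg, unsigned int nbytes, u8 *out)
{
        struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
        struct ahash_request *req;
        int err;

        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                crypto_free_ahash(tfm);
                return -ENOMEM;
        }

        /* stores nbytes in req->nbytes for the driver's hash walk */
        ahash_request_set_crypt(req, sg, out, nbytes);
        err = crypto_ahash_digest(req);

        ahash_request_free(req);
        crypto_free_ahash(tfm);
        return err;
}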
|
/linux-4.1.27/drivers/zorro/ |
D | proc.c | 30 proc_bus_zorro_read(struct file *file, char __user *buf, size_t nbytes, loff_t *ppos) in proc_bus_zorro_read() argument 38 if (nbytes >= sizeof(struct ConfigDev)) in proc_bus_zorro_read() 39 nbytes = sizeof(struct ConfigDev); in proc_bus_zorro_read() 40 if (pos + nbytes > sizeof(struct ConfigDev)) in proc_bus_zorro_read() 41 nbytes = sizeof(struct ConfigDev) - pos; in proc_bus_zorro_read() 51 if (copy_to_user(buf, (void *)&cd + pos, nbytes)) in proc_bus_zorro_read() 53 *ppos += nbytes; in proc_bus_zorro_read() 55 return nbytes; in proc_bus_zorro_read()
|
/linux-4.1.27/tools/usb/ |
D | ffs-test.c | 288 static ssize_t read_wrap(struct thread *t, void *buf, size_t nbytes); 289 static ssize_t write_wrap(struct thread *t, const void *buf, size_t nbytes); 290 static ssize_t ep0_consume(struct thread *t, const void *buf, size_t nbytes); 291 static ssize_t fill_in_buf(struct thread *t, void *buf, size_t nbytes); 292 static ssize_t empty_out_buf(struct thread *t, const void *buf, size_t nbytes); 435 static ssize_t read_wrap(struct thread *t, void *buf, size_t nbytes) in read_wrap() argument 437 return read(t->fd, buf, nbytes); in read_wrap() 440 static ssize_t write_wrap(struct thread *t, const void *buf, size_t nbytes) in write_wrap() argument 442 return write(t->fd, buf, nbytes); in write_wrap() 453 fill_in_buf(struct thread *ignore, void *buf, size_t nbytes) in fill_in_buf() argument [all …]
|
/linux-4.1.27/drivers/crypto/nx/ |
D | nx-aes-gcm.c | 132 unsigned int nbytes = req->assoclen; in nx_gca() local 136 if (nbytes <= AES_BLOCK_SIZE) { in nx_gca() 138 scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG); in nx_gca() 156 to_process = min_t(u64, nbytes - processed, in nx_gca() 164 if ((to_process + processed) < nbytes) in nx_gca() 186 } while (processed < nbytes); in nx_gca() 199 unsigned int nbytes = req->assoclen; in gmac() local 222 to_process = min_t(u64, nbytes - processed, in gmac() 230 if ((to_process + processed) < nbytes) in gmac() 239 csbcpb->cpb.aes_gcm.bit_length_aad = 8 * nbytes; in gmac() [all …]
|
D | nx-aes-cbc.c | 68 unsigned int nbytes, in cbc_aes_nx_crypt() argument 85 to_process = nbytes - processed; in cbc_aes_nx_crypt() 108 } while (processed < nbytes); in cbc_aes_nx_crypt() 117 unsigned int nbytes) in cbc_aes_nx_encrypt() argument 119 return cbc_aes_nx_crypt(desc, dst, src, nbytes, 1); in cbc_aes_nx_encrypt() 125 unsigned int nbytes) in cbc_aes_nx_decrypt() argument 127 return cbc_aes_nx_crypt(desc, dst, src, nbytes, 0); in cbc_aes_nx_decrypt()
|
D | nx-aes-ecb.c | 68 unsigned int nbytes, in ecb_aes_nx_crypt() argument 85 to_process = nbytes - processed; in ecb_aes_nx_crypt() 107 } while (processed < nbytes); in ecb_aes_nx_crypt() 117 unsigned int nbytes) in ecb_aes_nx_encrypt() argument 119 return ecb_aes_nx_crypt(desc, dst, src, nbytes, 1); in ecb_aes_nx_encrypt() 125 unsigned int nbytes) in ecb_aes_nx_decrypt() argument 127 return ecb_aes_nx_crypt(desc, dst, src, nbytes, 0); in ecb_aes_nx_decrypt()
|
D | nx-aes-ccm.c | 176 unsigned int nbytes, in generate_pat() argument 226 rc = generate_b0(iv, req->assoclen, authsize, nbytes, b0); in generate_pat() 350 unsigned int nbytes = req->cryptlen; in ccm_nx_decrypt() local 359 nbytes -= authsize; in ccm_nx_decrypt() 363 req->src, nbytes, authsize, in ccm_nx_decrypt() 366 rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, in ccm_nx_decrypt() 376 to_process = nbytes - processed; in ccm_nx_decrypt() 378 if ((to_process + processed) < nbytes) in ccm_nx_decrypt() 413 } while (processed < nbytes); in ccm_nx_decrypt() 427 unsigned int nbytes = req->cryptlen; in ccm_nx_encrypt() local [all …]
|
D | nx.c | 221 unsigned int *nbytes) in trim_sg_list() argument 247 data_back = *nbytes - (data_back & ~(AES_BLOCK_SIZE - 1)); in trim_sg_list() 248 *nbytes -= data_back; in trim_sg_list() 275 unsigned int *nbytes, in nx_build_sg_lists() argument 280 unsigned int total = *nbytes; in nx_build_sg_lists() 293 *nbytes = min_t(u64, *nbytes, nx_ctx->ap->databytelen); in nx_build_sg_lists() 296 offset, nbytes); in nx_build_sg_lists() 298 offset, nbytes); in nx_build_sg_lists() 300 if (*nbytes < total) in nx_build_sg_lists() 301 delta = *nbytes - (*nbytes & ~(AES_BLOCK_SIZE - 1)); in nx_build_sg_lists() [all …]
|
D | nx-aes-ctr.c | 87 unsigned int nbytes) in ctr_aes_nx_crypt() argument 98 to_process = nbytes - processed; in ctr_aes_nx_crypt() 122 } while (processed < nbytes); in ctr_aes_nx_crypt() 131 unsigned int nbytes) in ctr3686_aes_nx_crypt() argument 144 return ctr_aes_nx_crypt(desc, dst, src, nbytes); in ctr3686_aes_nx_crypt()
|
/linux-4.1.27/arch/nios2/lib/ |
D | memcpy.c | 35 #define BYTE_COPY_FWD(dst_bp, src_bp, nbytes) \ argument 37 size_t __nbytes = (nbytes); \ 53 #define WORD_COPY_FWD(dst_bp, src_bp, nbytes_left, nbytes) \ argument 56 _wordcopy_fwd_aligned(dst_bp, src_bp, (nbytes) / OPSIZ);\ 58 _wordcopy_fwd_dest_aligned(dst_bp, src_bp, (nbytes) / OPSIZ);\ 59 src_bp += (nbytes) & -OPSIZ; \ 60 dst_bp += (nbytes) & -OPSIZ; \ 61 (nbytes_left) = (nbytes) % OPSIZ; \
|
/linux-4.1.27/arch/alpha/boot/ |
D | bootp.c | 148 static long nbytes; in start_kernel() local 179 nbytes = callback_getenv(ENV_BOOTED_OSFLAGS, envval, sizeof(envval)); in start_kernel() 180 if (nbytes < 0 || nbytes >= sizeof(envval)) { in start_kernel() 181 nbytes = 0; in start_kernel() 183 envval[nbytes] = '\0'; in start_kernel()
|
D | main.c | 155 int nbytes; in start_kernel() local 178 nbytes = callback_getenv(ENV_BOOTED_OSFLAGS, envval, sizeof(envval)); in start_kernel() 179 if (nbytes < 0) { in start_kernel() 180 nbytes = 0; in start_kernel() 182 envval[nbytes] = '\0'; in start_kernel()
|
D | bootpz.c | 288 static long nbytes; in start_kernel() local 312 nbytes = callback_getenv(ENV_BOOTED_OSFLAGS, envval, sizeof(envval)); in start_kernel() 313 if (nbytes < 0 || nbytes >= sizeof(envval)) { in start_kernel() 314 nbytes = 0; in start_kernel() 316 envval[nbytes] = '\0'; in start_kernel()
|
/linux-4.1.27/arch/x86/include/asm/crypto/ |
D | glue_helper.h | 48 bool fpu_enabled, unsigned int nbytes) in glue_fpu_begin() argument 60 if (nbytes < bsize * (unsigned int)fpu_blocks_limit) in glue_fpu_begin() 116 struct scatterlist *src, unsigned int nbytes); 122 unsigned int nbytes); 128 unsigned int nbytes); 133 struct scatterlist *src, unsigned int nbytes); 138 struct scatterlist *src, unsigned int nbytes,
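glue_fpu_begin() encodes the policy visible in the x86 glue files above: SIMD state is only saved when the request is large enough to amortize the cost. A simplified sketch of that decision; the real helper also treats a negative fpu_blocks_limit as "never use the FPU" and pairs with kernel_fpu_begin()/kernel_fpu_end() from the x86 FPU API:

static inline bool my_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
                                bool fpu_enabled, unsigned int nbytes)
{
        if (fpu_enabled)
                return true;            /* already inside an FPU section */

        /* too little data: stay on the scalar fallback path */
        if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
                return false;

        kernel_fpu_begin();
        return true;
}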
|
/linux-4.1.27/drivers/pci/ |
D | proc.c | 27 size_t nbytes, loff_t *ppos) in proc_bus_pci_read() argument 48 if (nbytes >= size) in proc_bus_pci_read() 49 nbytes = size; in proc_bus_pci_read() 50 if (pos + nbytes > size) in proc_bus_pci_read() 51 nbytes = size - pos; in proc_bus_pci_read() 52 cnt = nbytes; in proc_bus_pci_read() 107 return nbytes; in proc_bus_pci_read() 111 size_t nbytes, loff_t *ppos) in proc_bus_pci_write() argument 121 if (nbytes >= size) in proc_bus_pci_write() 122 nbytes = size; in proc_bus_pci_write() [all …]
|
/linux-4.1.27/drivers/staging/rtl8192e/rtl8192e/ |
D | rtl_crypto.h | 146 unsigned int nbytes); 150 unsigned int nbytes, u8 *iv); 154 unsigned int nbytes); 158 unsigned int nbytes, u8 *iv); 317 unsigned int nbytes) in crypto_cipher_encrypt() argument 320 return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes); in crypto_cipher_encrypt() 326 unsigned int nbytes, u8 *iv) in crypto_cipher_encrypt_iv() argument 330 return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv); in crypto_cipher_encrypt_iv() 336 unsigned int nbytes) in crypto_cipher_decrypt() argument 339 return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes); in crypto_cipher_decrypt() [all …]
|
/linux-4.1.27/drivers/crypto/caam/ |
D | sg_sw_sec4.h | 59 static inline int __sg_count(struct scatterlist *sg_list, int nbytes, in __sg_count() argument 65 while (nbytes > 0) { in __sg_count() 67 nbytes -= sg->length; in __sg_count() 77 static inline int sg_count(struct scatterlist *sg_list, int nbytes, in sg_count() argument 80 int sg_nents = __sg_count(sg_list, nbytes, chained); in sg_count()
|
D | caamhash.c | 786 int in_len = *buflen + req->nbytes, to_hash; in ahash_update_ctx() 800 src_nents = __sg_count(req->src, req->nbytes - (*next_buflen), in ahash_update_ctx() 883 req->nbytes, 0); in ahash_update_ctx() 1003 src_nents = __sg_count(req->src, req->nbytes, &chained); in ahash_finup_ctx() 1046 buflen + req->nbytes, LDST_SGF); in ahash_finup_ctx() 1089 src_nents = sg_count(req->src, req->nbytes, &chained); in ahash_digest() 1125 append_seq_in_ptr(desc, src_dma, req->nbytes, options); in ahash_digest() 1227 int in_len = *buflen + req->nbytes, to_hash; in ahash_update_no_ctx() 1240 src_nents = __sg_count(req->src, req->nbytes - (*next_buflen), in ahash_update_no_ctx() 1313 req->nbytes, 0); in ahash_update_no_ctx() [all …]
|
/linux-4.1.27/drivers/crypto/amcc/ |
D | crypto4xx_core.c | 485 u32 *idx, u32 *offset, u32 *nbytes) in crypto4xx_fill_one_page() argument 496 *nbytes -= dev->scatter_buffer_size; in crypto4xx_fill_one_page() 512 *nbytes -= *length; in crypto4xx_fill_one_page() 515 *nbytes -= *length; in crypto4xx_fill_one_page() 521 len = (*nbytes <= dev->scatter_buffer_size) ? in crypto4xx_fill_one_page() 522 (*nbytes) : dev->scatter_buffer_size; in crypto4xx_fill_one_page() 528 *nbytes -= len; in crypto4xx_fill_one_page() 542 u32 nbytes, in crypto4xx_copy_pkt_to_dst() argument 557 while (nbytes) { in crypto4xx_copy_pkt_to_dst() 564 len = (nbytes <= sg->length) ? nbytes : sg->length; in crypto4xx_copy_pkt_to_dst() [all …]
|
D | crypto4xx_alg.c | 83 req->nbytes, req->info, in crypto4xx_encrypt() 97 req->nbytes, req->info, in crypto4xx_decrypt() 265 req->nbytes, NULL, 0); in crypto4xx_hash_update() 283 req->nbytes, NULL, 0); in crypto4xx_hash_digest()
|
/linux-4.1.27/drivers/crypto/qce/ |
D | sha.c | 70 req->nbytes = rctx->nbytes_orig; in qce_ahash_done() 95 rctx->src_nents = qce_countsg(req->src, req->nbytes, in qce_ahash_async_req_handle() 242 unsigned int nbytes; in qce_ahash_update() local 246 rctx->count += req->nbytes; in qce_ahash_update() 249 total = req->nbytes + rctx->buflen; in qce_ahash_update() 253 0, req->nbytes, 0); in qce_ahash_update() 254 rctx->buflen += req->nbytes; in qce_ahash_update() 260 rctx->nbytes_orig = req->nbytes; in qce_ahash_update() 272 unsigned int src_offset = req->nbytes - hash_later; in qce_ahash_update() 278 nbytes = total - hash_later; in qce_ahash_update() [all …]
|
D | dma.c | 90 int qce_countsg(struct scatterlist *sglist, int nbytes, bool *chained) in qce_countsg() argument 98 while (nbytes > 0 && sg) { in qce_countsg() 100 nbytes -= sg->length; in qce_countsg()
|
D | ablkcipher.c | 77 rctx->cryptlen = req->nbytes; in qce_ablkcipher_async_req_handle() 83 rctx->src_nents = qce_countsg(req->src, req->nbytes, in qce_ablkcipher_async_req_handle() 86 rctx->dst_nents = qce_countsg(req->dst, req->nbytes, in qce_ablkcipher_async_req_handle() 142 ret = qce_start(async_req, tmpl->crypto_alg_type, req->nbytes, 0); in qce_ablkcipher_async_req_handle()
|
D | dma.h | 52 int qce_countsg(struct scatterlist *sg_list, int nbytes, bool *chained);
|
/linux-4.1.27/tools/usb/usbip/src/ |
D | usbip_network.c | 110 ssize_t nbytes; in usbip_net_xmit() local 118 nbytes = send(sockfd, buff, bufflen, 0); in usbip_net_xmit() 120 nbytes = recv(sockfd, buff, bufflen, MSG_WAITALL); in usbip_net_xmit() 122 if (nbytes <= 0) in usbip_net_xmit() 125 buff = (void *)((intptr_t) buff + nbytes); in usbip_net_xmit() 126 bufflen -= nbytes; in usbip_net_xmit() 127 total += nbytes; in usbip_net_xmit()
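usbip_net_xmit() above keeps calling send() or recv() until bufflen bytes have moved, advancing the buffer pointer by whatever each call actually transferred. The send-direction loop as a standalone userspace sketch; send_all is an illustrative name, not a helper from the usbip tree:

#include <sys/socket.h>
#include <sys/types.h>
#include <stdint.h>

/* Send exactly bufflen bytes or fail: loop until the kernel has taken
 * everything, accumulating the running total as in usbip_net_xmit(). */
static ssize_t send_all(int sockfd, const void *buff, size_t bufflen)
{
        ssize_t total = 0;

        while (bufflen > 0) {
                ssize_t nbytes = send(sockfd, buff, bufflen, 0);

                if (nbytes <= 0)
                        return -1;
                buff = (const void *)((intptr_t)buff + nbytes);
                bufflen -= nbytes;
                total += nbytes;
        }
        return total;
}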
|
/linux-4.1.27/include/linux/ |
D | random.h | 16 extern void get_random_bytes(void *buf, int nbytes); 17 extern void get_random_bytes_arch(void *buf, int nbytes); 29 void prandom_bytes(void *buf, size_t nbytes); 38 void prandom_bytes_state(struct rnd_state *state, void *buf, size_t nbytes);
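The random.h prototypes above take a caller-supplied buffer plus an nbytes count. A minimal in-kernel sketch using just the declarations shown; the helper name and buffers are illustrative:

#include <linux/random.h>
#include <linux/types.h>

/* Fill one buffer from the entropy pool and another from the fast,
 * non-cryptographic PRNG, using only get_random_bytes() and
 * prandom_bytes() as declared above. */
static void fill_random_buffers(u8 *secret, u8 *scratch, size_t nbytes)
{
        get_random_bytes(secret, (int)nbytes);  /* crypto-quality bytes */
        prandom_bytes(scratch, nbytes);         /* fast, non-crypto bytes */
}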
|
D | crypto.h | 168 unsigned int nbytes; member 214 const u8 *src, unsigned int nbytes); 354 unsigned int nbytes); 357 unsigned int nbytes); 625 struct scatterlist *src, unsigned int nbytes); 627 struct scatterlist *src, unsigned int nbytes); 1188 unsigned int nbytes, void *iv) in ablkcipher_request_set_crypt() argument 1192 req->nbytes = nbytes; in ablkcipher_request_set_crypt() 1811 unsigned int nbytes) in crypto_blkcipher_encrypt() argument 1814 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes); in crypto_blkcipher_encrypt() [all …]
|
D | mpi.h | 79 MPI mpi_read_raw_data(const void *xbuffer, size_t nbytes); 83 void *mpi_get_buffer(MPI a, unsigned *nbytes, int *sign); 84 void *mpi_get_secure_buffer(MPI a, unsigned *nbytes, int *sign); 85 int mpi_set_buffer(MPI a, const void *buffer, unsigned nbytes, int sign);
|
D | adb.h | 12 int nbytes; member 55 int flags, int nbytes, ...);
|
D | cuda.h | 15 void (*done)(struct adb_request *), int nbytes, ...);
|
D | crc8.h | 99 u8 crc8(const u8 table[CRC8_TABLE_SIZE], u8 *pdata, size_t nbytes, u8 crc);
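crc8() above runs a table-driven CRC-8 over nbytes of data, carrying the crc argument as the running value. A usage sketch; crc8_populate_msb(), CRC8_INIT_VALUE and the 0x07 polynomial are assumptions drawn from the usual lib/crc8 pairing, not from the excerpt itself:

#include <linux/crc8.h>
#include <linux/types.h>

static u8 my_crc8_table[CRC8_TABLE_SIZE];

/* One-time table setup for polynomial x^8 + x^2 + x + 1 (assumed). */
static void my_crc8_init(void)
{
        crc8_populate_msb(my_crc8_table, 0x07);
}

/* CRC over nbytes of payload, starting from the conventional init value. */
static u8 my_crc8(u8 *pdata, size_t nbytes)
{
        return crc8(my_crc8_table, pdata, nbytes, CRC8_INIT_VALUE);
}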
|
D | net.h | 246 bool __net_get_random_once(void *buf, int nbytes, bool *done, 249 #define net_get_random_once(buf, nbytes) \ argument 257 nbytes, \
|
D | pmu.h | 17 void (*done)(struct adb_request *), int nbytes, ...);
|
/linux-4.1.27/drivers/staging/speakup/ |
D | devsynth.c | 17 size_t nbytes, loff_t *ppos) in speakup_file_write() argument 19 size_t count = nbytes; in speakup_file_write() 37 return (ssize_t) nbytes; in speakup_file_write() 41 size_t nbytes, loff_t *ppos) in speakup_file_read() argument
|
/linux-4.1.27/drivers/media/usb/uvc/ |
D | uvc_isight.c | 48 unsigned int maxlen, nbytes; in isight_decode() local 88 nbytes = min(len, maxlen); in isight_decode() 89 memcpy(mem, data, nbytes); in isight_decode() 90 buf->bytesused += nbytes; in isight_decode()
|
D | uvc_debugfs.c | 48 size_t nbytes, loff_t *ppos) in uvc_debugfs_stats_read() argument 52 return simple_read_from_buffer(user_buf, nbytes, ppos, buf->data, in uvc_debugfs_stats_read()
|
D | uvc_video.c | 1064 unsigned int maxlen, nbytes; in uvc_video_decode_data() local 1073 nbytes = min((unsigned int)len, maxlen); in uvc_video_decode_data() 1074 memcpy(mem, data, nbytes); in uvc_video_decode_data() 1075 buf->bytesused += nbytes; in uvc_video_decode_data() 1122 unsigned int nbytes; in uvc_video_encode_data() local 1127 nbytes = min((unsigned int)len, buf->bytesused - queue->buf_used); in uvc_video_encode_data() 1128 nbytes = min(stream->bulk.max_payload_size - stream->bulk.payload_size, in uvc_video_encode_data() 1129 nbytes); in uvc_video_encode_data() 1130 memcpy(data, mem, nbytes); in uvc_video_encode_data() 1132 queue->buf_used += nbytes; in uvc_video_encode_data() [all …]
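uvc_video_encode_data() above clamps nbytes twice: once to the data left in the video buffer and once to the space left in the current bulk payload. The double clamp in isolation, as plain C with illustrative parameter names:

#include <stddef.h>

static size_t min_size(size_t a, size_t b)
{
        return a < b ? a : b;
}

/* Bytes to copy into one payload: bounded by the unread part of the
 * buffer and by the room left in the payload, as in the excerpt above. */
static size_t encode_chunk_len(size_t len, size_t bytesused, size_t buf_used,
                               size_t max_payload, size_t payload_used)
{
        size_t nbytes = min_size(len, bytesused - buf_used);

        return min_size(max_payload - payload_used, nbytes);
}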
|
/linux-4.1.27/drivers/staging/unisys/visorchannel/ |
D | visorchannel_funcs.c | 222 void *local, ulong nbytes) in visorchannel_read() argument 225 local, nbytes); in visorchannel_read() 227 (nbytes >= sizeof(struct channel_header))) { in visorchannel_read() 237 void *local, ulong nbytes) in visorchannel_write() argument 239 if (offset == 0 && nbytes >= sizeof(struct channel_header)) in visorchannel_write() 242 return visor_memregion_write(channel->memregion, offset, local, nbytes); in visorchannel_write() 248 ulong nbytes) in visorchannel_clear() argument 259 while (nbytes > 0) { in visorchannel_clear() 263 if (nbytes < thisbytes) in visorchannel_clear() 264 thisbytes = nbytes; in visorchannel_clear() [all …]
|
D | visorchannel.h | 50 void *local, ulong nbytes); 52 void *local, ulong nbytes); 54 u8 ch, ulong nbytes);
|
/linux-4.1.27/net/sunrpc/ |
D | xdr.c | 61 __be32 *xdr_encode_opaque_fixed(__be32 *p, const void *ptr, unsigned int nbytes) in xdr_encode_opaque_fixed() argument 63 if (likely(nbytes != 0)) { in xdr_encode_opaque_fixed() 64 unsigned int quadlen = XDR_QUADLEN(nbytes); in xdr_encode_opaque_fixed() 65 unsigned int padding = (quadlen << 2) - nbytes; in xdr_encode_opaque_fixed() 68 memcpy(p, ptr, nbytes); in xdr_encode_opaque_fixed() 70 memset((char *)p + nbytes, 0, padding); in xdr_encode_opaque_fixed() 85 __be32 *xdr_encode_opaque(__be32 *p, const void *ptr, unsigned int nbytes) in xdr_encode_opaque() argument 87 *p++ = cpu_to_be32(nbytes); in xdr_encode_opaque() 88 return xdr_encode_opaque_fixed(p, ptr, nbytes); in xdr_encode_opaque() 513 size_t nbytes) in xdr_get_next_encode_buffer() argument [all …]
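xdr_encode_opaque_fixed() above rounds nbytes up to whole XDR quads (4-byte words) and zero-fills the padding. The same arithmetic as a userspace sketch; the XDR_QUADLEN definition is reproduced from its customary ((len) + 3) >> 2 form, which is an assumption here:

#include <stddef.h>
#include <string.h>

#define XDR_QUADLEN(l)  (((l) + 3) >> 2)        /* assumed definition */

/* Copy nbytes of opaque data to p, zero the pad bytes, and return the
 * position just past the encoded quads; a re-statement of the excerpt,
 * not the kernel function. */
static unsigned char *encode_opaque_fixed(unsigned char *p, const void *ptr,
                                          unsigned int nbytes)
{
        if (nbytes != 0) {
                unsigned int quadlen = XDR_QUADLEN(nbytes);
                unsigned int padding = (quadlen << 2) - nbytes;

                memcpy(p, ptr, nbytes);
                if (padding != 0)
                        memset(p + nbytes, 0, padding);
                p += quadlen << 2;
        }
        return p;
}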
|
/linux-4.1.27/drivers/net/ethernet/brocade/bna/ |
D | bnad_debugfs.c | 244 size_t nbytes, loff_t *pos) in bnad_debugfs_read() argument 251 return simple_read_from_buffer(buf, nbytes, pos, in bnad_debugfs_read() 290 size_t nbytes, loff_t *pos) in bnad_debugfs_read_regrd() argument 299 rc = simple_read_from_buffer(buf, nbytes, pos, in bnad_debugfs_read_regrd() 302 if ((*pos + nbytes) >= bnad->reglen) { in bnad_debugfs_read_regrd() 313 size_t nbytes, loff_t *ppos) in bnad_debugfs_write_regrd() argument 325 kern_buf = kzalloc(nbytes, GFP_KERNEL); in bnad_debugfs_write_regrd() 329 if (copy_from_user(kern_buf, (void __user *)buf, nbytes)) { in bnad_debugfs_write_regrd() 376 return nbytes; in bnad_debugfs_write_regrd() 381 size_t nbytes, loff_t *ppos) in bnad_debugfs_write_regwr() argument [all …]
|
/linux-4.1.27/drivers/crypto/ccp/ |
D | ccp-crypto-aes-cmac.c | 39 unsigned int offset = rctx->nbytes - rctx->hash_rem; in ccp_aes_cmac_complete() 58 static int ccp_do_cmac_update(struct ahash_request *req, unsigned int nbytes, in ccp_do_cmac_update() argument 75 if (nbytes) in ccp_do_cmac_update() 78 len = (u64)rctx->buf_count + (u64)nbytes; in ccp_do_cmac_update() 82 0, nbytes, 0); in ccp_do_cmac_update() 83 rctx->buf_count += nbytes; in ccp_do_cmac_update() 89 rctx->nbytes = nbytes; in ccp_do_cmac_update() 110 sg_count = (nbytes) ? sg_nents(req->src) + 2 : 2; in ccp_do_cmac_update() 123 if (nbytes) in ccp_do_cmac_update() 181 return ccp_do_cmac_update(req, req->nbytes, 0); in ccp_aes_cmac_update() [all …]
|
D | ccp-crypto-sha.c | 38 unsigned int offset = rctx->nbytes - rctx->hash_rem; in ccp_sha_complete() 57 static int ccp_do_sha_update(struct ahash_request *req, unsigned int nbytes, in ccp_do_sha_update() argument 71 len = (u64)rctx->buf_count + (u64)nbytes; in ccp_do_sha_update() 75 0, nbytes, 0); in ccp_do_sha_update() 76 rctx->buf_count += nbytes; in ccp_do_sha_update() 82 rctx->nbytes = nbytes; in ccp_do_sha_update() 97 if (rctx->buf_count && nbytes) { in ccp_do_sha_update() 118 } else if (nbytes) { in ccp_do_sha_update() 173 return ccp_do_sha_update(req, req->nbytes, 0); in ccp_sha_update() 183 return ccp_do_sha_update(req, req->nbytes, 1); in ccp_sha_finup()
|
D | ccp-crypto-aes-xts.c | 131 if (req->nbytes & (AES_BLOCK_SIZE - 1)) in ccp_aes_xts_crypt() 138 if (req->nbytes <= unit_size_map[0].size) { in ccp_aes_xts_crypt() 140 if (!(req->nbytes & (unit_size_map[unit].size - 1))) { in ccp_aes_xts_crypt() 174 rctx->cmd.u.xts.src_len = req->nbytes; in ccp_aes_xts_crypt()
|
D | ccp-ops.c | 522 unsigned int nbytes = min_t(u64, len, wa->bytes_left); in ccp_update_sg_workarea() local 527 wa->sg_used += nbytes; in ccp_update_sg_workarea() 528 wa->bytes_left -= nbytes; in ccp_update_sg_workarea() 618 unsigned int nbytes, sg_offset, dm_offset, ksb_len, i; in ccp_reverse_set_dm_area() local 625 nbytes = len; in ccp_reverse_set_dm_area() 626 while (nbytes) { in ccp_reverse_set_dm_area() 627 ksb_len = min_t(unsigned int, nbytes, se_len); in ccp_reverse_set_dm_area() 635 nbytes -= ksb_len; in ccp_reverse_set_dm_area() 650 unsigned int nbytes, sg_offset, dm_offset, ksb_len, i; in ccp_reverse_get_dm_area() local 655 nbytes = len; in ccp_reverse_get_dm_area() [all …]
|
D | ccp-crypto.h | 111 unsigned int nbytes; member 165 unsigned int nbytes; member
|
/linux-4.1.27/drivers/isdn/i4l/ |
D | isdn_v110.c | 102 v->nbytes = 8 / v->nbits; in isdn_v110_open() 119 v->framelen = v->nbytes * 10; in isdn_v110_open() 154 for (i = 0; (i < v->decodelen) && (i < v->nbytes); i++) in ValidHeaderBytes() 287 if (v->decodelen < v->nbytes) { /* got a new header ? */ in isdn_v110_decode() 291 if (ValidHeaderBytes(v) != v->nbytes) { /* is that a valid header? */ in isdn_v110_decode() 295 len = (v->decodelen - (v->decodelen % (10 * v->nbytes))) / v->nbytes; in isdn_v110_decode() 303 for (j = 0; j < v->nbytes; j++) in isdn_v110_decode() 304 v110_buf[i] |= (v->decodebuf[(i * v->nbytes) + j] & v->key) << (8 - ((j + 1) * v->nbits)); in isdn_v110_decode() 307 v->decodelen = (v->decodelen % (10 * v->nbytes)); in isdn_v110_decode() 308 memcpy(v->decodebuf, &(v->decodebuf[len * v->nbytes]), v->decodelen); in isdn_v110_decode() [all …]
|
/linux-4.1.27/drivers/scsi/bfa/ |
D | bfad_debugfs.c | 183 size_t nbytes, loff_t *pos) in bfad_debugfs_read() argument 190 return simple_read_from_buffer(buf, nbytes, pos, in bfad_debugfs_read() 226 size_t nbytes, loff_t *pos) in bfad_debugfs_read_regrd() argument 236 rc = simple_read_from_buffer(buf, nbytes, pos, in bfad_debugfs_read_regrd() 239 if ((*pos + nbytes) >= bfad->reglen) { in bfad_debugfs_read_regrd() 250 size_t nbytes, loff_t *ppos) in bfad_debugfs_write_regrd() argument 263 kern_buf = memdup_user(buf, nbytes); in bfad_debugfs_write_regrd() 313 return nbytes; in bfad_debugfs_write_regrd() 318 size_t nbytes, loff_t *ppos) in bfad_debugfs_write_regwr() argument 330 kern_buf = memdup_user(buf, nbytes); in bfad_debugfs_write_regwr() [all …]
|
/linux-4.1.27/net/ipv4/ |
D | tcp_memcontrol.c | 116 char *buf, size_t nbytes, loff_t off) in tcp_cgroup_write() argument 138 return ret ?: nbytes; in tcp_cgroup_write() 179 char *buf, size_t nbytes, loff_t off) in tcp_cgroup_reset() argument 187 return nbytes; in tcp_cgroup_reset() 198 return nbytes; in tcp_cgroup_reset()
|
/linux-4.1.27/fs/ |
D | sync.c | 277 SYSCALL_DEFINE4(sync_file_range, int, fd, loff_t, offset, loff_t, nbytes, in SYSCALL_DEFINE4() argument 290 endbyte = offset + nbytes; in SYSCALL_DEFINE4() 312 nbytes = 0; in SYSCALL_DEFINE4() 316 if (nbytes == 0) in SYSCALL_DEFINE4() 363 loff_t, offset, loff_t, nbytes) in SYSCALL_DEFINE4() argument 365 return sys_sync_file_range(fd, offset, nbytes, flags); in SYSCALL_DEFINE4()
|
/linux-4.1.27/arch/x86/crypto/sha-mb/ |
D | sha1_mb.c | 382 int nbytes, err = 0; in sha_finish_walk() local 388 nbytes = crypto_ahash_walk_done(&rctx->walk, 0); in sha_finish_walk() 389 if (nbytes < 0) { in sha_finish_walk() 390 err = nbytes; in sha_finish_walk() 402 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, flag); in sha_finish_walk() 501 int ret = 0, nbytes; in sha1_mb_update() local 513 nbytes = crypto_ahash_walk_first(req, &rctx->walk); in sha1_mb_update() 515 if (nbytes < 0) { in sha1_mb_update() 516 ret = nbytes; in sha1_mb_update() 527 sha_ctx = sha1_ctx_mgr_submit(cstate->mgr, sha_ctx, rctx->walk.data, nbytes, HASH_UPDATE); in sha1_mb_update() [all …]
|
/linux-4.1.27/drivers/i2c/busses/ |
D | i2c-xlr.c | 136 int nbytes, timedout; in xlr_i2c_rx() local 146 nbytes = 0; in xlr_i2c_rx() 154 if (nbytes > len) in xlr_i2c_rx() 159 if (nbytes < len) in xlr_i2c_rx() 160 buf[nbytes] = byte; in xlr_i2c_rx() 161 nbytes++; in xlr_i2c_rx()
|
/linux-4.1.27/net/atm/ |
D | mpoa_proc.c | 51 size_t nbytes, loff_t *ppos); 207 size_t nbytes, loff_t *ppos) in proc_mpc_write() argument 212 if (nbytes == 0) in proc_mpc_write() 215 if (nbytes >= PAGE_SIZE) in proc_mpc_write() 216 nbytes = PAGE_SIZE-1; in proc_mpc_write() 222 for (p = page, len = 0; len < nbytes; p++, len++) { in proc_mpc_write()
|
/linux-4.1.27/fs/xfs/ |
D | xfs_inode_item.c | 44 int *nbytes) in xfs_inode_item_data_fork_size() argument 54 *nbytes += XFS_IFORK_DSIZE(ip); in xfs_inode_item_data_fork_size() 61 *nbytes += ip->i_df.if_broot_bytes; in xfs_inode_item_data_fork_size() 68 *nbytes += roundup(ip->i_df.if_bytes, 4); in xfs_inode_item_data_fork_size() 86 int *nbytes) in xfs_inode_item_attr_fork_size() argument 96 *nbytes += XFS_IFORK_ASIZE(ip); in xfs_inode_item_attr_fork_size() 103 *nbytes += ip->i_afp->if_broot_bytes; in xfs_inode_item_attr_fork_size() 110 *nbytes += roundup(ip->i_afp->if_bytes, 4); in xfs_inode_item_attr_fork_size() 131 int *nbytes) in xfs_inode_item_size() argument 137 *nbytes += sizeof(struct xfs_inode_log_format) + in xfs_inode_item_size() [all …]
|
D | xfs_log_cil.c | 176 int nbytes = 0; in xlog_cil_insert_format_items() local 185 lip->li_ops->iop_size(lip, &niovecs, &nbytes); in xlog_cil_insert_format_items() 199 nbytes = 0; in xlog_cil_insert_format_items() 209 nbytes += niovecs * sizeof(uint64_t); in xlog_cil_insert_format_items() 210 nbytes = round_up(nbytes, sizeof(uint64_t)); in xlog_cil_insert_format_items() 220 buf_size = nbytes + in xlog_cil_insert_format_items() 260 lv->lv_buf = (char *)lv + buf_size - nbytes; in xlog_cil_insert_format_items() 265 ASSERT(lv->lv_buf_len <= nbytes); in xlog_cil_insert_format_items()
|
D | xfs_dquot_item.c | 45 int *nbytes) in xfs_qm_dquot_logitem_size() argument 48 *nbytes += sizeof(struct xfs_dq_logformat) + in xfs_qm_dquot_logitem_size() 281 int *nbytes) in xfs_qm_qoff_logitem_size() argument 284 *nbytes += sizeof(struct xfs_qoff_logitem); in xfs_qm_qoff_logitem_size()
|
D | xfs_extfree_item.c | 88 int *nbytes) in xfs_efi_item_size() argument 91 *nbytes += xfs_efi_item_sizeof(EFI_ITEM(lip)); in xfs_efi_item_size() 355 int *nbytes) in xfs_efd_item_size() argument 358 *nbytes += xfs_efd_item_sizeof(EFD_ITEM(lip)); in xfs_efd_item_size()
|
D | xfs_icreate_item.c | 48 int *nbytes) in xfs_icreate_item_size() argument 51 *nbytes += sizeof(struct xfs_icreate_log); in xfs_icreate_item_size()
|
D | xfs_buf_item.c | 66 int *nbytes) in xfs_buf_item_size_segment() argument 81 *nbytes += xfs_buf_log_format_size(blfp) + XFS_BLF_CHUNK; in xfs_buf_item_size_segment() 110 *nbytes += XFS_BLF_CHUNK; in xfs_buf_item_size_segment() 135 int *nbytes) in xfs_buf_item_size() argument 151 *nbytes += xfs_buf_log_format_size(&bip->bli_formats[i]); in xfs_buf_item_size() 180 nvecs, nbytes); in xfs_buf_item_size()
|
D | xfs_buf.c | 291 size_t nbytes, offset; in xfs_buf_allocate_memory() local 367 nbytes = min_t(size_t, size, PAGE_SIZE - offset); in xfs_buf_allocate_memory() 368 size -= nbytes; in xfs_buf_allocate_memory() 1178 int rbytes, nbytes = PAGE_SIZE - offset; in xfs_buf_ioapply_map() local 1180 if (nbytes > size) in xfs_buf_ioapply_map() 1181 nbytes = size; in xfs_buf_ioapply_map() 1183 rbytes = bio_add_page(bio, bp->b_pages[page_index], nbytes, in xfs_buf_ioapply_map() 1185 if (rbytes < nbytes) in xfs_buf_ioapply_map() 1189 sector += BTOBB(nbytes); in xfs_buf_ioapply_map() 1190 size -= nbytes; in xfs_buf_ioapply_map()
|
/linux-4.1.27/drivers/usb/core/ |
D | devices.c | 495 static ssize_t usb_device_dump(char __user **buffer, size_t *nbytes, in usb_device_dump() argument 509 if (*nbytes <= 0) in usb_device_dump() 583 if (length > *nbytes) in usb_device_dump() 584 length = *nbytes; in usb_device_dump() 589 *nbytes -= length; in usb_device_dump() 602 ret = usb_device_dump(buffer, nbytes, skip_bytes, in usb_device_dump() 614 size_t nbytes, loff_t *ppos) in usb_device_read() argument 622 if (nbytes <= 0) in usb_device_read() 624 if (!access_ok(VERIFY_WRITE, buf, nbytes)) in usb_device_read() 634 ret = usb_device_dump(&buf, &nbytes, &skip_bytes, ppos, in usb_device_read()
|
/linux-4.1.27/arch/cris/arch-v32/drivers/ |
D | i2c.h | 5 int i2c_write(unsigned char theSlave, void *data, size_t nbytes); 6 int i2c_read(unsigned char theSlave, void *data, size_t nbytes);
|
D | i2c.c | 391 i2c_write(unsigned char theSlave, void *data, size_t nbytes) in i2c_write() argument 416 for (bytes_wrote = 0; bytes_wrote < nbytes; bytes_wrote++) { in i2c_write() 447 i2c_read(unsigned char theSlave, void *data, size_t nbytes) in i2c_read() argument 458 memset(data, 0, nbytes); in i2c_read() 475 for (bytes_read = 0; bytes_read < nbytes; bytes_read++) { in i2c_read() 479 if (bytes_read < (nbytes - 1)) in i2c_read()
|
/linux-4.1.27/drivers/net/wireless/ath/ath10k/ |
D | ce.c | 271 unsigned int nbytes, in ath10k_ce_send_nolock() argument 285 if (nbytes > ce_state->src_sz_max) in ath10k_ce_send_nolock() 287 __func__, nbytes, ce_state->src_sz_max); in ath10k_ce_send_nolock() 307 sdesc->nbytes = __cpu_to_le16(nbytes); in ath10k_ce_send_nolock() 356 unsigned int nbytes, in ath10k_ce_send() argument 366 buffer, nbytes, transfer_id, flags); in ath10k_ce_send() 419 desc->nbytes = 0; in __ath10k_ce_rx_post_buf() 460 u16 nbytes; in ath10k_ce_completed_recv_next_nolock() local 465 nbytes = __le16_to_cpu(sdesc.nbytes); in ath10k_ce_completed_recv_next_nolock() 466 if (nbytes == 0) { in ath10k_ce_completed_recv_next_nolock() [all …]
|
D | pci.c | 488 int nbytes) in ath10k_pci_diag_read_mem() argument 513 orig_nbytes = nbytes; in ath10k_pci_diag_read_mem() 528 nbytes = min_t(unsigned int, remaining_bytes, in ath10k_pci_diag_read_mem() 547 ret = ath10k_ce_send_nolock(ce_diag, NULL, (u32)address, nbytes, 0, in ath10k_pci_diag_read_mem() 563 if (nbytes != completed_nbytes) { in ath10k_pci_diag_read_mem() 585 if (nbytes != completed_nbytes) { in ath10k_pci_diag_read_mem() 595 remaining_bytes -= nbytes; in ath10k_pci_diag_read_mem() 596 address += nbytes; in ath10k_pci_diag_read_mem() 597 ce_data += nbytes; in ath10k_pci_diag_read_mem() 656 const void *data, int nbytes) in ath10k_pci_diag_write_mem() argument [all …]
|
D | hif.h | 50 int nbytes); 117 const void *data, int nbytes) in ath10k_hif_diag_write() argument 122 return ar->hif.ops->diag_write(ar, address, data, nbytes); in ath10k_hif_diag_write()
|
D | ce.h | 46 __le16 nbytes; member 149 unsigned int nbytes, 157 unsigned int nbytes,
|
/linux-4.1.27/fs/fuse/ |
D | dev.c | 303 unsigned nbytes = 0; in len_args() local 307 nbytes += args[i].size; in len_args() 309 return nbytes; in len_args() 1015 static int fuse_copy_pages(struct fuse_copy_state *cs, unsigned nbytes, in fuse_copy_pages() argument 1021 for (i = 0; i < req->num_pages && (nbytes || zeroing); i++) { in fuse_copy_pages() 1024 unsigned count = min(nbytes, req->page_descs[i].length); in fuse_copy_pages() 1031 nbytes -= count; in fuse_copy_pages() 1109 size_t nbytes, struct fuse_req *req) in fuse_read_interrupt() argument 1127 if (nbytes < reqsize) in fuse_read_interrupt() 1162 size_t nbytes) in fuse_read_single_forget() argument [all …]
|
/linux-4.1.27/arch/xtensa/platforms/iss/ |
D | simdisk.c | 76 unsigned long nbytes = nsect << SECTOR_SHIFT; in simdisk_transfer() local 78 if (offset > dev->size || dev->size - offset < nbytes) { in simdisk_transfer() 80 write ? "write" : "read", offset, nbytes); in simdisk_transfer() 85 while (nbytes > 0) { in simdisk_transfer() 90 io = simc_write(dev->fd, buffer, nbytes); in simdisk_transfer() 92 io = simc_read(dev->fd, buffer, nbytes); in simdisk_transfer() 99 nbytes -= io; in simdisk_transfer()
|
/linux-4.1.27/drivers/mtd/nand/gpmi-nand/ |
D | gpmi-lib.c | 1377 size_t nbytes; in gpmi_copy_bits() local 1411 nbytes = nbits / 8; in gpmi_copy_bits() 1415 if (bits_in_src_buffer < (8 - dst_bit_off) && nbytes) { in gpmi_copy_bits() 1419 nbytes--; in gpmi_copy_bits() 1443 if (nbytes) in gpmi_copy_bits() 1444 memcpy(dst, src, nbytes); in gpmi_copy_bits() 1451 for (i = 0; i < nbytes; i++) { in gpmi_copy_bits() 1458 dst += nbytes; in gpmi_copy_bits() 1459 src += nbytes; in gpmi_copy_bits() 1495 nbytes = bits_in_src_buffer / 8; in gpmi_copy_bits() [all …]
|
/linux-4.1.27/drivers/rtc/ |
D | rtc-ds1374.c | 86 int reg, int nbytes) in ds1374_read_rtc() argument 92 if (nbytes > 4) { in ds1374_read_rtc() 97 ret = i2c_smbus_read_i2c_block_data(client, reg, nbytes, buf); in ds1374_read_rtc() 101 if (ret < nbytes) in ds1374_read_rtc() 104 for (i = nbytes - 1, *time = 0; i >= 0; i--) in ds1374_read_rtc() 111 int reg, int nbytes) in ds1374_write_rtc() argument 116 if (nbytes > 4) { in ds1374_write_rtc() 121 for (i = 0; i < nbytes; i++) { in ds1374_write_rtc() 126 return i2c_smbus_write_i2c_block_data(client, reg, nbytes, buf); in ds1374_write_rtc()
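ds1374_read_rtc() above rebuilds an up-to-4-byte counter from an I2C block read with the low byte first, and ds1374_write_rtc() splits it back out the same way. The byte-order loops on their own, as plain C with illustrative names:

#include <stdint.h>

/* Assemble a little-endian value from nbytes register bytes (buf[0] is
 * the least significant), as in the read path above. */
static uint32_t le_bytes_to_u32(const uint8_t *buf, int nbytes)
{
        uint32_t val = 0;
        int i;

        for (i = nbytes - 1; i >= 0; i--)
                val = (val << 8) | buf[i];
        return val;
}

/* Split the value back into nbytes little-endian register bytes, as in
 * the write path above. */
static void u32_to_le_bytes(uint32_t val, uint8_t *buf, int nbytes)
{
        int i;

        for (i = 0; i < nbytes; i++) {
                buf[i] = val & 0xff;
                val >>= 8;
        }
}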
|
/linux-4.1.27/arch/x86/kernel/ |
D | uprobes.c | 272 for (i = 0; i < insn->prefixes.nbytes; i++) { in is_prefix_bad() 306 if (insn->opcode.nbytes == 2) { in uprobe_init_insn() 358 if (insn->rex_prefix.nbytes) { in riprel_analyze() 367 if (insn->vex_prefix.nbytes == 3) { in riprel_analyze() 422 if (insn->vex_prefix.nbytes == 2) in riprel_analyze() 424 else if (insn->vex_prefix.nbytes == 3) in riprel_analyze() 703 0, insn->immediate.nbytes); in branch_clear_offset() 728 if (insn->opcode.nbytes != 2) in branch_setup_xol_ops() 745 for (i = 0; i < insn->prefixes.nbytes; i++) { in branch_setup_xol_ops()
|
/linux-4.1.27/drivers/scsi/lpfc/ |
D | lpfc_debugfs.c | 989 size_t nbytes, loff_t *ppos) in lpfc_debugfs_dumpDataDif_write() argument 1007 return nbytes; in lpfc_debugfs_dumpDataDif_write() 1012 size_t nbytes, loff_t *ppos) in lpfc_debugfs_dif_err_read() argument 1048 return simple_read_from_buffer(buf, nbytes, ppos, &cbuf, cnt); in lpfc_debugfs_dif_err_read() 1053 size_t nbytes, loff_t *ppos) in lpfc_debugfs_dif_err_write() argument 1062 size = (nbytes < 32) ? nbytes : 32; in lpfc_debugfs_dif_err_write() 1097 return nbytes; in lpfc_debugfs_dif_err_write() 1190 size_t nbytes, loff_t *ppos) in lpfc_debugfs_read() argument 1194 return simple_read_from_buffer(buf, nbytes, ppos, debug->buffer, in lpfc_debugfs_read() 1256 static int lpfc_idiag_cmd_get(const char __user *buf, size_t nbytes, in lpfc_idiag_cmd_get() argument [all …]
|
/linux-4.1.27/arch/mips/alchemy/common/ |
D | dbdma.c | 591 u32 au1xxx_dbdma_put_source(u32 chanid, dma_addr_t buf, int nbytes, u32 flags) in au1xxx_dbdma_put_source() argument 618 dp->dscr_cmd1 = nbytes; in au1xxx_dbdma_put_source() 632 dma_cache_wback_inv((unsigned long)buf, nbytes); in au1xxx_dbdma_put_source() 642 return nbytes; in au1xxx_dbdma_put_source() 650 u32 au1xxx_dbdma_put_dest(u32 chanid, dma_addr_t buf, int nbytes, u32 flags) in au1xxx_dbdma_put_dest() argument 681 dp->dscr_cmd1 = nbytes; in au1xxx_dbdma_put_dest() 694 dma_cache_inv((unsigned long)buf, nbytes); in au1xxx_dbdma_put_dest() 704 return nbytes; in au1xxx_dbdma_put_dest() 714 u32 au1xxx_dbdma_get_dest(u32 chanid, void **buf, int *nbytes) in au1xxx_dbdma_get_dest() argument 742 *nbytes = dp->dscr_cmd1; in au1xxx_dbdma_get_dest() [all …]
|
/linux-4.1.27/drivers/dma/ |
D | fsl-edma.c | 124 __le32 nbytes; member 370 len += le32_to_cpu(edesc->tcd[i].vtcd->nbytes) in fsl_edma_desc_residue() 383 size = le32_to_cpu(edesc->tcd[i].vtcd->nbytes) in fsl_edma_desc_residue() 448 edma_writel(edma, le32_to_cpu(tcd->nbytes), addr + EDMA_TCD_NBYTES(ch)); in fsl_edma_set_tcd_regs() 462 u16 attr, u16 soff, u32 nbytes, u32 slast, u16 citer, in fsl_edma_fill_tcd() argument 481 tcd->nbytes = cpu_to_le32(EDMA_TCD_NBYTES_NBYTES(nbytes)); in fsl_edma_fill_tcd() 540 u32 src_addr, dst_addr, last_sg, nbytes; in fsl_edma_prep_dma_cyclic() local 553 nbytes = fsl_chan->fsc.addr_width * fsl_chan->fsc.burst; in fsl_edma_prep_dma_cyclic() 554 iter = period_len / nbytes; in fsl_edma_prep_dma_cyclic() 576 fsl_chan->fsc.attr, soff, nbytes, 0, iter, in fsl_edma_prep_dma_cyclic() [all …]
|
/linux-4.1.27/kernel/ |
D | module_signing.c | 111 size_t nbytes; in mod_extract_mpi_array() local 116 nbytes = ((const u8 *)data)[0] << 8 | ((const u8 *)data)[1]; in mod_extract_mpi_array() 119 if (len != nbytes) in mod_extract_mpi_array() 122 mpi = mpi_read_raw_data(data, nbytes); in mod_extract_mpi_array()
|
/linux-4.1.27/drivers/block/ |
D | skd_main.c | 1441 uint nbytes = sizeof(*iov) * sgp->iovec_count; in skd_sg_io_get_and_check_args() local 1444 iov = kmalloc(nbytes, GFP_KERNEL); in skd_sg_io_get_and_check_args() 1454 if (copy_from_user(iov, sgp->dxferp, nbytes)) { in skd_sg_io_get_and_check_args() 1566 u32 nbytes = PAGE_SIZE; in skd_skreq_prep_buffering() local 1572 if (nbytes > resid) in skd_skreq_prep_buffering() 1573 nbytes = resid; in skd_skreq_prep_buffering() 1579 sg_set_page(sg, page, nbytes, 0); in skd_skreq_prep_buffering() 1584 sksg->byte_count = nbytes; in skd_skreq_prep_buffering() 1593 resid -= nbytes; in skd_skreq_prep_buffering() 1662 u32 nbytes = PAGE_SIZE; in skd_sg_io_copy_buffer() local [all …]
|
/linux-4.1.27/drivers/usb/mon/ |
D | mon_stat.c | 45 size_t nbytes, loff_t *ppos) in mon_stat_read() argument 49 return simple_read_from_buffer(buf, nbytes, ppos, sp->str, sp->slen); in mon_stat_read()
|
D | mon_bin.c | 735 void __user *data, unsigned int nbytes) in mon_bin_get_event() argument 757 step_len = min(ep->len_cap, nbytes); in mon_bin_get_event() 797 size_t nbytes, loff_t *ppos) in mon_bin_read() argument 819 step_len = min(nbytes, (size_t)(hdrbytes - rp->b_read)); in mon_bin_read() 825 nbytes -= step_len; in mon_bin_read() 834 if (step_len > nbytes) in mon_bin_read() 835 step_len = nbytes; in mon_bin_read() 844 nbytes -= step_len; in mon_bin_read()
|
/linux-4.1.27/lib/ |
D | crc8.c | 74 u8 crc8(const u8 table[CRC8_TABLE_SIZE], u8 *pdata, size_t nbytes, u8 crc) in crc8() argument 77 while (nbytes-- > 0) in crc8()
|
/linux-4.1.27/drivers/usb/gadget/function/ |
D | uvc_video.c | 48 unsigned int nbytes; in uvc_video_encode_data() local 53 nbytes = min((unsigned int)len, buf->bytesused - queue->buf_used); in uvc_video_encode_data() 55 memcpy(data, mem, nbytes); in uvc_video_encode_data() 56 queue->buf_used += nbytes; in uvc_video_encode_data() 58 return nbytes; in uvc_video_encode_data()
|
/linux-4.1.27/arch/powerpc/kernel/ |
D | proc_powerpc.c | 37 static ssize_t page_map_read( struct file *file, char __user *buf, size_t nbytes, in page_map_read() argument 40 return simple_read_from_buffer(buf, nbytes, ppos, in page_map_read()
|
D | sys_ppc32.c | 125 loff_t nbytes = ((loff_t)nbytes_hi << 32) | nbytes_lo; in compat_sys_sync_file_range2() local 127 return sys_sync_file_range(fd, offset, nbytes, flags); in compat_sys_sync_file_range2()
|
/linux-4.1.27/drivers/iio/accel/ |
D | mma9551_core.c | 90 u8 nbytes; member 103 u8 nbytes; member 139 req.nbytes = num_inbytes; in mma9551_transfer() 141 req.nbytes = num_outbytes; in mma9551_transfer() 194 if (rsp.nbytes != rsp.req_bytes) { in mma9551_transfer() 197 rsp.nbytes, rsp.req_bytes); in mma9551_transfer()
|
/linux-4.1.27/arch/alpha/kernel/ |
D | osf_sys.c | 610 long nbytes; member 615 long nbytes; member 622 long nbytes; member 629 long nbytes; member 658 if (get_user(error, &args->set.nbytes)) in SYSCALL_DEFINE2() 662 if (get_user(error, &args->fset.nbytes)) in SYSCALL_DEFINE2() 768 unsigned long, nbytes, int __user *, start, void __user *, arg) in SYSCALL_DEFINE5() argument 793 if (nbytes < sizeof(unsigned int)) in SYSCALL_DEFINE5() 801 if (nbytes < sizeof(unsigned long)) in SYSCALL_DEFINE5() 811 if (nbytes > sizeof(*hwrpb)) in SYSCALL_DEFINE5() [all …]
|
D | smc37c669.c | 2388 int nbytes; 2396 nbytes = 0; 2404 nbytes++; 2407 return nbytes; 2414 int nbytes; 2421 nbytes = 0; 2430 nbytes++; 2433 return nbytes;
|
/linux-4.1.27/drivers/net/ppp/ |
D | ppp_mppe.c | 140 unsigned int nbytes; in get_new_key_from_sha() local 144 nbytes = setup_sg(&sg[0], state->master_key, state->keylen); in get_new_key_from_sha() 145 nbytes += setup_sg(&sg[1], sha_pad->sha_pad1, in get_new_key_from_sha() 147 nbytes += setup_sg(&sg[2], state->session_key, state->keylen); in get_new_key_from_sha() 148 nbytes += setup_sg(&sg[3], sha_pad->sha_pad2, in get_new_key_from_sha() 154 crypto_hash_digest(&desc, sg, nbytes, state->sha1_digest); in get_new_key_from_sha()
|
/linux-4.1.27/fs/jfs/ |
D | xattr.c | 226 s32 nbytes, nb; in ea_write() local 260 nbytes = size; in ea_write() 266 nb = min(PSIZE, nbytes); in ea_write() 299 nbytes -= nb; in ea_write() 375 int nbytes, nb; in ea_read() local 383 nbytes = sizeDXD(&ji->ea); in ea_read() 384 if (!nbytes) { in ea_read() 406 nb = min(PSIZE, nbytes); in ea_read() 418 nbytes -= nb; in ea_read()
|
D | jfs_logmgr.c | 369 int nbytes; /* number of bytes to move */ in lmWriteRecord() local 444 nbytes = min(freespace, srclen); in lmWriteRecord() 446 memcpy(dst, src, nbytes); in lmWriteRecord() 447 dstoffset += nbytes; in lmWriteRecord() 460 srclen -= nbytes; in lmWriteRecord() 461 src += nbytes; in lmWriteRecord() 492 nbytes = min(freespace, srclen); in lmWriteRecord() 494 memcpy(dst, src, nbytes); in lmWriteRecord() 496 dstoffset += nbytes; in lmWriteRecord() 497 srclen -= nbytes; in lmWriteRecord() [all …]
|
/linux-4.1.27/mm/ |
D | hugetlb_cgroup.c | 272 char *buf, size_t nbytes, loff_t off) in hugetlb_cgroup_write() argument 298 return ret ?: nbytes; in hugetlb_cgroup_write() 302 char *buf, size_t nbytes, loff_t off) in hugetlb_cgroup_reset() argument 321 return ret ?: nbytes; in hugetlb_cgroup_reset()
|
/linux-4.1.27/arch/mips/include/asm/mach-au1x00/ |
D | au1xxx_dbdma.h | 361 u32 au1xxx_dbdma_put_source(u32 chanid, dma_addr_t buf, int nbytes, u32 flags); 362 u32 au1xxx_dbdma_put_dest(u32 chanid, dma_addr_t buf, int nbytes, u32 flags); 365 u32 au1xxx_dbdma_get_dest(u32 chanid, void **buf, int *nbytes);
|
/linux-4.1.27/drivers/crypto/ux500/cryp/ |
D | cryp_core.c | 811 static int get_nents(struct scatterlist *sg, int nbytes) in get_nents() argument 815 while (nbytes > 0) { in get_nents() 816 nbytes -= sg->length; in get_nents() 836 ctx->datalen = areq->nbytes; in ablk_dma_crypt() 837 ctx->outlen = areq->nbytes; in ablk_dma_crypt() 890 int nbytes; in ablk_crypt() local 898 ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes); in ablk_crypt() 907 while ((nbytes = walk.nbytes) > 0) { in ablk_crypt() 915 ctx->datalen = nbytes - (nbytes % ctx->blocksize); in ablk_crypt() 921 nbytes -= ctx->datalen; in ablk_crypt() [all …]
|
/linux-4.1.27/arch/sparc/include/asm/ |
D | openprom.h | 41 int (*v2_dev_read)(int d, char *buf, int nbytes); 42 int (*v2_dev_write)(int d, const char *buf, int nbytes);
|
/linux-4.1.27/drivers/staging/comedi/drivers/ |
D | quatech_daqp_cs.c | 570 unsigned long long nbytes; in daqp_ai_cmd() local 574 nbytes = nsamples * comedi_bytes_per_sample(s); in daqp_ai_cmd() 575 while (nbytes > DAQP_FIFO_SIZE * 3 / 4) in daqp_ai_cmd() 576 nbytes /= 2; in daqp_ai_cmd() 577 threshold = nbytes; in daqp_ai_cmd()
|
D | dt282x.c | 397 unsigned int nbytes) in dt282x_munge() argument 403 if (nbytes % 2) in dt282x_munge() 407 for (i = 0; i < nbytes / 2; i++) { in dt282x_munge() 425 unsigned int nbytes; in dt282x_ao_setup_dma() local 427 nbytes = comedi_buf_read_samples(s, desc->virt_addr, nsamples); in dt282x_ao_setup_dma() 428 if (nbytes) in dt282x_ao_setup_dma() 429 dt282x_prep_ao_dma(dev, cur_dma, nbytes); in dt282x_ao_setup_dma() 433 return nbytes; in dt282x_ao_setup_dma()
|
/linux-4.1.27/arch/m32r/kernel/ |
D | sys_m32r.c | 86 asmlinkage int sys_cachectl(char *addr, int nbytes, int op) in sys_cachectl() argument
|
/linux-4.1.27/drivers/scsi/fnic/ |
D | fnic_debugfs.c | 323 size_t nbytes, in fnic_trace_debugfs_read() argument 328 rc = simple_read_from_buffer(ubuf, nbytes, pos, in fnic_trace_debugfs_read() 719 size_t nbytes, in fnic_stats_debugfs_read() argument 724 rc = simple_read_from_buffer(ubuf, nbytes, pos, in fnic_stats_debugfs_read()
|
/linux-4.1.27/drivers/staging/unisys/visorchipset/ |
D | parser.c | 168 void *parser_byte_stream_get(struct parser_context *ctx, ulong *nbytes) in parser_byte_stream_get() argument 172 if (nbytes) in parser_byte_stream_get() 173 *nbytes = ctx->param_bytes; in parser_byte_stream_get()
|
D | parser.h | 43 void *parser_byte_stream_get(struct parser_context *ctx, ulong *nbytes);
|
/linux-4.1.27/drivers/atm/ |
D | nicstarmac.c | 239 u_int8_t prom_offset, u_int8_t * buffer, u_int32_t nbytes) in nicstar_read_eprom() argument 243 for (i = 0; i < nbytes; i++) { in nicstar_read_eprom()
|
/linux-4.1.27/arch/m68k/include/asm/ |
D | openprom.h | 49 int (*v2_dev_read)(int d, char *buf, int nbytes); 50 int (*v2_dev_write)(int d, char *buf, int nbytes);
|
/linux-4.1.27/net/rds/ |
D | message.c | 269 unsigned long to_copy, nbytes; in rds_message_copy_from_user() local 296 nbytes = copy_page_from_iter(sg_page(sg), sg->offset + sg_off, in rds_message_copy_from_user() 298 if (nbytes != to_copy) in rds_message_copy_from_user()
|
/linux-4.1.27/drivers/spi/ |
D | spi-tegra20-sflash.c | 186 unsigned nbytes; in tegra_sflash_fill_tx_fifo_from_client_txbuf() local 193 nbytes = max_n_32bit * tsd->bytes_per_word; in tegra_sflash_fill_tx_fifo_from_client_txbuf() 200 for (i = 0; nbytes && (i < tsd->bytes_per_word); in tegra_sflash_fill_tx_fifo_from_client_txbuf() 201 i++, nbytes--) in tegra_sflash_fill_tx_fifo_from_client_txbuf() 204 if (!nbytes) in tegra_sflash_fill_tx_fifo_from_client_txbuf()
|
D | spi-tegra114.c | 286 unsigned nbytes; in tegra_spi_fill_tx_fifo_from_client_txbuf() local 301 nbytes = written_words * tspi->bytes_per_word; in tegra_spi_fill_tx_fifo_from_client_txbuf() 302 max_n_32bit = DIV_ROUND_UP(nbytes, 4); in tegra_spi_fill_tx_fifo_from_client_txbuf() 306 for (i = 0; (i < 4) && nbytes; i++, nbytes--) in tegra_spi_fill_tx_fifo_from_client_txbuf() 313 nbytes = written_words * tspi->bytes_per_word; in tegra_spi_fill_tx_fifo_from_client_txbuf() 317 for (i = 0; nbytes && (i < tspi->bytes_per_word); in tegra_spi_fill_tx_fifo_from_client_txbuf() 318 i++, nbytes--) in tegra_spi_fill_tx_fifo_from_client_txbuf()
|
D | spi-tegra20-slink.c | 303 unsigned nbytes; in tegra_slink_fill_tx_fifo_from_client_txbuf() local 318 nbytes = written_words * tspi->bytes_per_word; in tegra_slink_fill_tx_fifo_from_client_txbuf() 319 max_n_32bit = DIV_ROUND_UP(nbytes, 4); in tegra_slink_fill_tx_fifo_from_client_txbuf() 322 for (i = 0; (i < 4) && nbytes; i++, nbytes--) in tegra_slink_fill_tx_fifo_from_client_txbuf() 329 nbytes = written_words * tspi->bytes_per_word; in tegra_slink_fill_tx_fifo_from_client_txbuf() 332 for (i = 0; nbytes && (i < tspi->bytes_per_word); in tegra_slink_fill_tx_fifo_from_client_txbuf() 333 i++, nbytes--) in tegra_slink_fill_tx_fifo_from_client_txbuf()
|
/linux-4.1.27/arch/ia64/include/asm/sn/ |
D | tioce_provider.h | 51 u64 nbytes; /* # bytes mapped */ member
|
/linux-4.1.27/net/core/ |
D | netprio_cgroup.c | 189 char *buf, size_t nbytes, loff_t off) in write_priomap() argument 209 return ret ?: nbytes; in write_priomap()
|
/linux-4.1.27/block/ |
D | blk-merge.c | 164 int nbytes = bvec->bv_len; in __blk_segment_map_sg() local 167 if ((*sg)->length + nbytes > queue_max_segment_size(q)) in __blk_segment_map_sg() 175 (*sg)->length += nbytes; in __blk_segment_map_sg() 195 sg_set_page(*sg, bvec->bv_page, nbytes, bvec->bv_offset); in __blk_segment_map_sg()
|
/linux-4.1.27/include/linux/sunrpc/ |
D | xdr.h | 217 extern __be32 *xdr_reserve_space(struct xdr_stream *xdr, size_t nbytes); 228 extern __be32 *xdr_inline_decode(struct xdr_stream *xdr, size_t nbytes);
|
/linux-4.1.27/drivers/crypto/ux500/hash/ |
D | hash_core.c | 576 if (req->nbytes < HASH_DMA_ALIGN_SIZE) { in hash_init() 582 if (req->nbytes >= HASH_DMA_PERFORMANCE_MIN_SIZE && in hash_init() 583 hash_dma_valid_data(req->src, req->nbytes)) { in hash_init() 920 HASH_SET_NBLW((req->nbytes * 8) % 32); in hash_dma_final() 925 ctx->device->dma.nents = hash_get_nents(req->src, req->nbytes, NULL); in hash_dma_final() 933 bytes_written = hash_dma_write(ctx, req->src, req->nbytes); in hash_dma_final() 934 if (bytes_written != req->nbytes) { in hash_dma_final() 997 } else if (req->nbytes == 0 && ctx->keylen == 0) { in hash_hw_final() 1024 } else if (req->nbytes == 0 && ctx->keylen > 0) { in hash_hw_final() 1332 pr_debug("%s: data size: %d\n", __func__, req->nbytes); in ahash_final()
|
/linux-4.1.27/drivers/gpu/drm/ |
D | drm_ioc32.c | 443 size_t nbytes; in compat_drm_infobufs() local 458 nbytes = sizeof(*request) + count * sizeof(struct drm_buf_desc); in compat_drm_infobufs() 459 request = compat_alloc_user_space(nbytes); in compat_drm_infobufs() 460 if (!access_ok(VERIFY_WRITE, request, nbytes)) in compat_drm_infobufs() 509 size_t nbytes; in compat_drm_mapbufs() local 519 nbytes = sizeof(*request) + count * sizeof(struct drm_buf_pub); in compat_drm_mapbufs() 520 request = compat_alloc_user_space(nbytes); in compat_drm_mapbufs() 521 if (!access_ok(VERIFY_WRITE, request, nbytes)) in compat_drm_mapbufs()
|
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmfmac/ |
D | bcmsdh.c | 667 int brcmf_sdiod_recv_buf(struct brcmf_sdio_dev *sdiodev, u8 *buf, uint nbytes) in brcmf_sdiod_recv_buf() argument 672 mypkt = brcmu_pkt_buf_get_skb(nbytes); in brcmf_sdiod_recv_buf() 675 nbytes); in brcmf_sdiod_recv_buf() 681 memcpy(buf, mypkt->data, nbytes); in brcmf_sdiod_recv_buf() 743 int brcmf_sdiod_send_buf(struct brcmf_sdio_dev *sdiodev, u8 *buf, uint nbytes) in brcmf_sdiod_send_buf() argument 749 mypkt = brcmu_pkt_buf_get_skb(nbytes); in brcmf_sdiod_send_buf() 752 nbytes); in brcmf_sdiod_send_buf() 756 memcpy(mypkt->data, buf, nbytes); in brcmf_sdiod_send_buf()
|
D | sdio.h | 319 int brcmf_sdiod_send_buf(struct brcmf_sdio_dev *sdiodev, u8 *buf, uint nbytes); 322 int brcmf_sdiod_recv_buf(struct brcmf_sdio_dev *sdiodev, u8 *buf, uint nbytes);
|
/linux-4.1.27/fs/cifs/ |
D | smb2proto.h | 131 unsigned int *nbytes, char **buf, int *buf_type); 135 unsigned int *nbytes, struct kvec *iov, int n_vec);
|
/linux-4.1.27/drivers/mmc/host/ |
D | atmel-mci.c | 1862 unsigned int nbytes = 0; in atmci_read_data_pio() local 1870 nbytes += 4; in atmci_read_data_pio() 1885 nbytes += remaining; in atmci_read_data_pio() 1896 nbytes += offset; in atmci_read_data_pio() 1904 data->bytes_xfered += nbytes; in atmci_read_data_pio() 1910 data->bytes_xfered += nbytes; in atmci_read_data_pio() 1917 data->bytes_xfered += nbytes; in atmci_read_data_pio() 1930 unsigned int nbytes = 0; in atmci_write_data_pio() local 1938 nbytes += 4; in atmci_write_data_pio() 1953 nbytes += remaining; in atmci_write_data_pio() [all …]
|