Searched refs:sctx (Results 1 – 53 of 53) sorted by relevance

/linux-4.4.14/drivers/md/
dm-switch.c 62 struct switch_ctx *sctx; in alloc_switch_ctx() local
64 sctx = kzalloc(sizeof(struct switch_ctx) + nr_paths * sizeof(struct switch_path), in alloc_switch_ctx()
66 if (!sctx) in alloc_switch_ctx()
69 sctx->ti = ti; in alloc_switch_ctx()
70 sctx->region_size = region_size; in alloc_switch_ctx()
72 ti->private = sctx; in alloc_switch_ctx()
74 return sctx; in alloc_switch_ctx()
79 struct switch_ctx *sctx = ti->private; in alloc_region_table() local
83 if (!(sctx->region_size & (sctx->region_size - 1))) in alloc_region_table()
84 sctx->region_size_bits = __ffs(sctx->region_size); in alloc_region_table()
[all …]
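
The dm-switch hits above show a common kernel allocation idiom: one kzalloc() covers the context plus a trailing array of per-path entries, and a power-of-two region size is reduced to a shift so later lookups avoid a division. The stand-alone C sketch below models that idiom in user space; the demo_* names are invented for illustration and are not the dm-switch API, and __builtin_ctzl() stands in for the kernel's __ffs().

#include <stdio.h>
#include <stdlib.h>

struct demo_path {
	unsigned long start;            /* placeholder per-path data */
};

struct demo_switch_ctx {
	unsigned long region_size;
	unsigned int  region_size_bits; /* set only when region_size is a power of two */
	unsigned int  nr_paths;
	struct demo_path path[];        /* flexible array sized at allocation time */
};

static struct demo_switch_ctx *demo_alloc_switch_ctx(unsigned int nr_paths,
						      unsigned long region_size)
{
	struct demo_switch_ctx *sctx;

	/* one allocation for the context and all per-path slots */
	sctx = calloc(1, sizeof(*sctx) + nr_paths * sizeof(struct demo_path));
	if (!sctx)
		return NULL;

	sctx->nr_paths = nr_paths;
	sctx->region_size = region_size;

	/* a power of two has exactly one bit set, so x & (x - 1) == 0 */
	if (region_size && !(region_size & (region_size - 1)))
		sctx->region_size_bits = __builtin_ctzl(region_size);

	return sctx;
}

int main(void)
{
	struct demo_switch_ctx *sctx = demo_alloc_switch_ctx(4, 1024);

	if (sctx)
		printf("region_size_bits = %u\n", sctx->region_size_bits);
	free(sctx);
	return 0;
}
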
/linux-4.4.14/include/crypto/
sha512_base.h 23 struct sha512_state *sctx = shash_desc_ctx(desc); in sha384_base_init() local
25 sctx->state[0] = SHA384_H0; in sha384_base_init()
26 sctx->state[1] = SHA384_H1; in sha384_base_init()
27 sctx->state[2] = SHA384_H2; in sha384_base_init()
28 sctx->state[3] = SHA384_H3; in sha384_base_init()
29 sctx->state[4] = SHA384_H4; in sha384_base_init()
30 sctx->state[5] = SHA384_H5; in sha384_base_init()
31 sctx->state[6] = SHA384_H6; in sha384_base_init()
32 sctx->state[7] = SHA384_H7; in sha384_base_init()
33 sctx->count[0] = sctx->count[1] = 0; in sha384_base_init()
[all …]
sha256_base.h 23 struct sha256_state *sctx = shash_desc_ctx(desc); in sha224_base_init() local
25 sctx->state[0] = SHA224_H0; in sha224_base_init()
26 sctx->state[1] = SHA224_H1; in sha224_base_init()
27 sctx->state[2] = SHA224_H2; in sha224_base_init()
28 sctx->state[3] = SHA224_H3; in sha224_base_init()
29 sctx->state[4] = SHA224_H4; in sha224_base_init()
30 sctx->state[5] = SHA224_H5; in sha224_base_init()
31 sctx->state[6] = SHA224_H6; in sha224_base_init()
32 sctx->state[7] = SHA224_H7; in sha224_base_init()
33 sctx->count = 0; in sha224_base_init()
[all …]
sha1_base.h 22 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_base_init() local
24 sctx->state[0] = SHA1_H0; in sha1_base_init()
25 sctx->state[1] = SHA1_H1; in sha1_base_init()
26 sctx->state[2] = SHA1_H2; in sha1_base_init()
27 sctx->state[3] = SHA1_H3; in sha1_base_init()
28 sctx->state[4] = SHA1_H4; in sha1_base_init()
29 sctx->count = 0; in sha1_base_init()
39 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_base_do_update() local
40 unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; in sha1_base_do_update()
42 sctx->count += len; in sha1_base_do_update()
[all …]
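
The sha512_base.h, sha256_base.h and sha1_base.h hits above all follow the same shash pattern: the descriptor context holds the chaining state, a running byte count and a partial-block buffer, and each update tops up the partial block before handing whole blocks to the block function. Below is a minimal user-space restatement of that update logic; the demo_* names and the fixed 64-byte block size are illustrative assumptions, not the crypto API.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define DEMO_BLOCK_SIZE 64

struct demo_hash_state {
	uint32_t state[5];
	uint64_t count;                    /* total bytes fed in so far */
	uint8_t  buffer[DEMO_BLOCK_SIZE];  /* partial block awaiting more data */
};

/* stands in for the arch-specific block transform */
typedef void (*demo_block_fn)(struct demo_hash_state *st,
			      const uint8_t *blocks, size_t nblocks);

static void demo_do_update(struct demo_hash_state *sctx, const uint8_t *data,
			   size_t len, demo_block_fn block_fn)
{
	size_t partial = sctx->count % DEMO_BLOCK_SIZE;

	sctx->count += len;

	if (partial + len >= DEMO_BLOCK_SIZE) {
		size_t blocks;

		if (partial) {
			size_t fill = DEMO_BLOCK_SIZE - partial;

			/* complete the buffered block first */
			memcpy(sctx->buffer + partial, data, fill);
			block_fn(sctx, sctx->buffer, 1);
			data += fill;
			len -= fill;
		}

		/* then hand over all remaining whole blocks in one call */
		blocks = len / DEMO_BLOCK_SIZE;
		len %= DEMO_BLOCK_SIZE;
		if (blocks) {
			block_fn(sctx, data, blocks);
			data += blocks * DEMO_BLOCK_SIZE;
		}
		partial = 0;
	}

	/* whatever is left waits in the buffer for the next update/final */
	if (len)
		memcpy(sctx->buffer + partial, data, len);
}
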
/linux-4.4.14/arch/mips/cavium-octeon/crypto/
octeon-sha512.c 35 static void octeon_sha512_store_hash(struct sha512_state *sctx) in octeon_sha512_store_hash() argument
37 write_octeon_64bit_hash_sha512(sctx->state[0], 0); in octeon_sha512_store_hash()
38 write_octeon_64bit_hash_sha512(sctx->state[1], 1); in octeon_sha512_store_hash()
39 write_octeon_64bit_hash_sha512(sctx->state[2], 2); in octeon_sha512_store_hash()
40 write_octeon_64bit_hash_sha512(sctx->state[3], 3); in octeon_sha512_store_hash()
41 write_octeon_64bit_hash_sha512(sctx->state[4], 4); in octeon_sha512_store_hash()
42 write_octeon_64bit_hash_sha512(sctx->state[5], 5); in octeon_sha512_store_hash()
43 write_octeon_64bit_hash_sha512(sctx->state[6], 6); in octeon_sha512_store_hash()
44 write_octeon_64bit_hash_sha512(sctx->state[7], 7); in octeon_sha512_store_hash()
47 static void octeon_sha512_read_hash(struct sha512_state *sctx) in octeon_sha512_read_hash() argument
[all …]
octeon-sha256.c 36 static void octeon_sha256_store_hash(struct sha256_state *sctx) in octeon_sha256_store_hash() argument
38 u64 *hash = (u64 *)sctx->state; in octeon_sha256_store_hash()
46 static void octeon_sha256_read_hash(struct sha256_state *sctx) in octeon_sha256_read_hash() argument
48 u64 *hash = (u64 *)sctx->state; in octeon_sha256_read_hash()
72 struct sha256_state *sctx = shash_desc_ctx(desc); in octeon_sha224_init() local
74 sctx->state[0] = SHA224_H0; in octeon_sha224_init()
75 sctx->state[1] = SHA224_H1; in octeon_sha224_init()
76 sctx->state[2] = SHA224_H2; in octeon_sha224_init()
77 sctx->state[3] = SHA224_H3; in octeon_sha224_init()
78 sctx->state[4] = SHA224_H4; in octeon_sha224_init()
[all …]
octeon-sha1.c 35 static void octeon_sha1_store_hash(struct sha1_state *sctx) in octeon_sha1_store_hash() argument
37 u64 *hash = (u64 *)sctx->state; in octeon_sha1_store_hash()
41 } hash_tail = { { sctx->state[4], } }; in octeon_sha1_store_hash()
49 static void octeon_sha1_read_hash(struct sha1_state *sctx) in octeon_sha1_read_hash() argument
51 u64 *hash = (u64 *)sctx->state; in octeon_sha1_read_hash()
60 sctx->state[4] = hash_tail.word[0]; in octeon_sha1_read_hash()
80 struct sha1_state *sctx = shash_desc_ctx(desc); in octeon_sha1_init() local
82 sctx->state[0] = SHA1_H0; in octeon_sha1_init()
83 sctx->state[1] = SHA1_H1; in octeon_sha1_init()
84 sctx->state[2] = SHA1_H2; in octeon_sha1_init()
[all …]
/linux-4.4.14/arch/powerpc/crypto/
sha256-spe-glue.c 54 static inline void ppc_sha256_clear_context(struct sha256_state *sctx) in ppc_sha256_clear_context() argument
57 u32 *ptr = (u32 *)sctx; in ppc_sha256_clear_context()
66 struct sha256_state *sctx = shash_desc_ctx(desc); in ppc_spe_sha256_init() local
68 sctx->state[0] = SHA256_H0; in ppc_spe_sha256_init()
69 sctx->state[1] = SHA256_H1; in ppc_spe_sha256_init()
70 sctx->state[2] = SHA256_H2; in ppc_spe_sha256_init()
71 sctx->state[3] = SHA256_H3; in ppc_spe_sha256_init()
72 sctx->state[4] = SHA256_H4; in ppc_spe_sha256_init()
73 sctx->state[5] = SHA256_H5; in ppc_spe_sha256_init()
74 sctx->state[6] = SHA256_H6; in ppc_spe_sha256_init()
[all …]
sha1-spe-glue.c 53 static inline void ppc_sha1_clear_context(struct sha1_state *sctx) in ppc_sha1_clear_context() argument
56 u32 *ptr = (u32 *)sctx; in ppc_sha1_clear_context()
65 struct sha1_state *sctx = shash_desc_ctx(desc); in ppc_spe_sha1_init() local
67 sctx->state[0] = SHA1_H0; in ppc_spe_sha1_init()
68 sctx->state[1] = SHA1_H1; in ppc_spe_sha1_init()
69 sctx->state[2] = SHA1_H2; in ppc_spe_sha1_init()
70 sctx->state[3] = SHA1_H3; in ppc_spe_sha1_init()
71 sctx->state[4] = SHA1_H4; in ppc_spe_sha1_init()
72 sctx->count = 0; in ppc_spe_sha1_init()
80 struct sha1_state *sctx = shash_desc_ctx(desc); in ppc_spe_sha1_update() local
[all …]
md5-glue.c 26 static inline void ppc_md5_clear_context(struct md5_state *sctx) in ppc_md5_clear_context() argument
29 u32 *ptr = (u32 *)sctx; in ppc_md5_clear_context()
38 struct md5_state *sctx = shash_desc_ctx(desc); in ppc_md5_init() local
40 sctx->hash[0] = MD5_H0; in ppc_md5_init()
41 sctx->hash[1] = MD5_H1; in ppc_md5_init()
42 sctx->hash[2] = MD5_H2; in ppc_md5_init()
43 sctx->hash[3] = MD5_H3; in ppc_md5_init()
44 sctx->byte_count = 0; in ppc_md5_init()
52 struct md5_state *sctx = shash_desc_ctx(desc); in ppc_md5_update() local
53 const unsigned int offset = sctx->byte_count & 0x3f; in ppc_md5_update()
[all …]
sha1.c 33 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_init() local
35 *sctx = (struct sha1_state){ in sha1_init()
45 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_update() local
49 partial = sctx->count & 0x3f; in sha1_update()
50 sctx->count += len; in sha1_update()
59 memcpy(sctx->buffer + partial, data, done + 64); in sha1_update()
60 src = sctx->buffer; in sha1_update()
64 powerpc_sha_transform(sctx->state, src, temp); in sha1_update()
72 memcpy(sctx->buffer + partial, src, len - done); in sha1_update()
81 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_final() local
[all …]
/linux-4.4.14/fs/btrfs/
send.c 276 static int is_waiting_for_move(struct send_ctx *sctx, u64 ino);
279 get_waiting_dir_move(struct send_ctx *sctx, u64 ino);
281 static int is_waiting_for_rm(struct send_ctx *sctx, u64 dir_ino);
283 static int need_send_hole(struct send_ctx *sctx) in need_send_hole() argument
285 return (sctx->parent_root && !sctx->cur_inode_new && in need_send_hole()
286 !sctx->cur_inode_new_gen && !sctx->cur_inode_deleted && in need_send_hole()
287 S_ISREG(sctx->cur_inode_mode)); in need_send_hole()
542 static int tlv_put(struct send_ctx *sctx, u16 attr, const void *data, int len) in tlv_put() argument
546 int left = sctx->send_max_size - sctx->send_size; in tlv_put()
551 hdr = (struct btrfs_tlv_header *) (sctx->send_buf + sctx->send_size); in tlv_put()
[all …]
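
tlv_put() above appends one attribute to the bounded send buffer as a small type/length header followed by the payload, refusing the write when it would overflow. A rough user-space model of that pattern follows; the demo_* names are illustrative, endianness handling is omitted, and this is not the btrfs send wire format.

#include <errno.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

struct demo_tlv_header {
	uint16_t tlv_type;
	uint16_t tlv_len;
};

struct demo_send_ctx {
	uint8_t *send_buf;      /* command being built */
	size_t   send_size;     /* bytes used so far */
	size_t   send_max_size; /* capacity of send_buf */
};

static int demo_tlv_put(struct demo_send_ctx *sctx, uint16_t attr,
			const void *data, uint16_t len)
{
	struct demo_tlv_header hdr = { .tlv_type = attr, .tlv_len = len };
	size_t left = sctx->send_max_size - sctx->send_size;

	/* refuse the attribute rather than overrun the send buffer */
	if (left < sizeof(hdr) + len)
		return -EOVERFLOW;

	memcpy(sctx->send_buf + sctx->send_size, &hdr, sizeof(hdr));
	memcpy(sctx->send_buf + sctx->send_size + sizeof(hdr), data, len);
	sctx->send_size += sizeof(hdr) + len;

	return 0;
}
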
scrub.c 95 struct scrub_ctx *sctx; member
116 struct scrub_ctx *sctx; member
133 struct scrub_ctx *sctx; member
209 struct scrub_ctx *sctx; member
225 struct scrub_ctx *sctx; member
243 static void scrub_pending_bio_inc(struct scrub_ctx *sctx);
244 static void scrub_pending_bio_dec(struct scrub_ctx *sctx);
245 static void scrub_pending_trans_workers_inc(struct scrub_ctx *sctx);
246 static void scrub_pending_trans_workers_dec(struct scrub_ctx *sctx);
271 static int scrub_add_page_to_rd_bio(struct scrub_ctx *sctx,
[all …]
/linux-4.4.14/arch/sparc/crypto/
sha512_glue.c 30 struct sha512_state *sctx = shash_desc_ctx(desc); in sha512_sparc64_init() local
31 sctx->state[0] = SHA512_H0; in sha512_sparc64_init()
32 sctx->state[1] = SHA512_H1; in sha512_sparc64_init()
33 sctx->state[2] = SHA512_H2; in sha512_sparc64_init()
34 sctx->state[3] = SHA512_H3; in sha512_sparc64_init()
35 sctx->state[4] = SHA512_H4; in sha512_sparc64_init()
36 sctx->state[5] = SHA512_H5; in sha512_sparc64_init()
37 sctx->state[6] = SHA512_H6; in sha512_sparc64_init()
38 sctx->state[7] = SHA512_H7; in sha512_sparc64_init()
39 sctx->count[0] = sctx->count[1] = 0; in sha512_sparc64_init()
[all …]
sha256_glue.c 31 struct sha256_state *sctx = shash_desc_ctx(desc); in sha224_sparc64_init() local
32 sctx->state[0] = SHA224_H0; in sha224_sparc64_init()
33 sctx->state[1] = SHA224_H1; in sha224_sparc64_init()
34 sctx->state[2] = SHA224_H2; in sha224_sparc64_init()
35 sctx->state[3] = SHA224_H3; in sha224_sparc64_init()
36 sctx->state[4] = SHA224_H4; in sha224_sparc64_init()
37 sctx->state[5] = SHA224_H5; in sha224_sparc64_init()
38 sctx->state[6] = SHA224_H6; in sha224_sparc64_init()
39 sctx->state[7] = SHA224_H7; in sha224_sparc64_init()
40 sctx->count = 0; in sha224_sparc64_init()
[all …]
sha1_glue.c 31 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_sparc64_init() local
33 *sctx = (struct sha1_state){ in sha1_sparc64_init()
40 static void __sha1_sparc64_update(struct sha1_state *sctx, const u8 *data, in __sha1_sparc64_update() argument
45 sctx->count += len; in __sha1_sparc64_update()
48 memcpy(sctx->buffer + partial, data, done); in __sha1_sparc64_update()
49 sha1_sparc64_transform(sctx->state, sctx->buffer, 1); in __sha1_sparc64_update()
54 sha1_sparc64_transform(sctx->state, data + done, rounds); in __sha1_sparc64_update()
58 memcpy(sctx->buffer, data + done, len - done); in __sha1_sparc64_update()
64 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_sparc64_update() local
65 unsigned int partial = sctx->count % SHA1_BLOCK_SIZE; in sha1_sparc64_update()
[all …]
md5_glue.c 45 static void __md5_sparc64_update(struct md5_state *sctx, const u8 *data, in __md5_sparc64_update() argument
50 sctx->byte_count += len; in __md5_sparc64_update()
53 memcpy((u8 *)sctx->block + partial, data, done); in __md5_sparc64_update()
54 md5_sparc64_transform(sctx->hash, (u8 *)sctx->block, 1); in __md5_sparc64_update()
59 md5_sparc64_transform(sctx->hash, data + done, rounds); in __md5_sparc64_update()
63 memcpy(sctx->block, data + done, len - done); in __md5_sparc64_update()
69 struct md5_state *sctx = shash_desc_ctx(desc); in md5_sparc64_update() local
70 unsigned int partial = sctx->byte_count % MD5_HMAC_BLOCK_SIZE; in md5_sparc64_update()
74 sctx->byte_count += len; in md5_sparc64_update()
75 memcpy((u8 *)sctx->block + partial, data, len); in md5_sparc64_update()
[all …]
/linux-4.4.14/arch/s390/crypto/
sha256_s390.c 27 struct s390_sha_ctx *sctx = shash_desc_ctx(desc); in sha256_init() local
29 sctx->state[0] = SHA256_H0; in sha256_init()
30 sctx->state[1] = SHA256_H1; in sha256_init()
31 sctx->state[2] = SHA256_H2; in sha256_init()
32 sctx->state[3] = SHA256_H3; in sha256_init()
33 sctx->state[4] = SHA256_H4; in sha256_init()
34 sctx->state[5] = SHA256_H5; in sha256_init()
35 sctx->state[6] = SHA256_H6; in sha256_init()
36 sctx->state[7] = SHA256_H7; in sha256_init()
37 sctx->count = 0; in sha256_init()
[all …]
sha1_s390.c 37 struct s390_sha_ctx *sctx = shash_desc_ctx(desc); in sha1_init() local
39 sctx->state[0] = SHA1_H0; in sha1_init()
40 sctx->state[1] = SHA1_H1; in sha1_init()
41 sctx->state[2] = SHA1_H2; in sha1_init()
42 sctx->state[3] = SHA1_H3; in sha1_init()
43 sctx->state[4] = SHA1_H4; in sha1_init()
44 sctx->count = 0; in sha1_init()
45 sctx->func = KIMD_SHA_1; in sha1_init()
52 struct s390_sha_ctx *sctx = shash_desc_ctx(desc); in sha1_export() local
55 octx->count = sctx->count; in sha1_export()
[all …]
aes_s390.c 98 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); in setkey_fallback_cip() local
101 sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK; in setkey_fallback_cip()
102 sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags & in setkey_fallback_cip()
105 ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len); in setkey_fallback_cip()
108 tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags & in setkey_fallback_cip()
117 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); in aes_set_key() local
127 sctx->key_len = key_len; in aes_set_key()
129 memcpy(sctx->key, in_key, key_len); in aes_set_key()
138 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); in aes_encrypt() local
140 if (unlikely(need_fallback(sctx->key_len))) { in aes_encrypt()
[all …]
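
aes_s390.c keeps a software fallback cipher for key lengths the hardware path cannot handle: setkey stores supported keys in the driver context and otherwise forwards the key (and the request flags) to the fallback. The sketch below condenses that decision into plain C; the demo_* types, the 128-bit-only hardware rule and the function-pointer fallback are assumptions made for the example, not the kernel crypto API.

#include <errno.h>
#include <stdint.h>
#include <string.h>

struct demo_sw_cipher {
	int (*setkey)(void *priv, const uint8_t *key, unsigned int key_len);
	void *priv;
};

struct demo_aes_ctx {
	uint8_t key[32];
	unsigned int key_len;
	struct demo_sw_cipher *fallback;  /* software implementation, if any */
};

static int demo_hw_supports(unsigned int key_len)
{
	return key_len == 16;             /* pretend the hardware only does AES-128 */
}

static int demo_aes_set_key(struct demo_aes_ctx *sctx, const uint8_t *in_key,
			    unsigned int key_len)
{
	if (demo_hw_supports(key_len)) {
		sctx->key_len = key_len;
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	if (!sctx->fallback)
		return -EINVAL;

	/* delegate unsupported key sizes to the software implementation */
	return sctx->fallback->setkey(sctx->fallback->priv, in_key, key_len);
}
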
sha512_s390.c 46 struct s390_sha_ctx *sctx = shash_desc_ctx(desc); in sha512_export() local
49 octx->count[0] = sctx->count; in sha512_export()
51 memcpy(octx->state, sctx->state, sizeof(octx->state)); in sha512_export()
52 memcpy(octx->buf, sctx->buf, sizeof(octx->buf)); in sha512_export()
58 struct s390_sha_ctx *sctx = shash_desc_ctx(desc); in sha512_import() local
63 sctx->count = ictx->count[0]; in sha512_import()
65 memcpy(sctx->state, ictx->state, sizeof(ictx->state)); in sha512_import()
66 memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf)); in sha512_import()
67 sctx->func = KIMD_SHA_512; in sha512_import()
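
sha512_export()/sha512_import() above copy the driver-private context to and from a generic state layout so a hash in progress can be resumed by another implementation. A simplified user-space version of that pairing follows; the demo_* structs are invented and the field sizes are placeholders.

#include <stdint.h>
#include <string.h>

struct demo_drv_state {            /* driver-private context */
	uint64_t count;
	uint64_t state[8];
	uint8_t  buf[128];
};

struct demo_export_state {         /* generic, stable layout shared by implementations */
	uint64_t count[2];
	uint64_t state[8];
	uint8_t  buf[128];
};

static void demo_export(const struct demo_drv_state *sctx,
			struct demo_export_state *octx)
{
	octx->count[0] = sctx->count;
	octx->count[1] = 0;
	memcpy(octx->state, sctx->state, sizeof(octx->state));
	memcpy(octx->buf, sctx->buf, sizeof(octx->buf));
}

static void demo_import(struct demo_drv_state *sctx,
			const struct demo_export_state *ictx)
{
	sctx->count = ictx->count[0];
	memcpy(sctx->state, ictx->state, sizeof(ictx->state));
	memcpy(sctx->buf, ictx->buf, sizeof(ictx->buf));
}
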
/linux-4.4.14/drivers/crypto/nx/
nx-sha256.c 51 struct sha256_state *sctx = shash_desc_ctx(desc); in nx_sha256_init() local
53 memset(sctx, 0, sizeof *sctx); in nx_sha256_init()
55 sctx->state[0] = __cpu_to_be32(SHA256_H0); in nx_sha256_init()
56 sctx->state[1] = __cpu_to_be32(SHA256_H1); in nx_sha256_init()
57 sctx->state[2] = __cpu_to_be32(SHA256_H2); in nx_sha256_init()
58 sctx->state[3] = __cpu_to_be32(SHA256_H3); in nx_sha256_init()
59 sctx->state[4] = __cpu_to_be32(SHA256_H4); in nx_sha256_init()
60 sctx->state[5] = __cpu_to_be32(SHA256_H5); in nx_sha256_init()
61 sctx->state[6] = __cpu_to_be32(SHA256_H6); in nx_sha256_init()
62 sctx->state[7] = __cpu_to_be32(SHA256_H7); in nx_sha256_init()
[all …]
nx-sha512.c 51 struct sha512_state *sctx = shash_desc_ctx(desc); in nx_sha512_init() local
53 memset(sctx, 0, sizeof *sctx); in nx_sha512_init()
55 sctx->state[0] = __cpu_to_be64(SHA512_H0); in nx_sha512_init()
56 sctx->state[1] = __cpu_to_be64(SHA512_H1); in nx_sha512_init()
57 sctx->state[2] = __cpu_to_be64(SHA512_H2); in nx_sha512_init()
58 sctx->state[3] = __cpu_to_be64(SHA512_H3); in nx_sha512_init()
59 sctx->state[4] = __cpu_to_be64(SHA512_H4); in nx_sha512_init()
60 sctx->state[5] = __cpu_to_be64(SHA512_H5); in nx_sha512_init()
61 sctx->state[6] = __cpu_to_be64(SHA512_H6); in nx_sha512_init()
62 sctx->state[7] = __cpu_to_be64(SHA512_H7); in nx_sha512_init()
[all …]
nx-aes-xcbc.c 172 struct xcbc_state *sctx = shash_desc_ctx(desc); in nx_xcbc_init() local
174 memset(sctx, 0, sizeof *sctx); in nx_xcbc_init()
183 struct xcbc_state *sctx = shash_desc_ctx(desc); in nx_xcbc_update() local
197 total = sctx->count + len; in nx_xcbc_update()
204 memcpy(sctx->buffer + sctx->count, data, len); in nx_xcbc_update()
205 sctx->count += len; in nx_xcbc_update()
216 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_xcbc_update()
242 if (sctx->count) { in nx_xcbc_update()
243 data_len = sctx->count; in nx_xcbc_update()
245 (u8 *) sctx->buffer, in nx_xcbc_update()
[all …]
/linux-4.4.14/arch/x86/crypto/
poly1305_glue.c 46 struct poly1305_simd_desc_ctx *sctx = shash_desc_ctx(desc); in poly1305_simd_init() local
48 sctx->uset = false; in poly1305_simd_init()
50 sctx->wset = false; in poly1305_simd_init()
70 struct poly1305_simd_desc_ctx *sctx; in poly1305_simd_blocks() local
74 sctx = container_of(dctx, struct poly1305_simd_desc_ctx, base); in poly1305_simd_blocks()
84 if (unlikely(!sctx->wset)) { in poly1305_simd_blocks()
85 if (!sctx->uset) { in poly1305_simd_blocks()
86 memcpy(sctx->u, dctx->r, sizeof(sctx->u)); in poly1305_simd_blocks()
87 poly1305_simd_mult(sctx->u, dctx->r); in poly1305_simd_blocks()
88 sctx->uset = true; in poly1305_simd_blocks()
[all …]
sha512_ssse3_glue.c 50 struct sha512_state *sctx = shash_desc_ctx(desc); in sha512_update() local
53 (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE) in sha512_update()
sha1_ssse3_glue.c 40 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_update() local
43 (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE) in sha1_update()
sha256_ssse3_glue.c 50 struct sha256_state *sctx = shash_desc_ctx(desc); in sha256_update() local
53 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in sha256_update()
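
The three x86 glue files above gate the SIMD path on the same test: the FPU must be usable in the current context and the new data, together with what is already buffered, must complete at least one block; otherwise they fall back to the generic scalar update. That predicate, restated on its own (the demo_* names and the 64-byte block size are illustrative only):

#include <stdbool.h>
#include <stddef.h>

#define DEMO_BLOCK_SIZE 64

/*
 * buffered:   bytes already sitting in the partial-block buffer (count % block)
 * len:        bytes supplied by this update
 * fpu_usable: whether the vector unit may be used in the current context
 */
static bool demo_use_simd_path(size_t buffered, size_t len, bool fpu_usable)
{
	if (!fpu_usable)
		return false;
	/* not worth saving/restoring FPU state unless a full block gets processed */
	return buffered + len >= DEMO_BLOCK_SIZE;
}
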
/linux-4.4.14/drivers/staging/skein/
skein_generic.c 44 struct skein_256_ctx *sctx = shash_desc_ctx(desc); in skein256_export() local
46 memcpy(out, sctx, sizeof(*sctx)); in skein256_export()
52 struct skein_256_ctx *sctx = shash_desc_ctx(desc); in skein256_import() local
54 memcpy(sctx, in, sizeof(*sctx)); in skein256_import()
79 struct skein_512_ctx *sctx = shash_desc_ctx(desc); in skein512_export() local
81 memcpy(out, sctx, sizeof(*sctx)); in skein512_export()
87 struct skein_512_ctx *sctx = shash_desc_ctx(desc); in skein512_import() local
89 memcpy(sctx, in, sizeof(*sctx)); in skein512_import()
114 struct skein_1024_ctx *sctx = shash_desc_ctx(desc); in skein1024_export() local
116 memcpy(out, sctx, sizeof(*sctx)); in skein1024_export()
[all …]
/linux-4.4.14/drivers/crypto/
padlock-sha.c 296 struct sha1_state *sctx = shash_desc_ctx(desc); in padlock_sha1_init_nano() local
298 *sctx = (struct sha1_state){ in padlock_sha1_init_nano()
308 struct sha1_state *sctx = shash_desc_ctx(desc); in padlock_sha1_update_nano() local
317 partial = sctx->count & 0x3f; in padlock_sha1_update_nano()
318 sctx->count += len; in padlock_sha1_update_nano()
321 memcpy(dst, (u8 *)(sctx->state), SHA1_DIGEST_SIZE); in padlock_sha1_update_nano()
328 memcpy(sctx->buffer + partial, data, in padlock_sha1_update_nano()
330 src = sctx->buffer; in padlock_sha1_update_nano()
353 memcpy((u8 *)(sctx->state), dst, SHA1_DIGEST_SIZE); in padlock_sha1_update_nano()
354 memcpy(sctx->buffer + partial, src, len - done); in padlock_sha1_update_nano()
[all …]
/linux-4.4.14/arch/arm64/crypto/
sha1-ce-glue.c 38 struct sha1_ce_state *sctx = shash_desc_ctx(desc); in sha1_ce_update() local
40 sctx->finalize = 0; in sha1_ce_update()
52 struct sha1_ce_state *sctx = shash_desc_ctx(desc); in sha1_ce_finup() local
53 bool finalize = !sctx->sst.count && !(len % SHA1_BLOCK_SIZE); in sha1_ce_finup()
64 sctx->finalize = finalize; in sha1_ce_finup()
77 struct sha1_ce_state *sctx = shash_desc_ctx(desc); in sha1_ce_final() local
79 sctx->finalize = 0; in sha1_ce_final()
sha2-ce-glue.c 38 struct sha256_ce_state *sctx = shash_desc_ctx(desc); in sha256_ce_update() local
40 sctx->finalize = 0; in sha256_ce_update()
52 struct sha256_ce_state *sctx = shash_desc_ctx(desc); in sha256_ce_finup() local
53 bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE); in sha256_ce_finup()
64 sctx->finalize = finalize; in sha256_ce_finup()
78 struct sha256_ce_state *sctx = shash_desc_ctx(desc); in sha256_ce_final() local
80 sctx->finalize = 0; in sha256_ce_final()
/linux-4.4.14/arch/x86/purgatory/
purgatory.c 46 struct sha256_state sctx; in verify_sha256_digest() local
48 sha256_init(&sctx); in verify_sha256_digest()
51 sha256_update(&sctx, (uint8_t *)(ptr->start), ptr->len); in verify_sha256_digest()
53 sha256_final(&sctx, digest); in verify_sha256_digest()
sha256.c 211 int sha256_init(struct sha256_state *sctx) in sha256_init() argument
213 sctx->state[0] = SHA256_H0; in sha256_init()
214 sctx->state[1] = SHA256_H1; in sha256_init()
215 sctx->state[2] = SHA256_H2; in sha256_init()
216 sctx->state[3] = SHA256_H3; in sha256_init()
217 sctx->state[4] = SHA256_H4; in sha256_init()
218 sctx->state[5] = SHA256_H5; in sha256_init()
219 sctx->state[6] = SHA256_H6; in sha256_init()
220 sctx->state[7] = SHA256_H7; in sha256_init()
221 sctx->count = 0; in sha256_init()
[all …]
sha256.h 17 extern int sha256_init(struct sha256_state *sctx);
18 extern int sha256_update(struct sha256_state *sctx, const u8 *input,
20 extern int sha256_final(struct sha256_state *sctx, u8 *hash);
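
purgatory.c drives this interface in the usual init/update/final sequence: hash every loaded region, then memcmp() the result against the digest recorded at load time. The sketch below mirrors that call flow; the demo_* routines are toy placeholders that do not compute SHA-256, and the region list is invented for the example.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define DEMO_DIGEST_SIZE 32

struct demo_region {
	const uint8_t *start;
	size_t len;
};

/* toy stand-ins for sha256_init()/sha256_update()/sha256_final(): they only
 * model the call sequence, not the real SHA-256 computation */
struct demo_sha256_state {
	uint8_t acc[DEMO_DIGEST_SIZE];
	uint64_t count;
};

static int demo_sha256_init(struct demo_sha256_state *sctx)
{
	memset(sctx, 0, sizeof(*sctx));
	return 0;
}

static int demo_sha256_update(struct demo_sha256_state *sctx,
			      const uint8_t *input, size_t len)
{
	size_t i;

	for (i = 0; i < len; i++)
		sctx->acc[(sctx->count + i) % DEMO_DIGEST_SIZE] ^= input[i];
	sctx->count += len;
	return 0;
}

static int demo_sha256_final(struct demo_sha256_state *sctx, uint8_t *hash)
{
	memcpy(hash, sctx->acc, DEMO_DIGEST_SIZE);
	return 0;
}

/* hash every region, then compare against the digest recorded earlier */
static int demo_verify_digest(const struct demo_region *regions, size_t nregions,
			      const uint8_t expected[DEMO_DIGEST_SIZE])
{
	struct demo_sha256_state sctx;
	uint8_t digest[DEMO_DIGEST_SIZE];
	size_t i;

	demo_sha256_init(&sctx);
	for (i = 0; i < nregions; i++)
		demo_sha256_update(&sctx, regions[i].start, regions[i].len);
	demo_sha256_final(&sctx, digest);

	return memcmp(digest, expected, DEMO_DIGEST_SIZE) ? -1 : 0;
}
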
/linux-4.4.14/arch/x86/crypto/sha-mb/
sha1_mb.c 348 struct sha1_hash_ctx *sctx = shash_desc_ctx(desc); in sha1_mb_init() local
350 hash_ctx_init(sctx); in sha1_mb_init()
351 sctx->job.result_digest[0] = SHA1_H0; in sha1_mb_init()
352 sctx->job.result_digest[1] = SHA1_H1; in sha1_mb_init()
353 sctx->job.result_digest[2] = SHA1_H2; in sha1_mb_init()
354 sctx->job.result_digest[3] = SHA1_H3; in sha1_mb_init()
355 sctx->job.result_digest[4] = SHA1_H4; in sha1_mb_init()
356 sctx->total_length = 0; in sha1_mb_init()
357 sctx->partial_block_buffer_length = 0; in sha1_mb_init()
358 sctx->status = HASH_CTX_STS_IDLE; in sha1_mb_init()
[all …]
/linux-4.4.14/arch/arm/crypto/
sha1-ce-glue.c 33 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_ce_update() local
36 (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE) in sha1_ce_update()
sha1_neon_glue.c 40 struct sha1_state *sctx = shash_desc_ctx(desc); in sha1_neon_update() local
43 (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE) in sha1_neon_update()
sha256_neon_glue.c 35 struct sha256_state *sctx = shash_desc_ctx(desc); in sha256_update() local
38 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in sha256_update()
sha512-neon-glue.c 31 struct sha512_state *sctx = shash_desc_ctx(desc); in sha512_neon_update() local
34 (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE) in sha512_neon_update()
sha2-ce-glue.c 34 struct sha256_state *sctx = shash_desc_ctx(desc); in sha2_ce_update() local
37 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in sha2_ce_update()
/linux-4.4.14/drivers/staging/rtl8188eu/include/
rtw_xmit.h 197 void rtw_sctx_init(struct submit_ctx *sctx, int timeout_ms);
198 int rtw_sctx_wait(struct submit_ctx *sctx);
199 void rtw_sctx_done_err(struct submit_ctx **sctx, int status);
200 void rtw_sctx_done(struct submit_ctx **sctx);
212 struct submit_ctx *sctx; member
/linux-4.4.14/drivers/staging/rtl8188eu/core/
rtw_xmit.c 1244 if (pxmitbuf->sctx) { in rtw_alloc_xmitbuf_ext()
1246 rtw_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_BUF_ALLOC); in rtw_alloc_xmitbuf_ext()
1305 if (pxmitbuf->sctx) { in rtw_alloc_xmitbuf()
1307 rtw_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_BUF_ALLOC); in rtw_alloc_xmitbuf()
1324 if (pxmitbuf->sctx) { in rtw_free_xmitbuf()
1326 rtw_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_BUF_FREE); in rtw_free_xmitbuf()
2134 void rtw_sctx_init(struct submit_ctx *sctx, int timeout_ms) in rtw_sctx_init() argument
2136 sctx->timeout_ms = timeout_ms; in rtw_sctx_init()
2137 sctx->submit_time = jiffies; in rtw_sctx_init()
2138 init_completion(&sctx->done); in rtw_sctx_init()
[all …]
rtw_mlme_ext.c 248 struct submit_ctx sctx; in dump_mgntframe_and_wait() local
253 rtw_sctx_init(&sctx, timeout_ms); in dump_mgntframe_and_wait()
254 pxmitbuf->sctx = &sctx; in dump_mgntframe_and_wait()
259 ret = rtw_sctx_wait(&sctx); in dump_mgntframe_and_wait()
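
The submit_ctx hits above implement a one-shot completion handshake: the sender initialises the context with a timeout, attaches it to the xmit buffer and waits, while the completion path reports a status through rtw_sctx_done_err(), which also clears the buffer's pointer so the context is completed at most once. The user-space model below uses a pthread condition variable as a stand-in for the kernel completion API; all demo_* names and status values are invented for illustration.

#include <pthread.h>
#include <time.h>

enum demo_sctx_status {
	DEMO_SCTX_SUBMITTED,
	DEMO_SCTX_DONE_SUCCESS,
	DEMO_SCTX_DONE_TIMEOUT,
};

struct demo_submit_ctx {
	pthread_mutex_t lock;
	pthread_cond_t  done;
	int             completed;
	int             status;
	int             timeout_ms;
};

static void demo_sctx_init(struct demo_submit_ctx *sctx, int timeout_ms)
{
	pthread_mutex_init(&sctx->lock, NULL);
	pthread_cond_init(&sctx->done, NULL);
	sctx->completed = 0;
	sctx->status = DEMO_SCTX_SUBMITTED;
	sctx->timeout_ms = timeout_ms;
}

/* completion side: report a status once, then detach the caller's pointer */
static void demo_sctx_done_err(struct demo_submit_ctx **psctx, int status)
{
	struct demo_submit_ctx *sctx = *psctx;

	if (!sctx)
		return;
	pthread_mutex_lock(&sctx->lock);
	sctx->status = status;
	sctx->completed = 1;
	pthread_cond_signal(&sctx->done);
	pthread_mutex_unlock(&sctx->lock);
	*psctx = NULL;                 /* so a late caller cannot complete it twice */
}

/* submitter side: block until completion or until the timeout expires */
static int demo_sctx_wait(struct demo_submit_ctx *sctx)
{
	struct timespec ts;
	int rc = 0;

	clock_gettime(CLOCK_REALTIME, &ts);
	ts.tv_sec  += sctx->timeout_ms / 1000;
	ts.tv_nsec += (long)(sctx->timeout_ms % 1000) * 1000000L;
	if (ts.tv_nsec >= 1000000000L) {
		ts.tv_sec++;
		ts.tv_nsec -= 1000000000L;
	}

	pthread_mutex_lock(&sctx->lock);
	while (!sctx->completed && rc == 0)
		rc = pthread_cond_timedwait(&sctx->done, &sctx->lock, &ts);
	pthread_mutex_unlock(&sctx->lock);

	return sctx->completed ? sctx->status : DEMO_SCTX_DONE_TIMEOUT;
}

As in dump_mgntframe_and_wait() above, clearing the attached pointer inside the done helper is what lets the waiter keep the context on its own stack and return safely after the wait.
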
/linux-4.4.14/drivers/staging/rtl8723au/os_dep/
usb_ops_linux.c 109 rtw23a_sctx_done_err(&pxmitbuf->sctx, in usb_write_port23a_complete()
138 rtw23a_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_TX_DENY); in rtl8723au_write_port()
184 rtw23a_sctx_done_err(&pxmitbuf->sctx, in rtl8723au_write_port()
/linux-4.4.14/drivers/staging/rtl8723au/core/
rtw_xmit.c 1317 if (pxmitbuf->sctx) { in rtw_alloc_xmitbuf23a_ext()
1319 rtw23a_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_BUF_ALLOC); in rtw_alloc_xmitbuf23a_ext()
1372 if (pxmitbuf->sctx) { in rtw_alloc_xmitbuf23a()
1374 rtw23a_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_BUF_ALLOC); in rtw_alloc_xmitbuf23a()
1393 if (pxmitbuf->sctx) { in rtw_free_xmitbuf23a()
1395 rtw23a_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_BUF_FREE); in rtw_free_xmitbuf23a()
2314 void rtw_sctx_init23a(struct submit_ctx *sctx, int timeout_ms) in rtw_sctx_init23a() argument
2316 sctx->timeout_ms = timeout_ms; in rtw_sctx_init23a()
2317 init_completion(&sctx->done); in rtw_sctx_init23a()
2318 sctx->status = RTW_SCTX_SUBMITTED; in rtw_sctx_init23a()
[all …]
rtw_mlme_ext.c 2270 struct submit_ctx sctx; in dump_mgntframe23a_and_wait() local
2276 rtw_sctx_init23a(&sctx, timeout_ms); in dump_mgntframe23a_and_wait()
2277 pxmitbuf->sctx = &sctx; in dump_mgntframe23a_and_wait()
2282 ret = rtw_sctx_wait23a(&sctx); in dump_mgntframe23a_and_wait()
2285 pxmitbuf->sctx = NULL; in dump_mgntframe23a_and_wait()
/linux-4.4.14/drivers/staging/rtl8723au/include/
rtw_xmit.h 196 void rtw_sctx_init23a(struct submit_ctx *sctx, int timeout_ms);
197 int rtw_sctx_wait23a(struct submit_ctx *sctx);
198 void rtw23a_sctx_done_err(struct submit_ctx **sctx, int status);
212 struct submit_ctx *sctx; member
/linux-4.4.14/arch/powerpc/perf/
callchain.c 352 struct sigcontext32 sctx; member
394 if (read_user_stack_32((unsigned int __user *) &sf->sctx.regs, &regs)) in sane_signal_32_frame()
/linux-4.4.14/drivers/staging/rtl8188eu/os_dep/
usb_ops_linux.c 689 rtw_sctx_done_err(&pxmitbuf->sctx, in usb_write_port_complete()
718 rtw_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_TX_DENY); in usb_write_port()
764 rtw_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_WRITE_PORT_ERR); in usb_write_port()
/linux-4.4.14/arch/powerpc/kernel/
signal_32.c 219 struct sigcontext sctx; /* the sigcontext */ member
1430 sc = (struct sigcontext __user *) &frame->sctx; in handle_signal32()
1517 sc = &sf->sctx; in sys_sigreturn()
/linux-4.4.14/drivers/staging/rtl8723au/hal/
rtl8723au_xmit.c 352 rtw23a_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_UNKNOWN); in rtw_dump_xframe()
/linux-4.4.14/drivers/staging/rtl8188eu/hal/
rtl8188eu_xmit.c 409 rtw_sctx_done_err(&pxmitbuf->sctx, RTW_SCTX_DONE_UNKNOWN); in rtw_dump_xframe()