kctx              197 drivers/crypto/chelsio/chtls/chtls_hw.c static void chtls_rxkey_ivauth(struct _key_ctx *kctx)
kctx              199 drivers/crypto/chelsio/chtls/chtls_hw.c 	kctx->iv_to_auth = cpu_to_be64(KEYCTX_TX_WR_IV_V(6ULL) |
kctx              210 drivers/crypto/chelsio/chtls/chtls_hw.c 			  struct _key_ctx *kctx,
kctx              251 drivers/crypto/chelsio/chtls/chtls_hw.c 		kctx->ctx_hdr = FILL_KEY_CRX_HDR(ck_size,
kctx              254 drivers/crypto/chelsio/chtls/chtls_hw.c 		chtls_rxkey_ivauth(kctx);
kctx              256 drivers/crypto/chelsio/chtls/chtls_hw.c 		kctx->ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
kctx              261 drivers/crypto/chelsio/chtls/chtls_hw.c 	memcpy(kctx->salt, gcm_ctx->salt, TLS_CIPHER_AES_GCM_128_SALT_SIZE);
kctx              262 drivers/crypto/chelsio/chtls/chtls_hw.c 	memcpy(kctx->key, gcm_ctx->key, keylen);
kctx              263 drivers/crypto/chelsio/chtls/chtls_hw.c 	memcpy(kctx->key + keylen, ghash_h, AEAD_H_SIZE);
kctx              295 drivers/crypto/chelsio/chtls/chtls_hw.c 	struct _key_ctx *kctx;
kctx              306 drivers/crypto/chelsio/chtls/chtls_hw.c 	klen = roundup((keylen + AEAD_H_SIZE) + sizeof(*kctx), 32);
kctx              355 drivers/crypto/chelsio/chtls/chtls_hw.c 	kctx = (struct _key_ctx *)(kwr + 1);
kctx              356 drivers/crypto/chelsio/chtls/chtls_hw.c 	ret = chtls_key_info(csk, kctx, keylen, optname);
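The chtls_hw.c hits above show the Chelsio inline-TLS driver packing a hardware key context: chtls_key_info() builds the header word with FILL_KEY_CRX_HDR (receive keys, which also get the IV-to-auth word from chtls_rxkey_ivauth()) or FILL_KEY_CTX_HDR (transmit keys), then lays the GCM salt, the AES session key and the precomputed GHASH subkey out back to back, and the caller rounds the whole record up to a 32-byte multiple before embedding it in the key work request. A minimal sketch of that fill step, assuming the driver's own _key_ctx layout and size macros; it is not the driver's function, only the copy sequence from lines 261-263:

	/* Hedged sketch: _key_ctx, AEAD_H_SIZE and TLS_CIPHER_AES_GCM_128_SALT_SIZE
	 * are the chtls driver's own definitions; this only mirrors the memcpy
	 * sequence shown above. */
	static void fill_gcm_key_ctx(struct _key_ctx *kctx, const u8 *salt,
				     const u8 *key, unsigned int keylen,
				     const u8 *ghash_h)
	{
		memcpy(kctx->salt, salt, TLS_CIPHER_AES_GCM_128_SALT_SIZE); /* GCM salt */
		memcpy(kctx->key, key, keylen);                             /* AES session key */
		memcpy(kctx->key + keylen, ghash_h, AEAD_H_SIZE);           /* GHASH subkey H */
		/* caller sizes the record as at line 306:
		 * klen = roundup(keylen + AEAD_H_SIZE + sizeof(*kctx), 32); */
	}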
kctx               83 include/linux/sunrpc/gss_krb5.h 	u32 (*encrypt_v2) (struct krb5_ctx *kctx, u32 offset,
kctx               86 include/linux/sunrpc/gss_krb5.h 	u32 (*decrypt_v2) (struct krb5_ctx *kctx, u32 offset, u32 len,
kctx              238 include/linux/sunrpc/gss_krb5.h make_checksum(struct krb5_ctx *kctx, char *header, int hdrlen,
kctx              279 include/linux/sunrpc/gss_krb5.h krb5_make_seq_num(struct krb5_ctx *kctx,
kctx              285 include/linux/sunrpc/gss_krb5.h krb5_get_seq_num(struct krb5_ctx *kctx,
kctx              310 include/linux/sunrpc/gss_krb5.h gss_krb5_aes_encrypt(struct krb5_ctx *kctx, u32 offset,
kctx              315 include/linux/sunrpc/gss_krb5.h gss_krb5_aes_decrypt(struct krb5_ctx *kctx, u32 offset, u32 len,
kctx              320 include/linux/sunrpc/gss_krb5.h krb5_rc4_setup_seq_key(struct krb5_ctx *kctx,
kctx              325 include/linux/sunrpc/gss_krb5.h krb5_rc4_setup_enc_key(struct krb5_ctx *kctx,
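The gss_krb5.h hits declare the pieces that the .c files below tie together: the per-enctype profile reached through kctx->gk5e (with encrypt_v2/decrypt_v2 function pointers for the RFC 4121 token path), the checksum helpers, the v1 sequence-number routines, the AES v2 wrap primitives, and the RC4 key-setup helpers. A short, hedged sketch of how that indirection is consumed, mirroring the call at line 488 of gss_krb5_wrap.c further down; the wrapper function name is made up:

	/* Hedged sketch: dispatch through the per-enctype ops table. */
	static u32 wrap_payload(struct krb5_ctx *kctx, u32 offset,
				struct xdr_buf *buf, struct page **pages)
	{
		if (kctx->gk5e->encrypt_v2 == NULL)
			return GSS_S_FAILURE;	/* enctype has no v2 path */
		return kctx->gk5e->encrypt_v2(kctx, offset, buf, pages);
	}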
kctx              165 net/sunrpc/auth_gss/gss_krb5_crypto.c make_checksum_hmac_md5(struct krb5_ctx *kctx, char *header, int hdrlen,
kctx              180 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (cksumout->len < kctx->gk5e->cksumlength) {
kctx              182 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, cksumout->len, kctx->gk5e->name);
kctx              203 net/sunrpc/auth_gss/gss_krb5_crypto.c 	hmac_md5 = crypto_alloc_ahash(kctx->gk5e->cksum_name, 0,
kctx              244 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = crypto_ahash_setkey(hmac_md5, cksumkey, kctx->gk5e->keylength);
kctx              255 net/sunrpc/auth_gss/gss_krb5_crypto.c 	memcpy(cksumout->data, checksumdata, kctx->gk5e->cksumlength);
kctx              256 net/sunrpc/auth_gss/gss_krb5_crypto.c 	cksumout->len = kctx->gk5e->cksumlength;
kctx              276 net/sunrpc/auth_gss/gss_krb5_crypto.c make_checksum(struct krb5_ctx *kctx, char *header, int hdrlen,
kctx              287 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (kctx->gk5e->ctype == CKSUMTYPE_HMAC_MD5_ARCFOUR)
kctx              288 net/sunrpc/auth_gss/gss_krb5_crypto.c 		return make_checksum_hmac_md5(kctx, header, hdrlen,
kctx              292 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (cksumout->len < kctx->gk5e->cksumlength) {
kctx              294 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, cksumout->len, kctx->gk5e->name);
kctx              302 net/sunrpc/auth_gss/gss_krb5_crypto.c 	tfm = crypto_alloc_ahash(kctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC);
kctx              316 net/sunrpc/auth_gss/gss_krb5_crypto.c 					  kctx->gk5e->keylength);
kctx              338 net/sunrpc/auth_gss/gss_krb5_crypto.c 	switch (kctx->gk5e->ctype) {
kctx              340 net/sunrpc/auth_gss/gss_krb5_crypto.c 		err = kctx->gk5e->encrypt(kctx->seq, NULL, checksumdata,
kctx              345 net/sunrpc/auth_gss/gss_krb5_crypto.c 		       checksumdata + checksumlen - kctx->gk5e->cksumlength,
kctx              346 net/sunrpc/auth_gss/gss_krb5_crypto.c 		       kctx->gk5e->cksumlength);
kctx              349 net/sunrpc/auth_gss/gss_krb5_crypto.c 		memcpy(cksumout->data, checksumdata, kctx->gk5e->cksumlength);
kctx              355 net/sunrpc/auth_gss/gss_krb5_crypto.c 	cksumout->len = kctx->gk5e->cksumlength;
kctx              373 net/sunrpc/auth_gss/gss_krb5_crypto.c make_checksum_v2(struct krb5_ctx *kctx, char *header, int hdrlen,
kctx              383 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (kctx->gk5e->keyed_cksum == 0) {
kctx              385 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, kctx->gk5e->name);
kctx              390 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, kctx->gk5e->name);
kctx              398 net/sunrpc/auth_gss/gss_krb5_crypto.c 	tfm = crypto_alloc_ahash(kctx->gk5e->cksum_name, 0, CRYPTO_ALG_ASYNC);
kctx              408 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = crypto_ahash_setkey(tfm, cksumkey, kctx->gk5e->keylength);
kctx              431 net/sunrpc/auth_gss/gss_krb5_crypto.c 	cksumout->len = kctx->gk5e->cksumlength;
kctx              433 net/sunrpc/auth_gss/gss_krb5_crypto.c 	switch (kctx->gk5e->ctype) {
kctx              437 net/sunrpc/auth_gss/gss_krb5_crypto.c 		memcpy(cksumout->data, checksumdata, kctx->gk5e->cksumlength);
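The three checksum routines in gss_krb5_crypto.c share one shape: allocate an ahash transform named by gk5e->cksum_name, key it with gk5e->keylength bytes when the checksum is keyed, digest the token header plus the xdr_buf, and emit gk5e->cksumlength bytes; the ARCFOUR case in make_checksum() additionally runs the digest through the kctx->seq cipher and keeps its trailing bytes (lines 338-346). A hedged sketch of just the allocate/setkey/digest skeleton, leaving out the xdr_buf walking and the per-enctype special cases:

	#include <crypto/hash.h>
	#include <linux/scatterlist.h>

	/* Hedged sketch, not the SunRPC code: the ahash skeleton the checksum
	 * helpers are built on.  'data' must live in directly mappable memory
	 * (the real code uses a kmalloc'd scratch buffer), not on the stack. */
	static int checksum_sketch(const char *cksum_name, const u8 *key,
				   unsigned int keylen, const void *data,
				   unsigned int datalen, u8 *out)
	{
		struct crypto_ahash *tfm;
		struct ahash_request *req;
		struct scatterlist sg[1];
		int err;

		tfm = crypto_alloc_ahash(cksum_name, 0, CRYPTO_ALG_ASYNC);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = key ? crypto_ahash_setkey(tfm, key, keylen) : 0;
		if (err)
			goto out_free_tfm;

		req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			err = -ENOMEM;
			goto out_free_tfm;
		}
		ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);

		sg_init_one(sg, data, datalen);
		ahash_request_set_crypt(req, sg, out, datalen);
		err = crypto_ahash_digest(req);		/* init + update + final */

		ahash_request_free(req);
	out_free_tfm:
		crypto_free_ahash(tfm);
		return err;
	}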
kctx              733 net/sunrpc/auth_gss/gss_krb5_crypto.c gss_krb5_aes_encrypt(struct krb5_ctx *kctx, u32 offset,
kctx              748 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (kctx->initiate) {
kctx              749 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cipher = kctx->initiator_enc;
kctx              750 net/sunrpc/auth_gss/gss_krb5_crypto.c 		aux_cipher = kctx->initiator_enc_aux;
kctx              751 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cksumkey = kctx->initiator_integ;
kctx              754 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cipher = kctx->acceptor_enc;
kctx              755 net/sunrpc/auth_gss/gss_krb5_crypto.c 		aux_cipher = kctx->acceptor_enc_aux;
kctx              756 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cksumkey = kctx->acceptor_integ;
kctx              763 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (xdr_extend_head(buf, offset, kctx->gk5e->conflen))
kctx              765 net/sunrpc/auth_gss/gss_krb5_crypto.c 	gss_krb5_make_confounder(buf->head[0].iov_base + offset, kctx->gk5e->conflen);
kctx              796 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = make_checksum_v2(kctx, NULL, 0, buf,
kctx              844 net/sunrpc/auth_gss/gss_krb5_crypto.c 	buf->tail[0].iov_len += kctx->gk5e->cksumlength;
kctx              845 net/sunrpc/auth_gss/gss_krb5_crypto.c 	buf->len += kctx->gk5e->cksumlength;
kctx              854 net/sunrpc/auth_gss/gss_krb5_crypto.c gss_krb5_aes_decrypt(struct krb5_ctx *kctx, u32 offset, u32 len,
kctx              868 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (kctx->initiate) {
kctx              869 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cipher = kctx->acceptor_enc;
kctx              870 net/sunrpc/auth_gss/gss_krb5_crypto.c 		aux_cipher = kctx->acceptor_enc_aux;
kctx              871 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cksum_key = kctx->acceptor_integ;
kctx              874 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cipher = kctx->initiator_enc;
kctx              875 net/sunrpc/auth_gss/gss_krb5_crypto.c 		aux_cipher = kctx->initiator_enc_aux;
kctx              876 net/sunrpc/auth_gss/gss_krb5_crypto.c 		cksum_key = kctx->initiator_integ;
kctx              885 net/sunrpc/auth_gss/gss_krb5_crypto.c 				     kctx->gk5e->cksumlength));
kctx              923 net/sunrpc/auth_gss/gss_krb5_crypto.c 	ret = make_checksum_v2(kctx, NULL, 0, &subbuf, 0,
kctx              929 net/sunrpc/auth_gss/gss_krb5_crypto.c 	ret = read_bytes_from_xdr_buf(buf, len - kctx->gk5e->cksumlength,
kctx              930 net/sunrpc/auth_gss/gss_krb5_crypto.c 				      pkt_hmac, kctx->gk5e->cksumlength);
kctx              934 net/sunrpc/auth_gss/gss_krb5_crypto.c 	if (crypto_memneq(pkt_hmac, our_hmac, kctx->gk5e->cksumlength) != 0) {
kctx              938 net/sunrpc/auth_gss/gss_krb5_crypto.c 	*headskip = kctx->gk5e->conflen;
kctx              939 net/sunrpc/auth_gss/gss_krb5_crypto.c 	*tailskip = kctx->gk5e->cksumlength;
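The RFC 4121 AES path keys by role: when encrypting, the initiator uses initiator_enc/initiator_integ and the acceptor the acceptor_* keys, and the decrypt side mirrors the choice (lines 748-756 and 868-876); the sender prepends a gk5e->conflen confounder and appends a gk5e->cksumlength HMAC, which the receiver recomputes over the decrypted data and compares with crypto_memneq(). A small sketch of the key-selection step only; the helper name and the 'sending' flag are illustrative:

	/* Hedged sketch: pick the cipher and integrity key by role, as the
	 * encrypt/decrypt paths above do. */
	static void pick_v2_keys(struct krb5_ctx *kctx, bool sending,
				 struct crypto_sync_skcipher **cipher,
				 u8 **cksumkey)
	{
		/* sending uses our own keys, receiving uses the peer's */
		bool use_initiator = sending ? kctx->initiate : !kctx->initiate;

		if (use_initiator) {
			*cipher = kctx->initiator_enc;
			*cksumkey = kctx->initiator_integ;
		} else {
			*cipher = kctx->acceptor_enc;
			*cksumkey = kctx->acceptor_integ;
		}
	}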
kctx              951 net/sunrpc/auth_gss/gss_krb5_crypto.c krb5_rc4_setup_seq_key(struct krb5_ctx *kctx,
kctx              963 net/sunrpc/auth_gss/gss_krb5_crypto.c 	hmac = crypto_alloc_shash(kctx->gk5e->cksum_name, 0, 0);
kctx              966 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, PTR_ERR(hmac), kctx->gk5e->cksum_name);
kctx              974 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, kctx->gk5e->cksum_name);
kctx              982 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = crypto_shash_setkey(hmac, kctx->Ksess, kctx->gk5e->keylength);
kctx              991 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = crypto_shash_setkey(hmac, Kseq, kctx->gk5e->keylength);
kctx              999 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = crypto_sync_skcipher_setkey(cipher, Kseq, kctx->gk5e->keylength);
kctx             1017 net/sunrpc/auth_gss/gss_krb5_crypto.c krb5_rc4_setup_enc_key(struct krb5_ctx *kctx,
kctx             1030 net/sunrpc/auth_gss/gss_krb5_crypto.c 	hmac = crypto_alloc_shash(kctx->gk5e->cksum_name, 0, 0);
kctx             1033 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, PTR_ERR(hmac), kctx->gk5e->cksum_name);
kctx             1041 net/sunrpc/auth_gss/gss_krb5_crypto.c 			__func__, kctx->gk5e->cksum_name);
kctx             1049 net/sunrpc/auth_gss/gss_krb5_crypto.c 	for (i = 0; i < kctx->gk5e->keylength; i++)
kctx             1050 net/sunrpc/auth_gss/gss_krb5_crypto.c 		Kcrypt[i] = kctx->Ksess[i] ^ 0xf0;
kctx             1052 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = crypto_shash_setkey(hmac, Kcrypt, kctx->gk5e->keylength);
kctx             1061 net/sunrpc/auth_gss/gss_krb5_crypto.c 	err = crypto_shash_setkey(hmac, Kcrypt, kctx->gk5e->keylength);
kctx             1075 net/sunrpc/auth_gss/gss_krb5_crypto.c 					  kctx->gk5e->keylength);
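krb5_rc4_setup_seq_key() and krb5_rc4_setup_enc_key() derive RC4's per-message keys from the session key Ksess: Kseq comes from a chained HMAC (keyed first with Ksess, then with that output over the message checksum, lines 982-999), while Kcrypt starts from Ksess with every byte XORed with 0xf0 (lines 1049-1050) before going through the analogous HMAC chain; both results then key a sync skcipher. The XOR step, the piece fully visible above, as a tiny sketch:

	/* Hedged sketch of the Kcrypt seed derivation: mask each session-key
	 * byte with 0xf0, as at lines 1049-1050. */
	static void derive_kcrypt_seed(const u8 *ksess, u8 *kcrypt,
				       unsigned int keylen)
	{
		unsigned int i;

		for (i = 0; i < keylen; i++)
			kcrypt[i] = ksess[i] ^ 0xf0;
	}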
kctx              691 net/sunrpc/auth_gss/gss_krb5_mech.c 	struct krb5_ctx *kctx = internal_ctx;
kctx              693 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(kctx->seq);
kctx              694 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(kctx->enc);
kctx              695 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(kctx->acceptor_enc);
kctx              696 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(kctx->initiator_enc);
kctx              697 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(kctx->acceptor_enc_aux);
kctx              698 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(kctx->initiator_enc_aux);
kctx              699 net/sunrpc/auth_gss/gss_krb5_mech.c 	kfree(kctx->mech_used.data);
kctx              700 net/sunrpc/auth_gss/gss_krb5_mech.c 	kfree(kctx);
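Context teardown in gss_krb5_mech.c releases every transform the context can hold, whichever enctype it was: seq/enc for the v1 enctypes, the initiator/acceptor pair plus the _aux CBC helpers for the AES enctypes, then the copied mech OID and the context itself; transforms never allocated for a given enctype are NULL in the zeroed context, which the crypto free helpers tolerate. Reassembled from lines 693-700 into one hedged sketch:

	/* Hedged sketch: free every per-context transform, then the OID copy
	 * and the context. */
	static void krb5_free_context(struct krb5_ctx *kctx)
	{
		crypto_free_sync_skcipher(kctx->seq);
		crypto_free_sync_skcipher(kctx->enc);
		crypto_free_sync_skcipher(kctx->acceptor_enc);
		crypto_free_sync_skcipher(kctx->initiator_enc);
		crypto_free_sync_skcipher(kctx->acceptor_enc_aux);
		crypto_free_sync_skcipher(kctx->initiator_enc_aux);
		kfree(kctx->mech_used.data);
		kfree(kctx);
	}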
kctx               43 net/sunrpc/auth_gss/gss_krb5_seqnum.c krb5_make_rc4_seq_num(struct krb5_ctx *kctx, int direction, s32 seqnum,
kctx               51 net/sunrpc/auth_gss/gss_krb5_seqnum.c 	cipher = crypto_alloc_sync_skcipher(kctx->gk5e->encrypt_name, 0, 0);
kctx               68 net/sunrpc/auth_gss/gss_krb5_seqnum.c 	code = krb5_rc4_setup_seq_key(kctx, cipher, cksum);
kctx               79 net/sunrpc/auth_gss/gss_krb5_seqnum.c krb5_make_seq_num(struct krb5_ctx *kctx,
kctx               88 net/sunrpc/auth_gss/gss_krb5_seqnum.c 	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC)
kctx               89 net/sunrpc/auth_gss/gss_krb5_seqnum.c 		return krb5_make_rc4_seq_num(kctx, direction, seqnum,
kctx              112 net/sunrpc/auth_gss/gss_krb5_seqnum.c krb5_get_rc4_seq_num(struct krb5_ctx *kctx, unsigned char *cksum,
kctx              120 net/sunrpc/auth_gss/gss_krb5_seqnum.c 	cipher = crypto_alloc_sync_skcipher(kctx->gk5e->encrypt_name, 0, 0);
kctx              124 net/sunrpc/auth_gss/gss_krb5_seqnum.c 	code = krb5_rc4_setup_seq_key(kctx, cipher, cksum);
kctx              156 net/sunrpc/auth_gss/gss_krb5_seqnum.c krb5_get_seq_num(struct krb5_ctx *kctx,
kctx              163 net/sunrpc/auth_gss/gss_krb5_seqnum.c 	struct crypto_sync_skcipher *key = kctx->seq;
kctx              167 net/sunrpc/auth_gss/gss_krb5_seqnum.c 	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC)
kctx              168 net/sunrpc/auth_gss/gss_krb5_seqnum.c 		return krb5_get_rc4_seq_num(kctx, cksum, buf,
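gss_krb5_seqnum.c encrypts and recovers the 8-byte direction/sequence block: ARCFOUR_HMAC allocates a one-off cipher named by gk5e->encrypt_name and keys it via krb5_rc4_setup_seq_key() from the token checksum, while the other v1 enctypes use the long-lived kctx->seq transform. On receive, the caller in gss_krb5_wrap.c also checks that the recovered direction byte matches the peer's role, roughly as in this hedged sketch (the helper name is made up, the GSS status codes are the standard ones):

	/* Hedged sketch of the receive-side use: recover direction and seqnum
	 * from the token, then require the direction to match the peer's role. */
	static u32 check_seq_sketch(struct krb5_ctx *kctx, unsigned char *ptr)
	{
		int direction;
		u32 seqnum;

		if (krb5_get_seq_num(kctx, ptr + GSS_KRB5_TOK_HDR_LEN, ptr + 8,
				     &direction, &seqnum))
			return GSS_S_BAD_SIG;

		/* initiators expect 0xff back from the acceptor, acceptors expect 0 */
		if ((kctx->initiate && direction != 0xff) ||
		    (!kctx->initiate && direction != 0))
			return GSS_S_BAD_SIG;

		return GSS_S_COMPLETE;
	}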
kctx              158 net/sunrpc/auth_gss/gss_krb5_wrap.c gss_wrap_kerberos_v1(struct krb5_ctx *kctx, int offset,
kctx              171 net/sunrpc/auth_gss/gss_krb5_wrap.c 	u32			conflen = kctx->gk5e->conflen;
kctx              177 net/sunrpc/auth_gss/gss_krb5_wrap.c 	blocksize = crypto_sync_skcipher_blocksize(kctx->enc);
kctx              182 net/sunrpc/auth_gss/gss_krb5_wrap.c 	headlen = g_token_size(&kctx->mech_used,
kctx              183 net/sunrpc/auth_gss/gss_krb5_wrap.c 		GSS_KRB5_TOK_HDR_LEN + kctx->gk5e->cksumlength + plainlen) -
kctx              193 net/sunrpc/auth_gss/gss_krb5_wrap.c 	g_make_token_header(&kctx->mech_used,
kctx              195 net/sunrpc/auth_gss/gss_krb5_wrap.c 				kctx->gk5e->cksumlength + plainlen, &ptr);
kctx              202 net/sunrpc/auth_gss/gss_krb5_wrap.c 	msg_start = ptr + GSS_KRB5_TOK_HDR_LEN + kctx->gk5e->cksumlength;
kctx              209 net/sunrpc/auth_gss/gss_krb5_wrap.c 	*(__le16 *)(ptr + 2) = cpu_to_le16(kctx->gk5e->signalg);
kctx              210 net/sunrpc/auth_gss/gss_krb5_wrap.c 	*(__le16 *)(ptr + 4) = cpu_to_le16(kctx->gk5e->sealalg);
kctx              216 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (kctx->gk5e->keyed_cksum)
kctx              217 net/sunrpc/auth_gss/gss_krb5_wrap.c 		cksumkey = kctx->cksum;
kctx              224 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (make_checksum(kctx, ptr, 8, buf, offset + headlen - conflen,
kctx              231 net/sunrpc/auth_gss/gss_krb5_wrap.c 	seq_send = atomic_fetch_inc(&kctx->seq_send);
kctx              235 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if ((krb5_make_seq_num(kctx, kctx->seq, kctx->initiate ? 0 : 0xff,
kctx              239 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) {
kctx              242 net/sunrpc/auth_gss/gss_krb5_wrap.c 		cipher = crypto_alloc_sync_skcipher(kctx->gk5e->encrypt_name,
kctx              247 net/sunrpc/auth_gss/gss_krb5_wrap.c 		krb5_rc4_setup_enc_key(kctx, cipher, seq_send);
kctx              255 net/sunrpc/auth_gss/gss_krb5_wrap.c 		if (gss_encrypt_xdr_buf(kctx->enc, buf,
kctx              260 net/sunrpc/auth_gss/gss_krb5_wrap.c 	return (kctx->endtime < now) ? GSS_S_CONTEXT_EXPIRED : GSS_S_COMPLETE;
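gss_wrap_kerberos_v1() builds the legacy token: g_make_token_header() emits the mech OID framing, the 8-byte token header carries the signing and sealing algorithm identifiers as little-endian halfwords (lines 209-210), the checksum key is kctx->cksum only when the enctype uses keyed checksums (lines 216-217), each wrap consumes one sequence number via atomic_fetch_inc (line 231), and ARCFOUR keys a throwaway cipher per message while the other enctypes encrypt with kctx->enc. A hedged sketch of the header bytes; the TOK_ID and filler values are assumptions, the rest mirrors the lines above:

	/* Hedged sketch of the v1 wrap token header.  The 0x02 0x01 TOK_ID and
	 * the 0xff filler bytes are assumed, not shown in the listing. */
	static void build_v1_wrap_hdr(struct krb5_ctx *kctx, unsigned char *ptr)
	{
		ptr[0] = 0x02;		/* assumed: KG_TOK_WRAP_MSG, high byte */
		ptr[1] = 0x01;		/* assumed: KG_TOK_WRAP_MSG, low byte */
		*(__le16 *)(ptr + 2) = cpu_to_le16(kctx->gk5e->signalg);
		*(__le16 *)(ptr + 4) = cpu_to_le16(kctx->gk5e->sealalg);
		ptr[6] = 0xff;		/* assumed filler */
		ptr[7] = 0xff;		/* assumed filler */
	}

	/* sequence-number allocation, as at line 231:
	 * u32 seq_send = atomic_fetch_inc(&kctx->seq_send); */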
kctx              264 net/sunrpc/auth_gss/gss_krb5_wrap.c gss_unwrap_kerberos_v1(struct krb5_ctx *kctx, int offset, int len,
kctx              281 net/sunrpc/auth_gss/gss_krb5_wrap.c 	u32			conflen = kctx->gk5e->conflen;
kctx              289 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (g_verify_token_header(&kctx->mech_used, &bodysize, &ptr,
kctx              302 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (signalg != kctx->gk5e->signalg)
kctx              306 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (sealalg != kctx->gk5e->sealalg)
kctx              316 net/sunrpc/auth_gss/gss_krb5_wrap.c 	crypt_offset = ptr + (GSS_KRB5_TOK_HDR_LEN + kctx->gk5e->cksumlength) -
kctx              322 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (krb5_get_seq_num(kctx, ptr + GSS_KRB5_TOK_HDR_LEN,
kctx              326 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if ((kctx->initiate && direction != 0xff) ||
kctx              327 net/sunrpc/auth_gss/gss_krb5_wrap.c 	    (!kctx->initiate && direction != 0))
kctx              331 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (kctx->enctype == ENCTYPE_ARCFOUR_HMAC) {
kctx              335 net/sunrpc/auth_gss/gss_krb5_wrap.c 		cipher = crypto_alloc_sync_skcipher(kctx->gk5e->encrypt_name,
kctx              340 net/sunrpc/auth_gss/gss_krb5_wrap.c 		krb5_rc4_setup_enc_key(kctx, cipher, seqnum);
kctx              347 net/sunrpc/auth_gss/gss_krb5_wrap.c 		if (gss_decrypt_xdr_buf(kctx->enc, buf, crypt_offset))
kctx              351 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (kctx->gk5e->keyed_cksum)
kctx              352 net/sunrpc/auth_gss/gss_krb5_wrap.c 		cksumkey = kctx->cksum;
kctx              356 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (make_checksum(kctx, ptr, 8, buf, crypt_offset,
kctx              361 net/sunrpc/auth_gss/gss_krb5_wrap.c 						kctx->gk5e->cksumlength))
kctx              368 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (now > kctx->endtime)
kctx              376 net/sunrpc/auth_gss/gss_krb5_wrap.c 	blocksize = crypto_sync_skcipher_blocksize(kctx->enc);
kctx              377 net/sunrpc/auth_gss/gss_krb5_wrap.c 	data_start = ptr + (GSS_KRB5_TOK_HDR_LEN + kctx->gk5e->cksumlength) +
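gss_unwrap_kerberos_v1() reverses that: verify the mech OID and token framing with g_verify_token_header(), require signalg/sealalg to match the context's enctype profile (lines 302-306), recover and check the sequence number and direction, decrypt (again with a per-message RC4 cipher or the shared kctx->enc), recompute the checksum over the decrypted region and compare it, and finally locate the plaintext after the token header, checksum and confounder (line 377). The algorithm-identifier check as a hedged sketch; the status codes are the standard GSS ones:

	/* Hedged sketch of the header sanity check: the halfwords at offsets 2
	 * and 4 must name this context's signing and sealing algorithms. */
	static u32 check_v1_wrap_hdr(struct krb5_ctx *kctx, unsigned char *ptr)
	{
		u16 signalg = ptr[2] | (ptr[3] << 8);	/* little-endian */
		u16 sealalg = ptr[4] | (ptr[5] << 8);

		if (signalg != kctx->gk5e->signalg ||
		    sealalg != kctx->gk5e->sealalg)
			return GSS_S_DEFECTIVE_TOKEN;
		return GSS_S_COMPLETE;
	}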
kctx              446 net/sunrpc/auth_gss/gss_krb5_wrap.c gss_wrap_kerberos_v2(struct krb5_ctx *kctx, u32 offset,
kctx              458 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (kctx->gk5e->encrypt_v2 == NULL)
kctx              470 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if ((kctx->flags & KRB5_CTX_FLAG_INITIATOR) == 0)
kctx              472 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if ((kctx->flags & KRB5_CTX_FLAG_ACCEPTOR_SUBKEY) != 0)
kctx              486 net/sunrpc/auth_gss/gss_krb5_wrap.c 	*be64ptr = cpu_to_be64(atomic64_fetch_inc(&kctx->seq_send64));
kctx              488 net/sunrpc/auth_gss/gss_krb5_wrap.c 	err = (*kctx->gk5e->encrypt_v2)(kctx, offset, buf, pages);
kctx              493 net/sunrpc/auth_gss/gss_krb5_wrap.c 	return (kctx->endtime < now) ? GSS_S_CONTEXT_EXPIRED : GSS_S_COMPLETE;
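gss_wrap_kerberos_v2() only exists for enctypes that provide encrypt_v2 (line 458); the token's flags byte records whether the sender is the acceptor and whether an acceptor subkey is in use, the 64-bit send sequence number is stored big-endian (line 486), and the payload encryption itself is delegated through the ops table. The flag handling as a hedged sketch; treating the wrap token as always sealed is an assumption about the upstream behavior:

	/* Hedged sketch of the v2 flags byte, mirroring lines 470-472. */
	static u8 v2_wrap_flags(struct krb5_ctx *kctx)
	{
		u8 flags = KG2_TOKEN_FLAG_SEALED;	/* assumed: wrap always seals */

		if ((kctx->flags & KRB5_CTX_FLAG_INITIATOR) == 0)
			flags |= KG2_TOKEN_FLAG_SENTBYACCEPTOR;
		if ((kctx->flags & KRB5_CTX_FLAG_ACCEPTOR_SUBKEY) != 0)
			flags |= KG2_TOKEN_FLAG_ACCEPTORSUBKEY;
		return flags;
	}

	/* sequence number, written big-endian into the header (line 486):
	 * *be64ptr = cpu_to_be64(atomic64_fetch_inc(&kctx->seq_send64)); */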
kctx              497 net/sunrpc/auth_gss/gss_krb5_wrap.c gss_unwrap_kerberos_v2(struct krb5_ctx *kctx, int offset, int len,
kctx              513 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (kctx->gk5e->decrypt_v2 == NULL)
kctx              522 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if ((!kctx->initiate && (flags & KG2_TOKEN_FLAG_SENTBYACCEPTOR)) ||
kctx              523 net/sunrpc/auth_gss/gss_krb5_wrap.c 	    (kctx->initiate && !(flags & KG2_TOKEN_FLAG_SENTBYACCEPTOR)))
kctx              545 net/sunrpc/auth_gss/gss_krb5_wrap.c 	err = (*kctx->gk5e->decrypt_v2)(kctx, offset, len, buf,
kctx              571 net/sunrpc/auth_gss/gss_krb5_wrap.c 	if (now > kctx->endtime)
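On the receive side, gss_unwrap_kerberos_v2() requires decrypt_v2 (line 513), rejects tokens whose sender-role flag does not match which side we are (lines 522-523), hands the buffer to the enctype's decrypt_v2 to strip confounder and checksum, and finally enforces context expiry. The role check as a hedged sketch (helper name made up):

	/* Hedged sketch: an initiator must receive acceptor tokens and vice
	 * versa, as checked at lines 522-523. */
	static bool v2_sender_role_ok(struct krb5_ctx *kctx, u8 flags)
	{
		bool from_acceptor = (flags & KG2_TOKEN_FLAG_SENTBYACCEPTOR) != 0;

		return kctx->initiate ? from_acceptor : !from_acceptor;
	}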
kctx              601 net/sunrpc/auth_gss/gss_krb5_wrap.c 	struct krb5_ctx	*kctx = gctx->internal_ctx_id;
kctx              603 net/sunrpc/auth_gss/gss_krb5_wrap.c 	switch (kctx->enctype) {
kctx              609 net/sunrpc/auth_gss/gss_krb5_wrap.c 		return gss_wrap_kerberos_v1(kctx, offset, buf, pages);
kctx              612 net/sunrpc/auth_gss/gss_krb5_wrap.c 		return gss_wrap_kerberos_v2(kctx, offset, buf, pages);
kctx              620 net/sunrpc/auth_gss/gss_krb5_wrap.c 	struct krb5_ctx	*kctx = gctx->internal_ctx_id;
kctx              622 net/sunrpc/auth_gss/gss_krb5_wrap.c 	switch (kctx->enctype) {
kctx              628 net/sunrpc/auth_gss/gss_krb5_wrap.c 		return gss_unwrap_kerberos_v1(kctx, offset, len, buf,
kctx              632 net/sunrpc/auth_gss/gss_krb5_wrap.c 		return gss_unwrap_kerberos_v2(kctx, offset, len, buf,
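The exported wrap/unwrap entry points simply dispatch on kctx->enctype: the legacy DES, DES3 and RC4 contexts take the v1 token path, the AES CTS enctypes take v2. A hedged sketch of that switch; the case labels are the enctype constants gss_krb5.h defines, and returning GSS_S_FAILURE for anything else is an assumption (upstream treats an unknown enctype as a bug):

	/* Hedged sketch of the enctype dispatch used by gss_wrap_kerberos()
	 * and gss_unwrap_kerberos(). */
	static u32 wrap_dispatch(struct krb5_ctx *kctx, int offset,
				 struct xdr_buf *buf, struct page **pages)
	{
		switch (kctx->enctype) {
		case ENCTYPE_DES_CBC_RAW:
		case ENCTYPE_DES3_CBC_RAW:
		case ENCTYPE_ARCFOUR_HMAC:
			return gss_wrap_kerberos_v1(kctx, offset, buf, pages);
		case ENCTYPE_AES128_CTS_HMAC_SHA1_96:
		case ENCTYPE_AES256_CTS_HMAC_SHA1_96:
			return gss_wrap_kerberos_v2(kctx, offset, buf, pages);
		default:
			return GSS_S_FAILURE;	/* assumption; see above */
		}
	}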