u_ctx             727 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
u_ctx             728 drivers/crypto/chelsio/chcr_algo.c 	int qid = u_ctx->lldi.rxq_ids[ctx->rx_qidx];
u_ctx            1115 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
u_ctx            1165 drivers/crypto/chelsio/chcr_algo.c 	wrparam.qid = u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx];
u_ctx            1174 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
u_ctx            1305 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
u_ctx            1310 drivers/crypto/chelsio/chcr_algo.c 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
u_ctx            1319 drivers/crypto/chelsio/chcr_algo.c 	err = process_cipher(req, u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx],
u_ctx            1323 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
u_ctx            1335 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
u_ctx            1344 drivers/crypto/chelsio/chcr_algo.c 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
u_ctx            1351 drivers/crypto/chelsio/chcr_algo.c 	err = process_cipher(req, u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx],
u_ctx            1355 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
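
The encrypt and decrypt entry points above (chcr_algo.c:1305-1355) share one submission pattern: a back-pressure check against the adapter's crypto queue, work-request construction bound to the context's RX queue, then hand-off through port 0's netdev. A minimal sketch of that shared pattern follows; it is written against the modern skcipher API, and chcr_cipher_op, the MAY_BACKLOG test, tx_qidx, and chcr_send_wr() are assumptions reconstructed from the surrounding driver, not shown in the hits above.

/* Hypothetical helper sketching the pattern at chcr_algo.c:1305-1355. */
static int chcr_cipher_op(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct sk_buff *skb = NULL;
	int err;

	/* Refuse (or backlog) when the crypto TX queue is full. */
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    c_ctx(tfm)->tx_qidx)) &&
	    !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
		return -ENOSPC;

	/* Build the FW work request against this context's RX queue. */
	err = process_cipher(req, u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx],
			     &skb, CHCR_ENCRYPT_OP);
	if (err || !skb)
		return err;

	skb->dev = u_ctx->lldi.ports[0];	/* route via port 0's netdev */
	set_wr_txq(skb, CPL_PRIORITY_DATA, c_ctx(tfm)->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}
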
u_ctx            1363 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = NULL;
u_ctx            1370 drivers/crypto/chelsio/chcr_algo.c 		u_ctx = assign_chcr_device();
u_ctx            1371 drivers/crypto/chelsio/chcr_algo.c 		if (!u_ctx) {
u_ctx            1376 drivers/crypto/chelsio/chcr_algo.c 		ctx->dev = &u_ctx->dev;
u_ctx            1377 drivers/crypto/chelsio/chcr_algo.c 		ntxq = u_ctx->lldi.ntxq;
u_ctx            1378 drivers/crypto/chelsio/chcr_algo.c 		rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
u_ctx            1379 drivers/crypto/chelsio/chcr_algo.c 		txq_perchan = ntxq / u_ctx->lldi.nchan;
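
chcr_device_init() (chcr_algo.c:1363-1379) binds a transform context to one uld_ctx and carves the LLD's RX and TX queues into equal per-channel slices. The sketch below reconstructs that derivation; the channel selection, the final rx_qidx/tx_qidx assignment, and the -ENXIO return are assumptions based on fields used elsewhere in the hits.

/* Per-channel queue slicing as in chcr_device_init(). */
static int chcr_device_init(struct chcr_context *ctx)
{
	struct uld_ctx *u_ctx;
	unsigned int chan = 0;	/* assumed: channel chosen elsewhere */
	int ntxq, rxq_perchan, txq_perchan;

	if (!ctx->dev) {
		u_ctx = assign_chcr_device();	/* round-robin adapter pick */
		if (!u_ctx)
			return -ENXIO;
		ctx->dev = &u_ctx->dev;
		ntxq = u_ctx->lldi.ntxq;
		rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
		txq_perchan = ntxq / u_ctx->lldi.nchan;
		/* Assumed: pick a queue inside this channel's slice. */
		ctx->rx_qidx = chan * rxq_perchan;
		ctx->tx_qidx = chan * txq_perchan;
	}
	return 0;
}
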
u_ctx            1498 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
u_ctx            1557 drivers/crypto/chelsio/chcr_algo.c 			dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
u_ctx            1559 drivers/crypto/chelsio/chcr_algo.c 		if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
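
Before create_hash_wr() (chcr_algo.c:1498-1559) can reference the buffered partial block, it DMA-maps the buffer against the LLD's PCI device and checks the handle with dma_mapping_error() before use. A sketch of that step; chcr_map_reqbfr is a hypothetical wrapper name, and the dma_len field is assumed from the unmap hit at line 2008.

/* Hypothetical wrapper for the mapping step at chcr_algo.c:1557-1559. */
static int chcr_map_reqbfr(struct uld_ctx *u_ctx,
			   struct chcr_ahash_req_ctx *req_ctx,
			   unsigned int bfr_len)
{
	/* Stream the carried partial block to the adapter by DMA. */
	req_ctx->hctx_wr.dma_addr =
		dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
			       bfr_len, DMA_TO_DEVICE);
	if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
			      req_ctx->hctx_wr.dma_addr))
		return -ENOMEM;	/* caller unwinds the work request */
	req_ctx->hctx_wr.dma_len = bfr_len;
	return 0;
}
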
u_ctx            1587 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = NULL;
u_ctx            1596 drivers/crypto/chelsio/chcr_algo.c 	u_ctx = ULD_CTX(h_ctx(rtfm));
u_ctx            1613 drivers/crypto/chelsio/chcr_algo.c 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
u_ctx            1623 drivers/crypto/chelsio/chcr_algo.c 	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
u_ctx            1660 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
u_ctx            1666 drivers/crypto/chelsio/chcr_algo.c 	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
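
chcr_ahash_update() (chcr_algo.c:1587-1666) and the final/finup/digest paths below it follow the same lifecycle: check queue depth, DMA-map the request, build and send the work request, and unmap again on any failure so no mapping leaks. A condensed sketch, with chcr_ahash_submit a hypothetical name and the hash_wr_param contents elided:

/* Lifecycle shared by the ahash entry points at chcr_algo.c:1587-1979. */
static int chcr_ahash_submit(struct ahash_request *req)
{
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
	struct hash_wr_param params = { 0 };	/* real code fills lengths */
	struct sk_buff *skb;
	int error;

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    h_ctx(rtfm)->tx_qidx)) &&
	    !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
		return -ENOSPC;

	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error)
		return -ENOMEM;

	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, h_ctx(rtfm)->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
unmap:
	/* Error path mirrors the unmap hits at lines 1666/1832/1922. */
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
	return error;
}
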
u_ctx            1689 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = NULL;
u_ctx            1698 drivers/crypto/chelsio/chcr_algo.c 	u_ctx = ULD_CTX(h_ctx(rtfm));
u_ctx            1737 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
u_ctx            1751 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = NULL;
u_ctx            1758 drivers/crypto/chelsio/chcr_algo.c 	u_ctx = ULD_CTX(h_ctx(rtfm));
u_ctx            1763 drivers/crypto/chelsio/chcr_algo.c 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
u_ctx            1772 drivers/crypto/chelsio/chcr_algo.c 	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
u_ctx            1826 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
u_ctx            1832 drivers/crypto/chelsio/chcr_algo.c 	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
u_ctx            1843 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = NULL;
u_ctx            1855 drivers/crypto/chelsio/chcr_algo.c 	u_ctx = ULD_CTX(h_ctx(rtfm));
u_ctx            1856 drivers/crypto/chelsio/chcr_algo.c 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
u_ctx            1866 drivers/crypto/chelsio/chcr_algo.c 	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
u_ctx            1917 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
u_ctx            1922 drivers/crypto/chelsio/chcr_algo.c 	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
u_ctx            1933 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = NULL;
u_ctx            1940 drivers/crypto/chelsio/chcr_algo.c 	u_ctx = ULD_CTX(h_ctx(rtfm));
u_ctx            1979 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
u_ctx            1995 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
u_ctx            2008 drivers/crypto/chelsio/chcr_algo.c 		dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
u_ctx            2035 drivers/crypto/chelsio/chcr_algo.c 		chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
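
On completion, chcr_handle_ahash_resp() (chcr_algo.c:1995-2035) reverses both mappings: the single-buffer mapping made in create_hash_wr() and, once the request is fully done, the scatterlist mapping from chcr_hash_dma_map(). Sketch below; the wrapper name and the dma_len field are assumptions.

/* Hypothetical wrapper for the completion-side unmaps at 2008/2035. */
static void chcr_ahash_unmap_on_done(struct uld_ctx *u_ctx,
				     struct ahash_request *req,
				     struct chcr_hctx_per_wr *hctx_wr)
{
	/* Undo the partial-block mapping from create_hash_wr(). */
	if (hctx_wr->dma_addr) {
		dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
				 hctx_wr->dma_len, DMA_TO_DEVICE);
		hctx_wr->dma_addr = 0;
	}
	/* Then release the request's scatterlist mapping. */
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
}
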
u_ctx            2257 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
u_ctx            2259 drivers/crypto/chelsio/chcr_algo.c 	chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op);
u_ctx            3609 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx;
u_ctx            3627 drivers/crypto/chelsio/chcr_algo.c 	u_ctx = ULD_CTX(a_ctx(tfm));
u_ctx            3628 drivers/crypto/chelsio/chcr_algo.c 	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
u_ctx            3638 drivers/crypto/chelsio/chcr_algo.c 	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[a_ctx(tfm)->rx_qidx], size);
u_ctx            3645 drivers/crypto/chelsio/chcr_algo.c 	skb->dev = u_ctx->lldi.ports[0];
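
chcr_aead_op() (chcr_algo.c:3609-3645) is the one dispatcher behind every AEAD mode: each mode passes its own work-request builder in as create_wr_fn. A sketch of the dispatcher; the create_wr_t typedef and the MAY_BACKLOG test are reconstructed assumptions.

/* Generic AEAD dispatch as at chcr_algo.c:3609-3645. */
typedef struct sk_buff *(*create_wr_t)(struct aead_request *req,
				       unsigned short qid, int size);

static int chcr_aead_op(struct aead_request *req, int size,
			create_wr_t create_wr_fn)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
	struct sk_buff *skb;

	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
				   a_ctx(tfm)->tx_qidx) &&
	    !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
		return -ENOSPC;

	/* Mode-specific WR construction, bound to this context's RX queue. */
	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[a_ctx(tfm)->rx_qidx],
			   size);
	if (IS_ERR_OR_NULL(skb))
		return PTR_ERR_OR_ZERO(skb);

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, a_ctx(tfm)->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}
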
u_ctx              78 drivers/crypto/chelsio/chcr_core.c 	struct uld_ctx *u_ctx = NULL;
u_ctx              88 drivers/crypto/chelsio/chcr_core.c 		u_ctx = drv_data.last_dev;
u_ctx              97 drivers/crypto/chelsio/chcr_core.c 	return u_ctx;
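
assign_chcr_device() (chcr_core.c:78-97) hands out active adapters round-robin so transform contexts spread across cards: it returns drv_data.last_dev and advances the cursor. The rotation below is a reconstruction; the drv_mutex name is an assumption from the surrounding core code.

/* Round-robin adapter assignment as at chcr_core.c:78-97. */
static struct uld_ctx *assign_chcr_device(void)
{
	struct uld_ctx *u_ctx = NULL;

	mutex_lock(&drv_data.drv_mutex);
	if (!list_empty(&drv_data.act_dev)) {
		u_ctx = drv_data.last_dev;
		/* Advance the cursor, wrapping at the end of the list. */
		if (list_is_last(&drv_data.last_dev->entry,
				 &drv_data.act_dev))
			drv_data.last_dev =
				list_first_entry(&drv_data.act_dev,
						 struct uld_ctx, entry);
		else
			drv_data.last_dev =
				list_next_entry(drv_data.last_dev, entry);
	}
	mutex_unlock(&drv_data.drv_mutex);
	return u_ctx;
}
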
u_ctx             100 drivers/crypto/chelsio/chcr_core.c static void chcr_dev_add(struct uld_ctx *u_ctx)
u_ctx             104 drivers/crypto/chelsio/chcr_core.c 	dev = &u_ctx->dev;
u_ctx             108 drivers/crypto/chelsio/chcr_core.c 	list_move(&u_ctx->entry, &drv_data.act_dev);
u_ctx             110 drivers/crypto/chelsio/chcr_core.c 		drv_data.last_dev = u_ctx;
u_ctx             114 drivers/crypto/chelsio/chcr_core.c static void chcr_dev_init(struct uld_ctx *u_ctx)
u_ctx             118 drivers/crypto/chelsio/chcr_core.c 	dev = &u_ctx->dev;
u_ctx             127 drivers/crypto/chelsio/chcr_core.c 	list_add_tail(&u_ctx->entry, &drv_data.inact_dev);
u_ctx             131 drivers/crypto/chelsio/chcr_core.c static int chcr_dev_move(struct uld_ctx *u_ctx)
u_ctx             136 drivers/crypto/chelsio/chcr_core.c 	if (drv_data.last_dev == u_ctx) {
u_ctx             144 drivers/crypto/chelsio/chcr_core.c 	list_move(&u_ctx->entry, &drv_data.inact_dev);
u_ctx             147 drivers/crypto/chelsio/chcr_core.c 	adap = padap(&u_ctx->dev);
u_ctx             192 drivers/crypto/chelsio/chcr_core.c 	struct uld_ctx *u_ctx;
u_ctx             199 drivers/crypto/chelsio/chcr_core.c 	u_ctx = kzalloc(sizeof(*u_ctx), GFP_KERNEL);
u_ctx             200 drivers/crypto/chelsio/chcr_core.c 	if (!u_ctx) {
u_ctx             201 drivers/crypto/chelsio/chcr_core.c 		u_ctx = ERR_PTR(-ENOMEM);
u_ctx             204 drivers/crypto/chelsio/chcr_core.c 	u_ctx->lldi = *lld;
u_ctx             205 drivers/crypto/chelsio/chcr_core.c 	chcr_dev_init(u_ctx);
u_ctx             211 drivers/crypto/chelsio/chcr_core.c 	return u_ctx;
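
chcr_uld_add() (chcr_core.c:192-211) is the cxgb4 ULD registration hook: it allocates the per-adapter context, snapshots the LLD info (queue ids, ports, PCI device), and parks the device on the inactive list via chcr_dev_init() until the state-change callback activates it. Sketch, with the lld parameter type taken from the cxgb4 ULD interface:

/* ULD add hook as at chcr_core.c:192-211. */
static void *chcr_uld_add(const struct cxgb4_lld_info *lld)
{
	struct uld_ctx *u_ctx;

	u_ctx = kzalloc(sizeof(*u_ctx), GFP_KERNEL);
	if (!u_ctx) {
		u_ctx = ERR_PTR(-ENOMEM);
		goto out;
	}
	u_ctx->lldi = *lld;	/* copy queue ids, ports, pdev by value */
	chcr_dev_init(u_ctx);	/* onto drv_data.inact_dev, state CHCR_INIT */
out:
	return u_ctx;
}
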
u_ctx             217 drivers/crypto/chelsio/chcr_core.c 	struct uld_ctx *u_ctx = (struct uld_ctx *)handle;
u_ctx             218 drivers/crypto/chelsio/chcr_core.c 	struct chcr_dev *dev = &u_ctx->dev;
u_ctx             240 drivers/crypto/chelsio/chcr_core.c static void chcr_detach_device(struct uld_ctx *u_ctx)
u_ctx             242 drivers/crypto/chelsio/chcr_core.c 	struct chcr_dev *dev = &u_ctx->dev;
u_ctx             255 drivers/crypto/chelsio/chcr_core.c 	chcr_dev_move(u_ctx);
u_ctx             260 drivers/crypto/chelsio/chcr_core.c 	struct uld_ctx *u_ctx = handle;
u_ctx             265 drivers/crypto/chelsio/chcr_core.c 		if (u_ctx->dev.state != CHCR_INIT) {
u_ctx             269 drivers/crypto/chelsio/chcr_core.c 		chcr_dev_add(u_ctx);
u_ctx             274 drivers/crypto/chelsio/chcr_core.c 		chcr_detach_device(u_ctx);
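
chcr_uld_state_change() (chcr_core.c:260-274) drives the device lifecycle: CXGB4_STATE_UP promotes a freshly initialized device onto the active list, while detach pulls it back off. The switch arms below are reconstructed; only the CHCR_INIT guard and the two calls appear verbatim in the hits above.

/* ULD state-change handling as at chcr_core.c:260-274. */
static int chcr_uld_state_change(void *handle, enum cxgb4_state state)
{
	struct uld_ctx *u_ctx = handle;

	switch (state) {
	case CXGB4_STATE_UP:
		if (u_ctx->dev.state != CHCR_INIT)
			break;			/* already attached */
		chcr_dev_add(u_ctx);		/* move to act_dev */
		break;
	case CXGB4_STATE_DETACH:
		chcr_detach_device(u_ctx);	/* quiesce and retire */
		break;
	default:
		break;
	}
	return 0;
}
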
u_ctx             299 drivers/crypto/chelsio/chcr_core.c 	struct uld_ctx *u_ctx, *tmp;
u_ctx             306 drivers/crypto/chelsio/chcr_core.c 	list_for_each_entry_safe(u_ctx, tmp, &drv_data.act_dev, entry) {
u_ctx             307 drivers/crypto/chelsio/chcr_core.c 		list_del(&u_ctx->entry);
u_ctx             308 drivers/crypto/chelsio/chcr_core.c 		kfree(u_ctx);
u_ctx             310 drivers/crypto/chelsio/chcr_core.c 	list_for_each_entry_safe(u_ctx, tmp, &drv_data.inact_dev, entry) {
u_ctx             311 drivers/crypto/chelsio/chcr_core.c 		list_del(&u_ctx->entry);
u_ctx             312 drivers/crypto/chelsio/chcr_core.c 		kfree(u_ctx);
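
Module exit (chcr_core.c:299-312) drains both device lists with the _safe iterator, since each entry is freed inside the loop body. A sketch of the exit path; the function name, the mutex, and the stop_crypto() call are assumptions from the surrounding core code.

/* Exit-path teardown as at chcr_core.c:299-312. */
static void __exit chcr_crypto_exit(void)
{
	struct uld_ctx *u_ctx, *tmp;

	stop_crypto();		/* assumed: unregister algorithms first */
	mutex_lock(&drv_data.drv_mutex);
	list_for_each_entry_safe(u_ctx, tmp, &drv_data.act_dev, entry) {
		list_del(&u_ctx->entry);
		kfree(u_ctx);
	}
	list_for_each_entry_safe(u_ctx, tmp, &drv_data.inact_dev, entry) {
		list_del(&u_ctx->entry);
		kfree(u_ctx);
	}
	drv_data.last_dev = NULL;
	mutex_unlock(&drv_data.drv_mutex);
}
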
u_ctx             209 drivers/crypto/chelsio/chcr_core.h 	struct uld_ctx *u_ctx = container_of(dev, struct uld_ctx, dev);
u_ctx             211 drivers/crypto/chelsio/chcr_core.h 	return pci_get_drvdata(u_ctx->lldi.pdev);
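
The padap() inline (chcr_core.h:209-211) recovers the enclosing uld_ctx from its embedded chcr_dev with container_of(), then returns the adapter that cxgb4 stored in the PCI drvdata. Reassembled from the two hits above; only the return type and the struct adapter name come from the wider driver.

/* Accessor reassembled from chcr_core.h:209-211. */
static inline struct adapter *padap(struct chcr_dev *dev)
{
	struct uld_ctx *u_ctx = container_of(dev, struct uld_ctx, dev);

	return pci_get_drvdata(u_ctx->lldi.pdev);
}
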