nvme_sq           256 drivers/nvme/host/trace.c EXPORT_TRACEPOINT_SYMBOL_GPL(nvme_sq);
nvme_sq           144 drivers/nvme/host/trace.h TRACE_EVENT(nvme_sq,
nvme_sq           129 drivers/nvme/target/fc.c 	struct nvmet_sq			nvme_sq;
nvme_sq           619 drivers/nvme/target/fc.c 	ret = nvmet_sq_init(&queue->nvme_sq);
nvme_sq           746 drivers/nvme/target/fc.c 		nvmet_sq_destroy(&queue->nvme_sq);
nvme_sq          1172 drivers/nvme/target/fc.c 			if (queue && queue->nvme_sq.ctrl == ctrl) {
nvme_sq          2198 drivers/nvme/target/fc.c 				&fod->queue->nvme_sq,
nvme_sq            54 drivers/nvme/target/loop.c 	struct nvmet_sq		nvme_sq;
nvme_sq            96 drivers/nvme/target/loop.c 		container_of(req->sq, struct nvme_loop_queue, nvme_sq);
nvme_sq           153 drivers/nvme/target/loop.c 			&queue->nvme_sq, &nvme_loop_ops))
nvme_sq           185 drivers/nvme/target/loop.c 	if (!nvmet_req_init(&iod->req, &queue->nvme_cq, &queue->nvme_sq,
nvme_sq           256 drivers/nvme/target/loop.c 	nvmet_sq_destroy(&ctrl->queues[0].nvme_sq);
nvme_sq           289 drivers/nvme/target/loop.c 		nvmet_sq_destroy(&ctrl->queues[i].nvme_sq);
nvme_sq           308 drivers/nvme/target/loop.c 		ret = nvmet_sq_init(&ctrl->queues[i].nvme_sq);
nvme_sq           353 drivers/nvme/target/loop.c 	error = nvmet_sq_init(&ctrl->queues[0].nvme_sq);
nvme_sq           403 drivers/nvme/target/loop.c 	nvmet_sq_destroy(&ctrl->queues[0].nvme_sq);
nvme_sq           280 drivers/nvme/target/nvmet.h 	u16 (*install_queue)(struct nvmet_sq *nvme_sq);
nvme_sq            85 drivers/nvme/target/rdma.c 	struct nvmet_sq		nvme_sq;
nvme_sq           522 drivers/nvme/target/rdma.c 	if (queue->nvme_sq.ctrl) {
nvme_sq           523 drivers/nvme/target/rdma.c 		nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl);
nvme_sq           739 drivers/nvme/target/rdma.c 				queue->nvme_sq.ctrl->cntlid);
nvme_sq           768 drivers/nvme/target/rdma.c 			&queue->nvme_sq, &nvmet_rdma_ops))
nvme_sq          1068 drivers/nvme/target/rdma.c 	nvmet_sq_destroy(&queue->nvme_sq);
nvme_sq          1150 drivers/nvme/target/rdma.c 	ret = nvmet_sq_init(&queue->nvme_sq);
nvme_sq          1220 drivers/nvme/target/rdma.c 	nvmet_sq_destroy(&queue->nvme_sq);
nvme_sq          1491 drivers/nvme/target/rdma.c 		if (queue->nvme_sq.ctrl == ctrl) {
nvme_sq            89 drivers/nvme/target/tcp.c 	struct nvmet_sq		nvme_sq;
nvme_sq           312 drivers/nvme/target/tcp.c 	if (queue->nvme_sq.ctrl)
nvme_sq           313 drivers/nvme/target/tcp.c 		nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl);
nvme_sq           374 drivers/nvme/target/tcp.c 	pdu->hdr.flags = NVME_TCP_F_DATA_LAST | (queue->nvme_sq.sqhd_disabled ?
nvme_sq           530 drivers/nvme/target/tcp.c 		    queue->data_digest || !queue->nvme_sq.sqhd_disabled)
nvme_sq           552 drivers/nvme/target/tcp.c 		if (queue->nvme_sq.sqhd_disabled) {
nvme_sq           560 drivers/nvme/target/tcp.c 	if (queue->nvme_sq.sqhd_disabled) {
nvme_sq           641 drivers/nvme/target/tcp.c 	if (queue->nvme_sq.sqhd_disabled) {
nvme_sq           909 drivers/nvme/target/tcp.c 			&queue->nvme_sq, &nvmet_tcp_ops))) {
nvme_sq          1348 drivers/nvme/target/tcp.c 	nvmet_sq_destroy(&queue->nvme_sq);
nvme_sq          1498 drivers/nvme/target/tcp.c 	ret = nvmet_sq_init(&queue->nvme_sq);
nvme_sq          1522 drivers/nvme/target/tcp.c 	nvmet_sq_destroy(&queue->nvme_sq);
nvme_sq          1679 drivers/nvme/target/tcp.c 		if (queue->nvme_sq.ctrl == ctrl)
nvme_sq          1687 drivers/nvme/target/tcp.c 		container_of(sq, struct nvmet_tcp_queue, nvme_sq);
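
The target-side hits above all follow one pattern: each nvmet transport (fc, loop, rdma, tcp) embeds a struct nvmet_sq member named nvme_sq in its per-queue structure, initialises it with nvmet_sq_init() at queue setup, binds every incoming command to it through nvmet_req_init(), and tears it down with nvmet_sq_destroy(); container_of() maps a core-owned sq pointer back to the owning transport queue (see the loop.c and tcp.c hits). The sketch below illustrates that pattern for an invented "xport" transport. The xport_* names and the empty ops table are assumptions made for illustration; only the nvmet_* calls and the embedding/container_of idiom come from the listing.

/*
 * Minimal sketch of the nvme_sq embedding pattern, modelled on the
 * fc/loop/rdma/tcp hits above.  Everything prefixed xport_ is
 * hypothetical; nvmet_sq_init(), nvmet_req_init(), nvmet_sq_destroy()
 * and the container_of() mapping are the calls shown in the listing.
 */
#include <linux/kernel.h>

#include "nvmet.h"	/* struct nvmet_sq, nvmet_cq, nvmet_req, nvmet_fabrics_ops */

struct xport_queue {
	struct nvmet_cq		nvme_cq;
	struct nvmet_sq		nvme_sq;	/* embedded, as in the transports above */
	/* ... transport-specific state ... */
};

/* Recover the transport queue from a core-owned sq pointer. */
static inline struct xport_queue *xport_queue_from_sq(struct nvmet_sq *sq)
{
	return container_of(sq, struct xport_queue, nvme_sq);
}

/* Assumed to be populated with real callbacks in a full transport. */
static const struct nvmet_fabrics_ops xport_ops;

/* Queue bring-up: initialise the embedded submission queue once. */
static int xport_queue_init(struct xport_queue *queue)
{
	return nvmet_sq_init(&queue->nvme_sq);
}

/* Per-command path: bind a request to this queue's cq/sq pair. */
static void xport_handle_cmd(struct xport_queue *queue, struct nvmet_req *req)
{
	if (!nvmet_req_init(req, &queue->nvme_cq, &queue->nvme_sq, &xport_ops))
		return;	/* the core has already reported the failure to the host */

	/*
	 * Hand the command to the core for execution; the exact dispatch
	 * call differs between kernel versions, so it is elided here.
	 */
}

/* Queue teardown: waits for outstanding requests before releasing the sq. */
static void xport_queue_destroy(struct xport_queue *queue)
{
	nvmet_sq_destroy(&queue->nvme_sq);
}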