nvmeq             202 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq;
nvmeq             271 drivers/nvme/host/pci.c 			    struct nvme_queue *nvmeq, int qid)
nvmeq             276 drivers/nvme/host/pci.c 	nvmeq->dbbuf_sq_db = &dev->dbbuf_dbs[sq_idx(qid, dev->db_stride)];
nvmeq             277 drivers/nvme/host/pci.c 	nvmeq->dbbuf_cq_db = &dev->dbbuf_dbs[cq_idx(qid, dev->db_stride)];
nvmeq             278 drivers/nvme/host/pci.c 	nvmeq->dbbuf_sq_ei = &dev->dbbuf_eis[sq_idx(qid, dev->db_stride)];
nvmeq             279 drivers/nvme/host/pci.c 	nvmeq->dbbuf_cq_ei = &dev->dbbuf_eis[cq_idx(qid, dev->db_stride)];
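
The four dbbuf pointers above (lines 276-279) index a shadow-doorbell page shared with the device. A minimal userspace sketch of the index math, assuming the sq_idx()/cq_idx() helpers (not shown in this listing) expand to the usual stride-scaled pair, SQ slot first:

	#include <stdio.h>

	/* Assumed expansion of the driver's sq_idx()/cq_idx() helpers:
	 * each queue pair owns two stride-scaled doorbell slots. */
	static unsigned sq_idx(unsigned qid, unsigned stride) { return 2 * qid * stride; }
	static unsigned cq_idx(unsigned qid, unsigned stride) { return (2 * qid + 1) * stride; }

	int main(void)
	{
		unsigned stride = 1;	/* CAP.DSTRD == 0 -> stride of 1 u32 slot */
		unsigned qid;

		for (qid = 0; qid < 3; qid++)
			printf("qid %u: sq slot %u, cq slot %u\n",
			       qid, sq_idx(qid, stride), cq_idx(qid, stride));
		return 0;
	}

The same arithmetic reappears at lines 1499 and 1534, where q_db is pointed at &dev->dbs[qid * 2 * dev->db_stride].
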
nvmeq             375 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = &dev->queues[0];
nvmeq             380 drivers/nvme/host/pci.c 	hctx->driver_data = nvmeq;
nvmeq             388 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = &dev->queues[hctx_idx + 1];
nvmeq             391 drivers/nvme/host/pci.c 	hctx->driver_data = nvmeq;
nvmeq             401 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = &dev->queues[queue_idx];
nvmeq             403 drivers/nvme/host/pci.c 	BUG_ON(!nvmeq);
nvmeq             404 drivers/nvme/host/pci.c 	iod->nvmeq = nvmeq;
nvmeq             453 drivers/nvme/host/pci.c static inline void nvme_write_sq_db(struct nvme_queue *nvmeq, bool write_sq)
nvmeq             456 drivers/nvme/host/pci.c 		u16 next_tail = nvmeq->sq_tail + 1;
nvmeq             458 drivers/nvme/host/pci.c 		if (next_tail == nvmeq->q_depth)
nvmeq             460 drivers/nvme/host/pci.c 		if (next_tail != nvmeq->last_sq_tail)
nvmeq             464 drivers/nvme/host/pci.c 	if (nvme_dbbuf_update_and_check_event(nvmeq->sq_tail,
nvmeq             465 drivers/nvme/host/pci.c 			nvmeq->dbbuf_sq_db, nvmeq->dbbuf_sq_ei))
nvmeq             466 drivers/nvme/host/pci.c 		writel(nvmeq->sq_tail, nvmeq->q_db);
nvmeq             467 drivers/nvme/host/pci.c 	nvmeq->last_sq_tail = nvmeq->sq_tail;
nvmeq             476 drivers/nvme/host/pci.c static void nvme_submit_cmd(struct nvme_queue *nvmeq, struct nvme_command *cmd,
nvmeq             479 drivers/nvme/host/pci.c 	spin_lock(&nvmeq->sq_lock);
nvmeq             480 drivers/nvme/host/pci.c 	memcpy(nvmeq->sq_cmds + (nvmeq->sq_tail << nvmeq->sqes),
nvmeq             482 drivers/nvme/host/pci.c 	if (++nvmeq->sq_tail == nvmeq->q_depth)
nvmeq             483 drivers/nvme/host/pci.c 		nvmeq->sq_tail = 0;
nvmeq             484 drivers/nvme/host/pci.c 	nvme_write_sq_db(nvmeq, write_sq);
nvmeq             485 drivers/nvme/host/pci.c 	spin_unlock(&nvmeq->sq_lock);
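
nvme_submit_cmd() (lines 479-485) copies the command into the ring at sq_tail, wraps the tail at q_depth, and lets nvme_write_sq_db() decide whether the hardware doorbell (or the dbbuf shadow) actually needs a write. A self-contained model of that tail bookkeeping, ignoring the sq_lock and the real MMIO write; the struct and function names here are hypothetical:

	#include <stdio.h>

	#define Q_DEPTH 4	/* tiny ring for illustration */

	struct sq_model {
		unsigned tail;		/* next free slot */
		unsigned last_tail;	/* last value written to the doorbell */
	};

	/* Models the tail advance in nvme_submit_cmd(); the real code also
	 * memcpy()s the command into sq_cmds[tail << sqes] first. */
	static void submit(struct sq_model *q, int write_db)
	{
		if (++q->tail == Q_DEPTH)
			q->tail = 0;
		if (write_db) {		/* nvme_write_sq_db(nvmeq, true) */
			printf("doorbell <- %u\n", q->tail);
			q->last_tail = q->tail;
		}
	}

	int main(void)
	{
		struct sq_model q = { 0, 0 };

		submit(&q, 0);	/* batched: no doorbell yet */
		submit(&q, 0);
		submit(&q, 1);	/* bd->last: flush both commands with one write */
		return 0;
	}

This is why nvme_commit_rqs() (lines 490-495) only rings the doorbell when sq_tail and last_sq_tail disagree: batched submissions are flushed with a single register write.
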
nvmeq             490 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = hctx->driver_data;
nvmeq             492 drivers/nvme/host/pci.c 	spin_lock(&nvmeq->sq_lock);
nvmeq             493 drivers/nvme/host/pci.c 	if (nvmeq->sq_tail != nvmeq->last_sq_tail)
nvmeq             494 drivers/nvme/host/pci.c 		nvme_write_sq_db(nvmeq, true);
nvmeq             495 drivers/nvme/host/pci.c 	spin_unlock(&nvmeq->sq_lock);
nvmeq             517 drivers/nvme/host/pci.c 	if (!iod->nvmeq->qid)
nvmeq             810 drivers/nvme/host/pci.c 			if (iod->nvmeq->qid &&
nvmeq             866 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = hctx->driver_data;
nvmeq             867 drivers/nvme/host/pci.c 	struct nvme_dev *dev = nvmeq->dev;
nvmeq             881 drivers/nvme/host/pci.c 	if (unlikely(!test_bit(NVMEQ_ENABLED, &nvmeq->flags)))
nvmeq             901 drivers/nvme/host/pci.c 	nvme_submit_cmd(nvmeq, &cmnd, bd->last);
nvmeq             913 drivers/nvme/host/pci.c 	struct nvme_dev *dev = iod->nvmeq->dev;
nvmeq             925 drivers/nvme/host/pci.c static inline bool nvme_cqe_pending(struct nvme_queue *nvmeq)
nvmeq             927 drivers/nvme/host/pci.c 	return (le16_to_cpu(nvmeq->cqes[nvmeq->cq_head].status) & 1) ==
nvmeq             928 drivers/nvme/host/pci.c 			nvmeq->cq_phase;
nvmeq             931 drivers/nvme/host/pci.c static inline void nvme_ring_cq_doorbell(struct nvme_queue *nvmeq)
nvmeq             933 drivers/nvme/host/pci.c 	u16 head = nvmeq->cq_head;
nvmeq             935 drivers/nvme/host/pci.c 	if (nvme_dbbuf_update_and_check_event(head, nvmeq->dbbuf_cq_db,
nvmeq             936 drivers/nvme/host/pci.c 					      nvmeq->dbbuf_cq_ei))
nvmeq             937 drivers/nvme/host/pci.c 		writel(head, nvmeq->q_db + nvmeq->dev->db_stride);
nvmeq             940 drivers/nvme/host/pci.c static inline struct blk_mq_tags *nvme_queue_tagset(struct nvme_queue *nvmeq)
nvmeq             942 drivers/nvme/host/pci.c 	if (!nvmeq->qid)
nvmeq             943 drivers/nvme/host/pci.c 		return nvmeq->dev->admin_tagset.tags[0];
nvmeq             944 drivers/nvme/host/pci.c 	return nvmeq->dev->tagset.tags[nvmeq->qid - 1];
nvmeq             947 drivers/nvme/host/pci.c static inline void nvme_handle_cqe(struct nvme_queue *nvmeq, u16 idx)
nvmeq             949 drivers/nvme/host/pci.c 	volatile struct nvme_completion *cqe = &nvmeq->cqes[idx];
nvmeq             952 drivers/nvme/host/pci.c 	if (unlikely(cqe->command_id >= nvmeq->q_depth)) {
nvmeq             953 drivers/nvme/host/pci.c 		dev_warn(nvmeq->dev->ctrl.device,
nvmeq             965 drivers/nvme/host/pci.c 	if (unlikely(nvmeq->qid == 0 &&
nvmeq             967 drivers/nvme/host/pci.c 		nvme_complete_async_event(&nvmeq->dev->ctrl,
nvmeq             972 drivers/nvme/host/pci.c 	req = blk_mq_tag_to_rq(nvme_queue_tagset(nvmeq), cqe->command_id);
nvmeq             973 drivers/nvme/host/pci.c 	trace_nvme_sq(req, cqe->sq_head, nvmeq->sq_tail);
nvmeq             977 drivers/nvme/host/pci.c static void nvme_complete_cqes(struct nvme_queue *nvmeq, u16 start, u16 end)
nvmeq             980 drivers/nvme/host/pci.c 		nvme_handle_cqe(nvmeq, start);
nvmeq             981 drivers/nvme/host/pci.c 		if (++start == nvmeq->q_depth)
nvmeq             986 drivers/nvme/host/pci.c static inline void nvme_update_cq_head(struct nvme_queue *nvmeq)
nvmeq             988 drivers/nvme/host/pci.c 	if (nvmeq->cq_head == nvmeq->q_depth - 1) {
nvmeq             989 drivers/nvme/host/pci.c 		nvmeq->cq_head = 0;
nvmeq             990 drivers/nvme/host/pci.c 		nvmeq->cq_phase = !nvmeq->cq_phase;
nvmeq             992 drivers/nvme/host/pci.c 		nvmeq->cq_head++;
nvmeq             996 drivers/nvme/host/pci.c static inline int nvme_process_cq(struct nvme_queue *nvmeq, u16 *start,
nvmeq            1001 drivers/nvme/host/pci.c 	*start = nvmeq->cq_head;
nvmeq            1002 drivers/nvme/host/pci.c 	while (nvme_cqe_pending(nvmeq)) {
nvmeq            1003 drivers/nvme/host/pci.c 		if (tag == -1U || nvmeq->cqes[nvmeq->cq_head].command_id == tag)
nvmeq            1005 drivers/nvme/host/pci.c 		nvme_update_cq_head(nvmeq);
nvmeq            1007 drivers/nvme/host/pci.c 	*end = nvmeq->cq_head;
nvmeq            1010 drivers/nvme/host/pci.c 		nvme_ring_cq_doorbell(nvmeq);
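
Completion reaping (lines 925-1010) relies on the phase bit: bit 0 of each CQE's status word flips every time the controller wraps the queue, so an entry is new iff that bit matches cq_phase. A standalone model of nvme_cqe_pending() and nvme_update_cq_head(), under the simplifying assumption of a producer that has already posted entries with the correct phase:

	#include <stdio.h>

	#define Q_DEPTH 4

	static unsigned short cqes_status[Q_DEPTH];	/* bit 0 = phase */
	static unsigned cq_head;
	static unsigned cq_phase = 1;			/* queues start in phase 1 */

	static int cqe_pending(void)
	{
		return (cqes_status[cq_head] & 1) == cq_phase;
	}

	static void update_cq_head(void)
	{
		if (cq_head == Q_DEPTH - 1) {
			cq_head = 0;
			cq_phase = !cq_phase;	/* wrapped: expect the other phase */
		} else {
			cq_head++;
		}
	}

	int main(void)
	{
		/* Controller "posts" two completions in phase 1. */
		cqes_status[0] = 1;
		cqes_status[1] = 1;

		while (cqe_pending()) {
			printf("reaping CQE at head %u\n", cq_head);
			update_cq_head();
		}
		return 0;
	}

nvme_process_cq() is this loop plus the start/end window handed to nvme_complete_cqes(), with nvme_ring_cq_doorbell() telling the controller how far the host has consumed.
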
nvmeq            1016 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = data;
nvmeq            1025 drivers/nvme/host/pci.c 	if (nvmeq->cq_head != nvmeq->last_cq_head)
nvmeq            1027 drivers/nvme/host/pci.c 	nvme_process_cq(nvmeq, &start, &end, -1);
nvmeq            1028 drivers/nvme/host/pci.c 	nvmeq->last_cq_head = nvmeq->cq_head;
nvmeq            1032 drivers/nvme/host/pci.c 		nvme_complete_cqes(nvmeq, start, end);
nvmeq            1041 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = data;
nvmeq            1042 drivers/nvme/host/pci.c 	if (nvme_cqe_pending(nvmeq))
nvmeq            1051 drivers/nvme/host/pci.c static int nvme_poll_irqdisable(struct nvme_queue *nvmeq, unsigned int tag)
nvmeq            1053 drivers/nvme/host/pci.c 	struct pci_dev *pdev = to_pci_dev(nvmeq->dev->dev);
nvmeq            1062 drivers/nvme/host/pci.c 	if (test_bit(NVMEQ_POLLED, &nvmeq->flags)) {
nvmeq            1063 drivers/nvme/host/pci.c 		spin_lock(&nvmeq->cq_poll_lock);
nvmeq            1064 drivers/nvme/host/pci.c 		found = nvme_process_cq(nvmeq, &start, &end, tag);
nvmeq            1065 drivers/nvme/host/pci.c 		spin_unlock(&nvmeq->cq_poll_lock);
nvmeq            1067 drivers/nvme/host/pci.c 		disable_irq(pci_irq_vector(pdev, nvmeq->cq_vector));
nvmeq            1068 drivers/nvme/host/pci.c 		found = nvme_process_cq(nvmeq, &start, &end, tag);
nvmeq            1069 drivers/nvme/host/pci.c 		enable_irq(pci_irq_vector(pdev, nvmeq->cq_vector));
nvmeq            1072 drivers/nvme/host/pci.c 	nvme_complete_cqes(nvmeq, start, end);
nvmeq            1078 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = hctx->driver_data;
nvmeq            1082 drivers/nvme/host/pci.c 	if (!nvme_cqe_pending(nvmeq))
nvmeq            1085 drivers/nvme/host/pci.c 	spin_lock(&nvmeq->cq_poll_lock);
nvmeq            1086 drivers/nvme/host/pci.c 	found = nvme_process_cq(nvmeq, &start, &end, -1);
nvmeq            1087 drivers/nvme/host/pci.c 	nvme_complete_cqes(nvmeq, start, end);
nvmeq            1088 drivers/nvme/host/pci.c 	spin_unlock(&nvmeq->cq_poll_lock);
nvmeq            1096 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = &dev->queues[0];
nvmeq            1102 drivers/nvme/host/pci.c 	nvme_submit_cmd(nvmeq, &c, true);
nvmeq            1117 drivers/nvme/host/pci.c 		struct nvme_queue *nvmeq, s16 vector)
nvmeq            1122 drivers/nvme/host/pci.c 	if (!test_bit(NVMEQ_POLLED, &nvmeq->flags))
nvmeq            1131 drivers/nvme/host/pci.c 	c.create_cq.prp1 = cpu_to_le64(nvmeq->cq_dma_addr);
nvmeq            1133 drivers/nvme/host/pci.c 	c.create_cq.qsize = cpu_to_le16(nvmeq->q_depth - 1);
nvmeq            1141 drivers/nvme/host/pci.c 						struct nvme_queue *nvmeq)
nvmeq            1161 drivers/nvme/host/pci.c 	c.create_sq.prp1 = cpu_to_le64(nvmeq->sq_dma_addr);
nvmeq            1163 drivers/nvme/host/pci.c 	c.create_sq.qsize = cpu_to_le16(nvmeq->q_depth - 1);
nvmeq            1183 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = iod->nvmeq;
nvmeq            1185 drivers/nvme/host/pci.c 	dev_warn(nvmeq->dev->ctrl.device,
nvmeq            1187 drivers/nvme/host/pci.c 	atomic_inc(&nvmeq->dev->ctrl.abort_limit);
nvmeq            1238 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = iod->nvmeq;
nvmeq            1239 drivers/nvme/host/pci.c 	struct nvme_dev *dev = nvmeq->dev;
nvmeq            1264 drivers/nvme/host/pci.c 	if (nvme_poll_irqdisable(nvmeq, req->tag)) {
nvmeq            1267 drivers/nvme/host/pci.c 			 req->tag, nvmeq->qid);
nvmeq            1284 drivers/nvme/host/pci.c 			 req->tag, nvmeq->qid);
nvmeq            1299 drivers/nvme/host/pci.c 	if (!nvmeq->qid || iod->aborted) {
nvmeq            1302 drivers/nvme/host/pci.c 			 req->tag, nvmeq->qid);
nvmeq            1319 drivers/nvme/host/pci.c 	cmd.abort.sqid = cpu_to_le16(nvmeq->qid);
nvmeq            1321 drivers/nvme/host/pci.c 	dev_warn(nvmeq->dev->ctrl.device,
nvmeq            1323 drivers/nvme/host/pci.c 		 req->tag, nvmeq->qid);
nvmeq            1344 drivers/nvme/host/pci.c static void nvme_free_queue(struct nvme_queue *nvmeq)
nvmeq            1346 drivers/nvme/host/pci.c 	dma_free_coherent(nvmeq->dev->dev, CQ_SIZE(nvmeq),
nvmeq            1347 drivers/nvme/host/pci.c 				(void *)nvmeq->cqes, nvmeq->cq_dma_addr);
nvmeq            1348 drivers/nvme/host/pci.c 	if (!nvmeq->sq_cmds)
nvmeq            1351 drivers/nvme/host/pci.c 	if (test_and_clear_bit(NVMEQ_SQ_CMB, &nvmeq->flags)) {
nvmeq            1352 drivers/nvme/host/pci.c 		pci_free_p2pmem(to_pci_dev(nvmeq->dev->dev),
nvmeq            1353 drivers/nvme/host/pci.c 				nvmeq->sq_cmds, SQ_SIZE(nvmeq));
nvmeq            1355 drivers/nvme/host/pci.c 		dma_free_coherent(nvmeq->dev->dev, SQ_SIZE(nvmeq),
nvmeq            1356 drivers/nvme/host/pci.c 				nvmeq->sq_cmds, nvmeq->sq_dma_addr);
nvmeq            1374 drivers/nvme/host/pci.c static int nvme_suspend_queue(struct nvme_queue *nvmeq)
nvmeq            1376 drivers/nvme/host/pci.c 	if (!test_and_clear_bit(NVMEQ_ENABLED, &nvmeq->flags))
nvmeq            1382 drivers/nvme/host/pci.c 	nvmeq->dev->online_queues--;
nvmeq            1383 drivers/nvme/host/pci.c 	if (!nvmeq->qid && nvmeq->dev->ctrl.admin_q)
nvmeq            1384 drivers/nvme/host/pci.c 		blk_mq_quiesce_queue(nvmeq->dev->ctrl.admin_q);
nvmeq            1385 drivers/nvme/host/pci.c 	if (!test_and_clear_bit(NVMEQ_POLLED, &nvmeq->flags))
nvmeq            1386 drivers/nvme/host/pci.c 		pci_free_irq(to_pci_dev(nvmeq->dev->dev), nvmeq->cq_vector, nvmeq);
nvmeq            1400 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = &dev->queues[0];
nvmeq            1407 drivers/nvme/host/pci.c 	nvme_poll_irqdisable(nvmeq, -1);
nvmeq            1451 drivers/nvme/host/pci.c static int nvme_alloc_sq_cmds(struct nvme_dev *dev, struct nvme_queue *nvmeq,
nvmeq            1457 drivers/nvme/host/pci.c 		nvmeq->sq_cmds = pci_alloc_p2pmem(pdev, SQ_SIZE(nvmeq));
nvmeq            1458 drivers/nvme/host/pci.c 		if (nvmeq->sq_cmds) {
nvmeq            1459 drivers/nvme/host/pci.c 			nvmeq->sq_dma_addr = pci_p2pmem_virt_to_bus(pdev,
nvmeq            1460 drivers/nvme/host/pci.c 							nvmeq->sq_cmds);
nvmeq            1461 drivers/nvme/host/pci.c 			if (nvmeq->sq_dma_addr) {
nvmeq            1462 drivers/nvme/host/pci.c 				set_bit(NVMEQ_SQ_CMB, &nvmeq->flags);
nvmeq            1466 drivers/nvme/host/pci.c 			pci_free_p2pmem(pdev, nvmeq->sq_cmds, SQ_SIZE(nvmeq));
nvmeq            1470 drivers/nvme/host/pci.c 	nvmeq->sq_cmds = dma_alloc_coherent(dev->dev, SQ_SIZE(nvmeq),
nvmeq            1471 drivers/nvme/host/pci.c 				&nvmeq->sq_dma_addr, GFP_KERNEL);
nvmeq            1472 drivers/nvme/host/pci.c 	if (!nvmeq->sq_cmds)
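
nvme_alloc_sq_cmds() (lines 1451-1472) prefers placing the SQ in the controller memory buffer via pci_alloc_p2pmem() and falls back to ordinary coherent DMA memory; the NVMEQ_SQ_CMB flag records which path won so nvme_free_queue() (lines 1344-1356) can release it the same way. The shape of that try-then-fallback pattern, sketched with hypothetical stand-in allocators:

	#include <stdio.h>
	#include <stdlib.h>

	/* Hypothetical stand-ins for pci_alloc_p2pmem() and dma_alloc_coherent(). */
	static void *alloc_cmb(size_t size)      { (void)size; return NULL; /* pretend no CMB */ }
	static void *alloc_coherent(size_t size) { return malloc(size); }

	int main(void)
	{
		size_t sq_size = 16 * 64;	/* say, 16 entries of 64 bytes */
		int in_cmb = 0;			/* models the NVMEQ_SQ_CMB flag */
		void *sq_cmds = alloc_cmb(sq_size);

		if (sq_cmds) {
			in_cmb = 1;
		} else {
			/* fall back to host memory, as the driver does */
			sq_cmds = alloc_coherent(sq_size);
			if (!sq_cmds)
				return 1;	/* -ENOMEM in the driver */
		}
		printf("SQ at %p, in CMB: %d\n", sq_cmds, in_cmb);
		if (!in_cmb)
			free(sq_cmds);
		return 0;
	}
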
nvmeq            1479 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = &dev->queues[qid];
nvmeq            1484 drivers/nvme/host/pci.c 	nvmeq->sqes = qid ? dev->io_sqes : NVME_ADM_SQES;
nvmeq            1485 drivers/nvme/host/pci.c 	nvmeq->q_depth = depth;
nvmeq            1486 drivers/nvme/host/pci.c 	nvmeq->cqes = dma_alloc_coherent(dev->dev, CQ_SIZE(nvmeq),
nvmeq            1487 drivers/nvme/host/pci.c 					 &nvmeq->cq_dma_addr, GFP_KERNEL);
nvmeq            1488 drivers/nvme/host/pci.c 	if (!nvmeq->cqes)
nvmeq            1491 drivers/nvme/host/pci.c 	if (nvme_alloc_sq_cmds(dev, nvmeq, qid))
nvmeq            1494 drivers/nvme/host/pci.c 	nvmeq->dev = dev;
nvmeq            1495 drivers/nvme/host/pci.c 	spin_lock_init(&nvmeq->sq_lock);
nvmeq            1496 drivers/nvme/host/pci.c 	spin_lock_init(&nvmeq->cq_poll_lock);
nvmeq            1497 drivers/nvme/host/pci.c 	nvmeq->cq_head = 0;
nvmeq            1498 drivers/nvme/host/pci.c 	nvmeq->cq_phase = 1;
nvmeq            1499 drivers/nvme/host/pci.c 	nvmeq->q_db = &dev->dbs[qid * 2 * dev->db_stride];
nvmeq            1500 drivers/nvme/host/pci.c 	nvmeq->qid = qid;
nvmeq            1506 drivers/nvme/host/pci.c 	dma_free_coherent(dev->dev, CQ_SIZE(nvmeq), (void *)nvmeq->cqes,
nvmeq            1507 drivers/nvme/host/pci.c 			  nvmeq->cq_dma_addr);
nvmeq            1512 drivers/nvme/host/pci.c static int queue_request_irq(struct nvme_queue *nvmeq)
nvmeq            1514 drivers/nvme/host/pci.c 	struct pci_dev *pdev = to_pci_dev(nvmeq->dev->dev);
nvmeq            1515 drivers/nvme/host/pci.c 	int nr = nvmeq->dev->ctrl.instance;
nvmeq            1518 drivers/nvme/host/pci.c 		return pci_request_irq(pdev, nvmeq->cq_vector, nvme_irq_check,
nvmeq            1519 drivers/nvme/host/pci.c 				nvme_irq, nvmeq, "nvme%dq%d", nr, nvmeq->qid);
nvmeq            1521 drivers/nvme/host/pci.c 		return pci_request_irq(pdev, nvmeq->cq_vector, nvme_irq,
nvmeq            1522 drivers/nvme/host/pci.c 				NULL, nvmeq, "nvme%dq%d", nr, nvmeq->qid);
nvmeq            1526 drivers/nvme/host/pci.c static void nvme_init_queue(struct nvme_queue *nvmeq, u16 qid)
nvmeq            1528 drivers/nvme/host/pci.c 	struct nvme_dev *dev = nvmeq->dev;
nvmeq            1530 drivers/nvme/host/pci.c 	nvmeq->sq_tail = 0;
nvmeq            1531 drivers/nvme/host/pci.c 	nvmeq->last_sq_tail = 0;
nvmeq            1532 drivers/nvme/host/pci.c 	nvmeq->cq_head = 0;
nvmeq            1533 drivers/nvme/host/pci.c 	nvmeq->cq_phase = 1;
nvmeq            1534 drivers/nvme/host/pci.c 	nvmeq->q_db = &dev->dbs[qid * 2 * dev->db_stride];
nvmeq            1535 drivers/nvme/host/pci.c 	memset((void *)nvmeq->cqes, 0, CQ_SIZE(nvmeq));
nvmeq            1536 drivers/nvme/host/pci.c 	nvme_dbbuf_init(dev, nvmeq, qid);
nvmeq            1541 drivers/nvme/host/pci.c static int nvme_create_queue(struct nvme_queue *nvmeq, int qid, bool polled)
nvmeq            1543 drivers/nvme/host/pci.c 	struct nvme_dev *dev = nvmeq->dev;
nvmeq            1547 drivers/nvme/host/pci.c 	clear_bit(NVMEQ_DELETE_ERROR, &nvmeq->flags);
nvmeq            1556 drivers/nvme/host/pci.c 		set_bit(NVMEQ_POLLED, &nvmeq->flags);
nvmeq            1558 drivers/nvme/host/pci.c 	result = adapter_alloc_cq(dev, qid, nvmeq, vector);
nvmeq            1562 drivers/nvme/host/pci.c 	result = adapter_alloc_sq(dev, qid, nvmeq);
nvmeq            1568 drivers/nvme/host/pci.c 	nvmeq->cq_vector = vector;
nvmeq            1569 drivers/nvme/host/pci.c 	nvme_init_queue(nvmeq, qid);
nvmeq            1572 drivers/nvme/host/pci.c 		result = queue_request_irq(nvmeq);
nvmeq            1577 drivers/nvme/host/pci.c 	set_bit(NVMEQ_ENABLED, &nvmeq->flags);
nvmeq            1684 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq;
nvmeq            1705 drivers/nvme/host/pci.c 	nvmeq = &dev->queues[0];
nvmeq            1706 drivers/nvme/host/pci.c 	aqa = nvmeq->q_depth - 1;
nvmeq            1710 drivers/nvme/host/pci.c 	lo_hi_writeq(nvmeq->sq_dma_addr, dev->bar + NVME_REG_ASQ);
nvmeq            1711 drivers/nvme/host/pci.c 	lo_hi_writeq(nvmeq->cq_dma_addr, dev->bar + NVME_REG_ACQ);
nvmeq            1717 drivers/nvme/host/pci.c 	nvmeq->cq_vector = 0;
nvmeq            1718 drivers/nvme/host/pci.c 	nvme_init_queue(nvmeq, 0);
nvmeq            1719 drivers/nvme/host/pci.c 	result = queue_request_irq(nvmeq);
nvmeq            1725 drivers/nvme/host/pci.c 	set_bit(NVMEQ_ENABLED, &nvmeq->flags);
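
In the admin-queue setup above, the depth computed at line 1706 seeds the AQA register: the driver duplicates the zeroes-based depth into both 16-bit fields before the ASQ/ACQ base writes at lines 1710-1711. A quick sketch of that register assembly, assuming the standard NVMe AQA layout (ASQS in the low half, ACQS in the high half):

	#include <stdio.h>
	#include <stdint.h>

	int main(void)
	{
		uint32_t q_depth = 32;		/* admin queue depth */
		uint32_t aqa = q_depth - 1;	/* ASQS: zeroes-based SQ size */

		aqa |= aqa << 16;		/* ACQS: same depth for the CQ */
		printf("AQA = 0x%08x\n", aqa);	/* then writel(aqa, bar + NVME_REG_AQA) */
		return 0;
	}
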
nvmeq            2200 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = req->end_io_data;
nvmeq            2203 drivers/nvme/host/pci.c 	complete(&nvmeq->delete_done);
nvmeq            2208 drivers/nvme/host/pci.c 	struct nvme_queue *nvmeq = req->end_io_data;
nvmeq            2211 drivers/nvme/host/pci.c 		set_bit(NVMEQ_DELETE_ERROR, &nvmeq->flags);
nvmeq            2216 drivers/nvme/host/pci.c static int nvme_delete_queue(struct nvme_queue *nvmeq, u8 opcode)
nvmeq            2218 drivers/nvme/host/pci.c 	struct request_queue *q = nvmeq->dev->ctrl.admin_q;
nvmeq            2224 drivers/nvme/host/pci.c 	cmd.delete_queue.qid = cpu_to_le16(nvmeq->qid);
nvmeq            2231 drivers/nvme/host/pci.c 	req->end_io_data = nvmeq;
nvmeq            2233 drivers/nvme/host/pci.c 	init_completion(&nvmeq->delete_done);
nvmeq            2254 drivers/nvme/host/pci.c 		struct nvme_queue *nvmeq = &dev->queues[nr_queues + sent];
nvmeq            2256 drivers/nvme/host/pci.c 		timeout = wait_for_completion_io_timeout(&nvmeq->delete_done,
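
The delete path (lines 2200-2256) is asynchronous: nvme_delete_queue() submits the command with nvmeq stashed in end_io_data, the end_io callback fires complete(&nvmeq->delete_done), and the caller waits with wait_for_completion_io_timeout(). A rough userspace analogue of that completion handshake, using a pthread in place of the controller:

	#include <pthread.h>
	#include <stdio.h>
	#include <unistd.h>

	/* Userspace stand-ins for init_completion()/complete()/
	 * wait_for_completion_io_timeout(). */
	static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
	static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
	static int done;

	static void *delete_cq_end_io(void *arg)
	{
		(void)arg;
		usleep(1000);			/* controller processes the delete */
		pthread_mutex_lock(&lock);
		done = 1;			/* complete(&nvmeq->delete_done) */
		pthread_cond_signal(&cond);
		pthread_mutex_unlock(&lock);
		return NULL;
	}

	int main(void)
	{
		pthread_t t;

		pthread_create(&t, NULL, delete_cq_end_io, NULL);
		pthread_mutex_lock(&lock);
		while (!done)			/* wait_for_completion_io_timeout() */
			pthread_cond_wait(&cond, &lock);
		pthread_mutex_unlock(&lock);
		pthread_join(t, NULL);
		printf("queue deletion acknowledged\n");
		return 0;
	}

The real wait is bounded by a timeout, and nvme_del_cq_end() sets NVMEQ_DELETE_ERROR on failure so the caller knows the queue state is suspect.
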