tfcp_req          263 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq		*tfcp_req;
tfcp_req          390 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req =
tfcp_req          393 drivers/nvme/target/fcloop.c 	kfree(tfcp_req);
tfcp_req          397 drivers/nvme/target/fcloop.c fcloop_tfcp_req_put(struct fcloop_fcpreq *tfcp_req)
tfcp_req          399 drivers/nvme/target/fcloop.c 	kref_put(&tfcp_req->ref, fcloop_tfcp_req_free);
tfcp_req          403 drivers/nvme/target/fcloop.c fcloop_tfcp_req_get(struct fcloop_fcpreq *tfcp_req)
tfcp_req          405 drivers/nvme/target/fcloop.c 	return kref_get_unless_zero(&tfcp_req->ref);
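
The three helpers at fcloop.c:390-405 are the reference-counting core of struct fcloop_fcpreq: the release callback frees the request, fcloop_tfcp_req_put() drops a reference via kref_put(), and fcloop_tfcp_req_get() succeeds only while the object is still live (kref_get_unless_zero()). A minimal sketch of that kref pattern follows; the foo_* names are hypothetical stand-ins, the fields only mirror the ones this listing touches, and the later sketches in this section reuse these declarations.

#include <linux/kernel.h>
#include <linux/kref.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/workqueue.h>

/* initiator-side I/O states, mirroring the INI_IO_* values in the listing */
enum foo_inistate { INI_IO_START, INI_IO_ACTIVE, INI_IO_ABORTED, INI_IO_COMPLETED };

struct host_req;				/* stand-in for struct nvmefc_fcp_req */

struct foo_req {				/* stand-in for struct fcloop_fcpreq */
	struct host_req		*host_req;	/* ~ tfcp_req->fcpreq */
	spinlock_t		lock;		/* ~ tfcp_req->reqlock */
	enum foo_inistate	inistate;
	bool			active;
	bool			aborted;
	int			status;
	struct work_struct	rcv_work;	/* ~ fcp_rcv_work */
	struct work_struct	abort_work;	/* ~ abort_rcv_work */
	struct kref		ref;
};

/* release callback: runs once the last reference has been dropped */
static void foo_req_free(struct kref *ref)
{
	struct foo_req *req = container_of(ref, struct foo_req, ref);

	kfree(req);
}

static void foo_req_put(struct foo_req *req)
{
	kref_put(&req->ref, foo_req_free);
}

/* returns 0 if the request is already on its way to being freed */
static int foo_req_get(struct foo_req *req)
{
	return kref_get_unless_zero(&req->ref);
}
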
tfcp_req          410 drivers/nvme/target/fcloop.c 			struct fcloop_fcpreq *tfcp_req, int status)
tfcp_req          417 drivers/nvme/target/fcloop.c 		inireq->tfcp_req = NULL;
tfcp_req          425 drivers/nvme/target/fcloop.c 	fcloop_tfcp_req_put(tfcp_req);
tfcp_req          431 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req =
tfcp_req          433 drivers/nvme/target/fcloop.c 	struct nvmefc_fcp_req *fcpreq = tfcp_req->fcpreq;
tfcp_req          437 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          438 drivers/nvme/target/fcloop.c 	switch (tfcp_req->inistate) {
tfcp_req          440 drivers/nvme/target/fcloop.c 		tfcp_req->inistate = INI_IO_ACTIVE;
tfcp_req          446 drivers/nvme/target/fcloop.c 		spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          450 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          455 drivers/nvme/target/fcloop.c 		ret = nvmet_fc_rcv_fcp_req(tfcp_req->tport->targetport,
tfcp_req          456 drivers/nvme/target/fcloop.c 				&tfcp_req->tgt_fcp_req,
tfcp_req          459 drivers/nvme/target/fcloop.c 		fcloop_call_host_done(fcpreq, tfcp_req, ret);
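
fcloop_fcp_recv_work() (fcloop.c:431-459) is the work handler that hands a host-submitted command to the target port: the inistate transition from INI_IO_START to INI_IO_ACTIVE happens under reqlock, the call into nvmet_fc_rcv_fcp_req() happens outside it, and a failure is completed back to the host through fcloop_call_host_done(). A hedged sketch of that lock-then-dispatch shape, reusing the foo_* declarations above and replacing the dispatch and completion calls with stubs:

#include <linux/bug.h>
#include <linux/errno.h>

/* stand-in for nvmet_fc_rcv_fcp_req(); a real implementation would queue to the target */
static int foo_dispatch_to_target(struct foo_req *req)
{
	return 0;
}

/* stand-in for fcloop_call_host_done() at fcloop.c:410-425: completes the host
 * request back to the initiator (elided here) and drops one reference */
static void foo_complete_to_host(struct host_req *hreq, struct foo_req *req, int status)
{
	foo_req_put(req);
}

static void foo_recv_work(struct work_struct *work)
{
	struct foo_req *req = container_of(work, struct foo_req, rcv_work);
	struct host_req *hreq = req->host_req;
	bool aborted = false;
	int ret = 0;

	/* advance the state machine only while holding the request lock */
	spin_lock_irq(&req->lock);
	switch (req->inistate) {
	case INI_IO_START:
		req->inistate = INI_IO_ACTIVE;
		break;
	case INI_IO_ABORTED:
		aborted = true;
		break;
	default:
		spin_unlock_irq(&req->lock);
		WARN_ON(1);
		return;
	}
	spin_unlock_irq(&req->lock);

	/* deliver to the target (or fail the I/O) outside the lock */
	if (aborted)
		ret = -ECANCELED;
	else
		ret = foo_dispatch_to_target(req);
	if (ret)
		foo_complete_to_host(hreq, req, ret);
}
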
tfcp_req          467 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req =
tfcp_req          472 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          473 drivers/nvme/target/fcloop.c 	fcpreq = tfcp_req->fcpreq;
tfcp_req          474 drivers/nvme/target/fcloop.c 	switch (tfcp_req->inistate) {
tfcp_req          481 drivers/nvme/target/fcloop.c 		spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          485 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          489 drivers/nvme/target/fcloop.c 		fcloop_tfcp_req_put(tfcp_req);
tfcp_req          493 drivers/nvme/target/fcloop.c 	if (tfcp_req->tport->targetport)
tfcp_req          494 drivers/nvme/target/fcloop.c 		nvmet_fc_rcv_fcp_abort(tfcp_req->tport->targetport,
tfcp_req          495 drivers/nvme/target/fcloop.c 					&tfcp_req->tgt_fcp_req);
tfcp_req          497 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          498 drivers/nvme/target/fcloop.c 	tfcp_req->fcpreq = NULL;
tfcp_req          499 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          501 drivers/nvme/target/fcloop.c 	fcloop_call_host_done(fcpreq, tfcp_req, -ECANCELED);
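
fcloop_fcp_abort_recv_work() (fcloop.c:467-501) handles an abort on the target side: the host fcpreq pointer is sampled and later cleared under reqlock, the target port is notified via nvmet_fc_rcv_fcp_abort(), and the host request is finally completed with -ECANCELED; if the host request is already gone, the handler only drops the reference it was handed. A short sketch of that detach-notify-complete ordering, using the same hypothetical foo_* declarations and a stubbed target notification:

/* stand-in for nvmet_fc_rcv_fcp_abort() */
static void foo_notify_target_abort(struct foo_req *req)
{
}

static void foo_abort_recv_work(struct work_struct *work)
{
	struct foo_req *req = container_of(work, struct foo_req, abort_work);
	struct host_req *hreq;

	/* sample the host request pointer under the lock */
	spin_lock_irq(&req->lock);
	hreq = req->host_req;
	spin_unlock_irq(&req->lock);

	if (!hreq) {
		/* host side already gave up on this I/O: drop the reference
		 * the abort path handed to this work item and stop */
		foo_req_put(req);
		return;
	}

	/* tell the target port about the abort, outside the lock */
	foo_notify_target_abort(req);

	/* detach the host request before completing it */
	spin_lock_irq(&req->lock);
	req->host_req = NULL;
	spin_unlock_irq(&req->lock);

	foo_complete_to_host(hreq, req, -ECANCELED);
}
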
tfcp_req          512 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req =
tfcp_req          516 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          517 drivers/nvme/target/fcloop.c 	fcpreq = tfcp_req->fcpreq;
tfcp_req          518 drivers/nvme/target/fcloop.c 	tfcp_req->inistate = INI_IO_COMPLETED;
tfcp_req          519 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          521 drivers/nvme/target/fcloop.c 	fcloop_call_host_done(fcpreq, tfcp_req, tfcp_req->status);
tfcp_req          533 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req;
tfcp_req          538 drivers/nvme/target/fcloop.c 	tfcp_req = kzalloc(sizeof(*tfcp_req), GFP_ATOMIC);
tfcp_req          539 drivers/nvme/target/fcloop.c 	if (!tfcp_req)
tfcp_req          543 drivers/nvme/target/fcloop.c 	inireq->tfcp_req = tfcp_req;
tfcp_req          546 drivers/nvme/target/fcloop.c 	tfcp_req->fcpreq = fcpreq;
tfcp_req          547 drivers/nvme/target/fcloop.c 	tfcp_req->tport = rport->targetport->private;
tfcp_req          548 drivers/nvme/target/fcloop.c 	tfcp_req->inistate = INI_IO_START;
tfcp_req          549 drivers/nvme/target/fcloop.c 	spin_lock_init(&tfcp_req->reqlock);
tfcp_req          550 drivers/nvme/target/fcloop.c 	INIT_WORK(&tfcp_req->fcp_rcv_work, fcloop_fcp_recv_work);
tfcp_req          551 drivers/nvme/target/fcloop.c 	INIT_WORK(&tfcp_req->abort_rcv_work, fcloop_fcp_abort_recv_work);
tfcp_req          552 drivers/nvme/target/fcloop.c 	INIT_WORK(&tfcp_req->tio_done_work, fcloop_tgt_fcprqst_done_work);
tfcp_req          553 drivers/nvme/target/fcloop.c 	kref_init(&tfcp_req->ref);
tfcp_req          555 drivers/nvme/target/fcloop.c 	schedule_work(&tfcp_req->fcp_rcv_work);
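
fcloop_fcp_req() (fcloop.c:533-555) is the host-side queue entry point: it allocates the loopback request with GFP_ATOMIC, links it into the initiator request via inireq->tfcp_req, seeds inistate with INI_IO_START, initializes the lock, the work items and the kref, and defers delivery to the target by scheduling fcp_rcv_work. A condensed sketch of that setup sequence, again with hypothetical foo_* names; the tport assignment and the third work item (tio_done_work) visible in the listing are omitted here:

struct foo_ini_req {			/* stand-in for struct fcloop_ini_fcpreq */
	struct foo_req *req;		/* ~ inireq->tfcp_req */
};

static int foo_queue_req(struct foo_ini_req *inireq, struct host_req *hreq)
{
	struct foo_req *req;

	/* submission path: no sleeping allocation */
	req = kzalloc(sizeof(*req), GFP_ATOMIC);
	if (!req)
		return -ENOMEM;

	inireq->req = req;			/* host-side back-pointer */
	req->host_req = hreq;
	req->inistate = INI_IO_START;
	spin_lock_init(&req->lock);
	INIT_WORK(&req->rcv_work, foo_recv_work);
	INIT_WORK(&req->abort_work, foo_abort_recv_work);
	kref_init(&req->ref);			/* initial reference */

	/* defer delivery to the target side */
	schedule_work(&req->rcv_work);

	return 0;
}
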
tfcp_req          618 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req = tgt_fcp_req_to_fcpreq(tgt_fcpreq);
tfcp_req          624 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          625 drivers/nvme/target/fcloop.c 	fcpreq = tfcp_req->fcpreq;
tfcp_req          626 drivers/nvme/target/fcloop.c 	active = tfcp_req->active;
tfcp_req          627 drivers/nvme/target/fcloop.c 	aborted = tfcp_req->aborted;
tfcp_req          628 drivers/nvme/target/fcloop.c 	tfcp_req->active = true;
tfcp_req          629 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          637 drivers/nvme/target/fcloop.c 		spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          638 drivers/nvme/target/fcloop.c 		tfcp_req->active = false;
tfcp_req          639 drivers/nvme/target/fcloop.c 		spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          688 drivers/nvme/target/fcloop.c 		tfcp_req->status = 0;
tfcp_req          696 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          697 drivers/nvme/target/fcloop.c 	tfcp_req->active = false;
tfcp_req          698 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          711 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req = tgt_fcp_req_to_fcpreq(tgt_fcpreq);
tfcp_req          718 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          719 drivers/nvme/target/fcloop.c 	tfcp_req->aborted = true;
tfcp_req          720 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          722 drivers/nvme/target/fcloop.c 	tfcp_req->status = NVME_SC_INTERNAL;
tfcp_req          735 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req = tgt_fcp_req_to_fcpreq(tgt_fcpreq);
tfcp_req          737 drivers/nvme/target/fcloop.c 	schedule_work(&tfcp_req->tio_done_work);
tfcp_req          754 drivers/nvme/target/fcloop.c 	struct fcloop_fcpreq *tfcp_req;
tfcp_req          758 drivers/nvme/target/fcloop.c 	tfcp_req = inireq->tfcp_req;
tfcp_req          759 drivers/nvme/target/fcloop.c 	if (tfcp_req)
tfcp_req          760 drivers/nvme/target/fcloop.c 		fcloop_tfcp_req_get(tfcp_req);
tfcp_req          763 drivers/nvme/target/fcloop.c 	if (!tfcp_req)
tfcp_req          768 drivers/nvme/target/fcloop.c 	spin_lock_irq(&tfcp_req->reqlock);
tfcp_req          769 drivers/nvme/target/fcloop.c 	switch (tfcp_req->inistate) {
tfcp_req          772 drivers/nvme/target/fcloop.c 		tfcp_req->inistate = INI_IO_ABORTED;
tfcp_req          778 drivers/nvme/target/fcloop.c 		spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          782 drivers/nvme/target/fcloop.c 	spin_unlock_irq(&tfcp_req->reqlock);
tfcp_req          786 drivers/nvme/target/fcloop.c 		WARN_ON(!schedule_work(&tfcp_req->abort_rcv_work));
tfcp_req          792 drivers/nvme/target/fcloop.c 		fcloop_tfcp_req_put(tfcp_req);
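
fcloop_fcp_abort() (fcloop.c:754-792), the host-side abort entry point, is where the kref helpers pay off: the initiator's tfcp_req pointer is only used after kref_get_unless_zero() confirms the request is still live, the abort work is scheduled only from a state that allows it (with a WARN_ON if the work was already queued), and the temporary reference is dropped on the paths that do not hand it to the abort handler. A compact sketch of that take-a-reference-before-acting shape, reusing the declarations above and eliding the locking the driver uses around the initiator-side pointer:

static void foo_host_abort(struct foo_ini_req *inireq)
{
	struct foo_req *req = inireq->req;	/* initiator-side locking elided */
	bool abortio = true;

	/* only touch the request if it is still alive */
	if (!req || !foo_req_get(req))
		return;

	spin_lock_irq(&req->lock);
	switch (req->inistate) {
	case INI_IO_START:
	case INI_IO_ACTIVE:
		req->inistate = INI_IO_ABORTED;
		break;
	case INI_IO_COMPLETED:
		abortio = false;		/* already completed, nothing to abort */
		break;
	default:
		spin_unlock_irq(&req->lock);
		WARN_ON(1);
		return;
	}
	spin_unlock_irq(&req->lock);

	if (abortio)
		/* the abort work inherits the reference taken above */
		WARN_ON(!schedule_work(&req->abort_work));
	else
		foo_req_put(req);		/* drop the reference we just took */
}
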