Lines matching references to cdb_info (struct nvme_trans_io_cdb)
1622 struct nvme_trans_io_cdb *cdb_info, in nvme_trans_io_get_num_cmds() argument
1628 else if (cdb_info->xfer_len > max_blocks) in nvme_trans_io_get_num_cmds()
1629 return ((cdb_info->xfer_len - 1) / max_blocks) + 1; in nvme_trans_io_get_num_cmds()
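The three references above are from nvme_trans_io_get_num_cmds(), which works out how many NVMe commands are needed to cover one SCSI transfer: a ceiling division of the transfer length in blocks by the per-command maximum. Below is a minimal userspace sketch of that calculation; the struct is reduced to the fields that appear in this listing, the field widths are assumptions, and any branches of the real function that do not touch cdb_info are left out.

/*
 * Userspace sketch of the split calculation at lines 1628-1629:
 * a transfer longer than max_blocks needs ceil(xfer_len / max_blocks)
 * NVMe commands, otherwise a single one.
 *
 * The struct is reduced to the fields this listing shows; the field
 * widths are assumptions, not the kernel definition.
 */
#include <stdint.h>
#include <stdio.h>

struct nvme_trans_io_cdb {
	uint64_t lba;		/* starting logical block address */
	uint32_t xfer_len;	/* transfer length in logical blocks */
	uint8_t  fua;		/* Force Unit Access flag from the CDB */
	uint8_t  prot_info;	/* protection field from the CDB */
};

static uint16_t io_get_num_cmds(const struct nvme_trans_io_cdb *cdb_info,
				uint32_t max_blocks)
{
	if (cdb_info->xfer_len > max_blocks)
		return ((cdb_info->xfer_len - 1) / max_blocks) + 1;
	return 1;
}

int main(void)
{
	struct nvme_trans_io_cdb cdb_info = { .lba = 0, .xfer_len = 1000 };

	/* 1000 blocks with a 256-block per-command limit -> 4 commands */
	printf("%u commands\n", io_get_num_cmds(&cdb_info, 256));
	return 0;
}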
1635 struct nvme_trans_io_cdb *cdb_info) in nvme_trans_io_get_control() argument
1641 if (cdb_info->fua > 0) in nvme_trans_io_get_control()
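Line 1641 is the only cdb_info reference in nvme_trans_io_get_control(): a non-zero FUA flag taken from the CDB becomes Force Unit Access in the NVMe command's control word. A small sketch of that mapping follows; the helper takes just the flag instead of the whole struct, and the bit value is spelled out as an assumption (FUA is bit 30 of Read/Write command dword 12, which lands on bit 14 of the 16-bit control field) rather than pulled in as the kernel's NVME_RW_FUA macro.

/*
 * Sketch of the control-word mapping at line 1641: a non-zero FUA flag
 * from the CDB sets Force Unit Access in the NVMe read/write control
 * field.  The constant below is an assumption written out here, not
 * the kernel macro.
 */
#include <stdint.h>
#include <stdio.h>

#define SKETCH_NVME_RW_FUA	(1u << 14)	/* bit 30 of command dword 12 */

static uint16_t io_get_control(uint8_t fua)
{
	uint16_t control = 0;

	if (fua > 0)
		control |= SKETCH_NVME_RW_FUA;
	return control;
}

int main(void)
{
	printf("control=0x%04x\n", io_get_control(0x8));	/* FUA set   */
	printf("control=0x%04x\n", io_get_control(0));	/* FUA clear */
	return 0;
}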
1648 struct nvme_trans_io_cdb *cdb_info, u8 is_write) in nvme_trans_do_nvme_io() argument
1663 num_cmds = nvme_trans_io_get_num_cmds(hdr, cdb_info, max_blocks); in nvme_trans_do_nvme_io()
1689 (cdb_info->xfer_len - nvme_offset)); in nvme_trans_do_nvme_io()
1697 c.rw.slba = cpu_to_le64(cdb_info->lba + nvme_offset); in nvme_trans_do_nvme_io()
1699 control = nvme_trans_io_get_control(ns, cdb_info); in nvme_trans_do_nvme_io()
1703 cdb_info->lba + nvme_offset) { in nvme_trans_do_nvme_io()
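The references between 1648 and 1703 come from the loop in nvme_trans_do_nvme_io() that turns the parsed CDB into one or more NVMe read/write commands: each pass covers the next chunk of at most max_blocks blocks, offsets the starting LBA by what has already been issued, and bails out if the chunk would run past the end of the namespace (the comparison at line 1703). The userspace sketch below keeps only that chunking logic; building and queueing the real command is replaced by a printf, ns_capacity is taken to be in logical blocks, and every name outside cdb_info's fields is an assumption.

/*
 * Sketch of the chunking loop behind lines 1663-1703: walk the transfer
 * in pieces of at most max_blocks blocks, advancing the starting LBA by
 * the blocks already covered, and reject a chunk that would end past
 * the namespace capacity.  Building and submitting the real NVMe
 * command is replaced by a printf.
 */
#include <stdint.h>
#include <stdio.h>

struct nvme_trans_io_cdb {
	uint64_t lba;
	uint32_t xfer_len;
	uint8_t  fua;
	uint8_t  prot_info;
};

static int do_io_sketch(const struct nvme_trans_io_cdb *cdb_info,
			uint64_t ns_capacity, uint32_t max_blocks)
{
	uint64_t nvme_offset = 0;	/* blocks already issued */

	while (nvme_offset < cdb_info->xfer_len) {
		uint64_t remaining = cdb_info->xfer_len - nvme_offset;
		uint64_t unit_num_blocks =
			remaining < max_blocks ? remaining : max_blocks;
		uint64_t slba = cdb_info->lba + nvme_offset;

		/* Same shape as the check at line 1703: the chunk must
		 * not extend beyond the last block of the namespace. */
		if (ns_capacity - unit_num_blocks < slba)
			return -1;	/* LBA out of range */

		printf("cmd: slba=%llu nlb=%llu fua=%u\n",
		       (unsigned long long)slba,
		       (unsigned long long)unit_num_blocks,
		       cdb_info->fua);

		nvme_offset += unit_num_blocks;
	}
	return 0;
}

int main(void)
{
	struct nvme_trans_io_cdb cdb_info = {
		.lba = 100, .xfer_len = 600, .fua = 1,
	};

	/* 600 blocks in 256-block chunks on a 2097152-block namespace */
	return do_io_sketch(&cdb_info, 1u << 21, 256) ? 1 : 0;
}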
1725 struct nvme_trans_io_cdb cdb_info = { 0, }; in nvme_trans_io() local
1742 cdb_info.fua = cmd[1] & 0x8; in nvme_trans_io()
1743 cdb_info.prot_info = (cmd[1] & 0xe0) >> 5; in nvme_trans_io()
1744 if (cdb_info.prot_info && !ns->pi_type) { in nvme_trans_io()
1756 cdb_info.lba = get_unaligned_be24(&cmd[1]); in nvme_trans_io()
1757 cdb_info.xfer_len = cmd[4]; in nvme_trans_io()
1758 if (cdb_info.xfer_len == 0) in nvme_trans_io()
1759 cdb_info.xfer_len = 256; in nvme_trans_io()
1763 cdb_info.lba = get_unaligned_be32(&cmd[2]); in nvme_trans_io()
1764 cdb_info.xfer_len = get_unaligned_be16(&cmd[7]); in nvme_trans_io()
1768 cdb_info.lba = get_unaligned_be32(&cmd[2]); in nvme_trans_io()
1769 cdb_info.xfer_len = get_unaligned_be32(&cmd[6]); in nvme_trans_io()
1773 cdb_info.lba = get_unaligned_be64(&cmd[2]); in nvme_trans_io()
1774 cdb_info.xfer_len = get_unaligned_be32(&cmd[10]); in nvme_trans_io()
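Lines 1742 through 1774 show nvme_trans_io() filling cdb_info straight from the SCSI CDB: the FUA bit and the protection field from byte 1, then an opcode-dependent big-endian LBA and transfer length, with the 6-byte READ/WRITE form treating a transfer length of 0 as 256 blocks. The sketch below mirrors that extraction in userspace; the open-coded be*() helpers stand in for the kernel's get_unaligned_be{16,24,32,64}(), the opcode values are the standard SCSI ones, and the FUA/protection parse is applied only to the 10/12/16-byte forms, which are the CDBs that carry those bits.

/*
 * Sketch of the CDB parsing at lines 1742-1774: FUA and the protection
 * field come from byte 1, the LBA and transfer length come from
 * opcode-dependent big-endian fields.
 */
#include <stdint.h>
#include <stdio.h>

#define READ_6		0x08
#define WRITE_6		0x0a
#define READ_10		0x28
#define WRITE_10	0x2a
#define READ_12		0xa8
#define WRITE_12	0xaa
#define READ_16		0x88
#define WRITE_16	0x8a

struct nvme_trans_io_cdb {
	uint64_t lba;
	uint32_t xfer_len;
	uint8_t  fua;
	uint8_t  prot_info;
};

static uint32_t be16(const uint8_t *p) { return (p[0] << 8) | p[1]; }
static uint32_t be24(const uint8_t *p)
{
	return (p[0] << 16) | (p[1] << 8) | p[2];
}
static uint32_t be32(const uint8_t *p)
{
	return ((uint32_t)p[0] << 24) | (p[1] << 16) | (p[2] << 8) | p[3];
}
static uint64_t be64(const uint8_t *p)
{
	return ((uint64_t)be32(p) << 32) | be32(p + 4);
}

static int parse_io_cdb(const uint8_t *cmd, struct nvme_trans_io_cdb *cdb_info)
{
	switch (cmd[0]) {
	case READ_6:
	case WRITE_6:
		/* 6-byte CDB: LBA in bytes 1-3, length in byte 4,
		 * where a length of 0 means 256 blocks; no FUA bit. */
		cdb_info->lba = be24(&cmd[1]);
		cdb_info->xfer_len = cmd[4];
		if (cdb_info->xfer_len == 0)
			cdb_info->xfer_len = 256;
		return 0;
	case READ_10:
	case WRITE_10:
		cdb_info->lba = be32(&cmd[2]);
		cdb_info->xfer_len = be16(&cmd[7]);
		break;
	case READ_12:
	case WRITE_12:
		cdb_info->lba = be32(&cmd[2]);
		cdb_info->xfer_len = be32(&cmd[6]);
		break;
	case READ_16:
	case WRITE_16:
		cdb_info->lba = be64(&cmd[2]);
		cdb_info->xfer_len = be32(&cmd[10]);
		break;
	default:
		return -1;
	}

	/* The 10/12/16-byte forms carry FUA (bit 3) and the protection
	 * field (bits 7-5) in byte 1, as at lines 1742-1743. */
	cdb_info->fua = cmd[1] & 0x8;
	cdb_info->prot_info = (cmd[1] & 0xe0) >> 5;
	return 0;
}

int main(void)
{
	/* READ(10): LBA 0x1000, 8 blocks, FUA set */
	const uint8_t cmd[10] = { READ_10, 0x08, 0, 0, 0x10, 0, 0, 0, 8, 0 };
	struct nvme_trans_io_cdb cdb_info = { 0 };

	parse_io_cdb(cmd, &cdb_info);
	printf("lba=%llu len=%u fua=%u prot=%u\n",
	       (unsigned long long)cdb_info.lba, cdb_info.xfer_len,
	       cdb_info.fua, cdb_info.prot_info);
	return 0;
}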
1809 if (xfer_bytes != (cdb_info.xfer_len << ns->lba_shift)) { in nvme_trans_io()
1815 if (cdb_info.xfer_len == 0) in nvme_trans_io()
1819 res = nvme_trans_do_nvme_io(ns, hdr, &cdb_info, is_write); in nvme_trans_io()
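The last three references (1809-1819) are the checks just before dispatch: the byte count supplied with the request must equal xfer_len blocks scaled by the namespace block size (xfer_len << lba_shift), a zero-length transfer completes successfully without issuing anything, and only then is the I/O handed to nvme_trans_do_nvme_io(). A small sketch of those two checks follows; lba_shift is log2 of the logical block size, and every name outside cdb_info's fields is an assumption.

/*
 * Sketch of the pre-dispatch checks at lines 1809-1819: the buffer size
 * handed in with the request must match xfer_len blocks scaled by the
 * namespace block size, and a zero-length transfer is a successful
 * no-op rather than an error.
 */
#include <stdint.h>
#include <stdio.h>

struct nvme_trans_io_cdb {
	uint64_t lba;
	uint32_t xfer_len;
	uint8_t  fua;
	uint8_t  prot_info;
};

/* Returns -1 for a length mismatch, 0 for the zero-length no-op, and 1
 * when I/O would be issued (placeholder for nvme_trans_do_nvme_io()). */
static int check_and_dispatch(const struct nvme_trans_io_cdb *cdb_info,
			      uint64_t xfer_bytes, unsigned int lba_shift)
{
	if (xfer_bytes != ((uint64_t)cdb_info->xfer_len << lba_shift))
		return -1;	/* buffer does not match the CDB */

	if (cdb_info->xfer_len == 0)
		return 0;	/* nothing to do, not an error */

	return 1;		/* would issue the NVMe command(s) */
}

int main(void)
{
	struct nvme_trans_io_cdb cdb_info = { .xfer_len = 8 };

	/* 8 blocks of 512 bytes (lba_shift = 9) -> 4096 bytes expected */
	printf("%d\n", check_and_dispatch(&cdb_info, 4096, 9));
	printf("%d\n", check_and_dispatch(&cdb_info, 2048, 9));
	return 0;
}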