Lines Matching refs:hdr
233 static int nvme_trans_copy_to_user(struct sg_io_hdr *hdr, void *from, in nvme_trans_copy_to_user() argument
241 if (hdr->iovec_count > 0) { in nvme_trans_copy_to_user()
244 for (i = 0; i < hdr->iovec_count; i++) { in nvme_trans_copy_to_user()
245 if (copy_from_user(&sgl, hdr->dxferp + in nvme_trans_copy_to_user()
261 if (copy_to_user(hdr->dxferp, from, n)) in nvme_trans_copy_to_user()
268 static int nvme_trans_copy_from_user(struct sg_io_hdr *hdr, void *to, in nvme_trans_copy_from_user() argument
276 if (hdr->iovec_count > 0) { in nvme_trans_copy_from_user()
279 for (i = 0; i < hdr->iovec_count; i++) { in nvme_trans_copy_from_user()
280 if (copy_from_user(&sgl, hdr->dxferp + in nvme_trans_copy_from_user()
295 if (copy_from_user(to, hdr->dxferp, n)) in nvme_trans_copy_from_user()
302 static int nvme_trans_completion(struct sg_io_hdr *hdr, u8 status, u8 sense_key, in nvme_trans_completion() argument
309 hdr->status = SAM_STAT_GOOD; in nvme_trans_completion()
310 hdr->masked_status = GOOD; in nvme_trans_completion()
311 hdr->host_status = DID_OK; in nvme_trans_completion()
312 hdr->driver_status = DRIVER_OK; in nvme_trans_completion()
313 hdr->sb_len_wr = 0; in nvme_trans_completion()
315 hdr->status = status; in nvme_trans_completion()
316 hdr->masked_status = status >> 1; in nvme_trans_completion()
317 hdr->host_status = DID_OK; in nvme_trans_completion()
318 hdr->driver_status = DRIVER_OK; in nvme_trans_completion()
326 xfer_len = min_t(u8, hdr->mx_sb_len, DESC_FMT_SENSE_DATA_SIZE); in nvme_trans_completion()
327 hdr->sb_len_wr = xfer_len; in nvme_trans_completion()
328 if (copy_to_user(hdr->sbp, resp, xfer_len) > 0) in nvme_trans_completion()
341 static int nvme_trans_status_code(struct sg_io_hdr *hdr, int nvme_sc) in nvme_trans_status_code() argument
517 res = nvme_trans_completion(hdr, status, sense_key, asc, ascq); in nvme_trans_status_code()
524 struct sg_io_hdr *hdr, u8 *inq_response, in nvme_trans_standard_inquiry_page() argument
539 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_standard_inquiry_page()
564 return nvme_trans_copy_to_user(hdr, inq_response, xfer_len); in nvme_trans_standard_inquiry_page()
568 struct sg_io_hdr *hdr, u8 *inq_response, in nvme_trans_supported_vpd_pages() argument
584 return nvme_trans_copy_to_user(hdr, inq_response, xfer_len); in nvme_trans_supported_vpd_pages()
588 struct sg_io_hdr *hdr, u8 *inq_response, in nvme_trans_unit_serial_page() argument
600 return nvme_trans_copy_to_user(hdr, inq_response, xfer_len); in nvme_trans_unit_serial_page()
603 static int nvme_trans_device_id_page(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_device_id_page() argument
620 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_device_id_page()
648 return nvme_trans_completion(hdr, in nvme_trans_device_id_page()
666 return nvme_trans_copy_to_user(hdr, inq_response, xfer_len); in nvme_trans_device_id_page()
669 static int nvme_trans_ext_inq_page(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_ext_inq_page() argument
692 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_ext_inq_page()
708 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_ext_inq_page()
727 res = nvme_trans_copy_to_user(hdr, inq_response, xfer_len); in nvme_trans_ext_inq_page()
734 static int nvme_trans_bdev_limits_page(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_bdev_limits_page() argument
751 return nvme_trans_copy_to_user(hdr, inq_response, 0x3c); in nvme_trans_bdev_limits_page()
754 static int nvme_trans_bdev_char_page(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_bdev_char_page() argument
775 res = nvme_trans_copy_to_user(hdr, inq_response, xfer_len); in nvme_trans_bdev_char_page()
784 static int nvme_trans_log_supp_pages(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_log_supp_pages() argument
805 res = nvme_trans_copy_to_user(hdr, log_response, xfer_len); in nvme_trans_log_supp_pages()
813 struct sg_io_hdr *hdr, int alloc_len) in nvme_trans_log_info_exceptions() argument
852 res = nvme_trans_copy_to_user(hdr, log_response, xfer_len); in nvme_trans_log_info_exceptions()
859 static int nvme_trans_log_temperature(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_log_temperature() argument
913 res = nvme_trans_copy_to_user(hdr, log_response, xfer_len); in nvme_trans_log_temperature()
946 static int nvme_trans_fill_blk_desc(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_fill_blk_desc() argument
962 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_fill_blk_desc()
990 struct sg_io_hdr *hdr, u8 *resp, in nvme_trans_fill_control_page() argument
1012 struct sg_io_hdr *hdr, in nvme_trans_fill_caching_page() argument
1026 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_fill_caching_page()
1039 struct sg_io_hdr *hdr, u8 *resp, in nvme_trans_fill_pow_cnd_page() argument
1053 struct sg_io_hdr *hdr, u8 *resp, in nvme_trans_fill_inf_exc_page() argument
1067 static int nvme_trans_fill_all_pages(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_fill_all_pages() argument
1078 res = nvme_trans_fill_caching_page(ns, hdr, &resp[mode_pages_offset_1], in nvme_trans_fill_all_pages()
1082 res = nvme_trans_fill_control_page(ns, hdr, &resp[mode_pages_offset_2], in nvme_trans_fill_all_pages()
1086 res = nvme_trans_fill_pow_cnd_page(ns, hdr, &resp[mode_pages_offset_3], in nvme_trans_fill_all_pages()
1090 return nvme_trans_fill_inf_exc_page(ns, hdr, &resp[mode_pages_offset_4], in nvme_trans_fill_all_pages()
1105 struct sg_io_hdr *hdr, u8 *cmd, in nvme_trans_mode_page_create() argument
1109 struct sg_io_hdr *hdr, u8 *, int), in nvme_trans_mode_page_create()
1145 res = nvme_trans_fill_blk_desc(ns, hdr, in nvme_trans_mode_page_create()
1151 res = mode_page_fill_func(ns, hdr, &response[mode_pages_offset_1], in nvme_trans_mode_page_create()
1157 res = nvme_trans_copy_to_user(hdr, response, xfer_len); in nvme_trans_mode_page_create()
1205 static int nvme_trans_power_state(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_power_state() argument
1216 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_power_state()
1254 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_power_state()
1261 return nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_power_state()
1264 static int nvme_trans_send_activate_fw_cmd(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_send_activate_fw_cmd() argument
1275 return nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_send_activate_fw_cmd()
1278 static int nvme_trans_send_download_fw_cmd(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_send_download_fw_cmd() argument
1286 if (hdr->iovec_count > 0) { in nvme_trans_send_download_fw_cmd()
1288 return nvme_trans_completion(hdr, in nvme_trans_send_download_fw_cmd()
1301 hdr->dxferp, tot_len, NULL, 0); in nvme_trans_send_download_fw_cmd()
1302 return nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_send_download_fw_cmd()
1362 static int nvme_trans_modesel_get_mp(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_modesel_get_mp() argument
1375 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_modesel_get_mp()
1382 res = nvme_trans_completion(hdr, in nvme_trans_modesel_get_mp()
1391 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_modesel_get_mp()
1400 static int nvme_trans_modesel_data(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_modesel_data() argument
1419 res = nvme_trans_copy_from_user(hdr, parm_list, parm_list_len); in nvme_trans_modesel_data()
1441 res = nvme_trans_completion(hdr, in nvme_trans_modesel_data()
1456 res = nvme_trans_modesel_get_mp(ns, hdr, &parm_list[index], in nvme_trans_modesel_data()
1472 struct sg_io_hdr *hdr) in nvme_trans_fmt_set_blk_size_count() argument
1490 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_fmt_set_blk_size_count()
1508 static int nvme_trans_fmt_get_parm_header(struct sg_io_hdr *hdr, u8 len, in nvme_trans_fmt_get_parm_header() argument
1520 res = nvme_trans_copy_from_user(hdr, parm_list, len); in nvme_trans_fmt_get_parm_header()
1526 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_fmt_get_parm_header()
1534 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_fmt_get_parm_header()
1556 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_fmt_get_parm_header()
1568 static int nvme_trans_fmt_send_cmd(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_fmt_send_cmd() argument
1583 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_fmt_send_cmd()
1597 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_fmt_send_cmd()
1602 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_fmt_send_cmd()
1615 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_fmt_send_cmd()
1621 static inline u32 nvme_trans_io_get_num_cmds(struct sg_io_hdr *hdr, in nvme_trans_io_get_num_cmds() argument
1626 if (hdr->iovec_count > 0) in nvme_trans_io_get_num_cmds()
1627 return hdr->iovec_count; in nvme_trans_io_get_num_cmds()
1647 static int nvme_trans_do_nvme_io(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_do_nvme_io() argument
1663 num_cmds = nvme_trans_io_get_num_cmds(hdr, cdb_info, max_blocks); in nvme_trans_do_nvme_io()
1676 if (hdr->iovec_count > 0) { in nvme_trans_do_nvme_io()
1679 retcode = copy_from_user(&sgl, hdr->dxferp + in nvme_trans_do_nvme_io()
1691 next_mapping_addr = hdr->dxferp + in nvme_trans_do_nvme_io()
1715 return nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_do_nvme_io()
1721 static int nvme_trans_io(struct nvme_ns *ns, struct sg_io_hdr *hdr, u8 is_write, in nvme_trans_io() argument
1745 return nvme_trans_completion(hdr, in nvme_trans_io()
1783 if (hdr->iovec_count > 0) { in nvme_trans_io()
1784 for (i = 0; i < hdr->iovec_count; i++) { in nvme_trans_io()
1785 not_copied = copy_from_user(&sgl, hdr->dxferp + in nvme_trans_io()
1793 res = nvme_trans_completion(hdr, in nvme_trans_io()
1802 sum_iov_len = hdr->dxfer_len; in nvme_trans_io()
1806 xfer_bytes = min(((u64)hdr->dxfer_len), sum_iov_len); in nvme_trans_io()
1819 res = nvme_trans_do_nvme_io(ns, hdr, &cdb_info, is_write); in nvme_trans_io()
1827 static int nvme_trans_inquiry(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_inquiry() argument
1849 res = nvme_trans_standard_inquiry_page(ns, hdr, in nvme_trans_inquiry()
1852 res = nvme_trans_completion(hdr, in nvme_trans_inquiry()
1861 res = nvme_trans_supported_vpd_pages(ns, hdr, in nvme_trans_inquiry()
1865 res = nvme_trans_unit_serial_page(ns, hdr, inq_response, in nvme_trans_inquiry()
1869 res = nvme_trans_device_id_page(ns, hdr, inq_response, in nvme_trans_inquiry()
1873 res = nvme_trans_ext_inq_page(ns, hdr, alloc_len); in nvme_trans_inquiry()
1876 res = nvme_trans_bdev_limits_page(ns, hdr, inq_response, in nvme_trans_inquiry()
1880 res = nvme_trans_bdev_char_page(ns, hdr, alloc_len); in nvme_trans_inquiry()
1883 res = nvme_trans_completion(hdr, in nvme_trans_inquiry()
1896 static int nvme_trans_log_sense(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_log_sense() argument
1905 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_log_sense()
1914 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_log_sense()
1922 res = nvme_trans_log_supp_pages(ns, hdr, alloc_len); in nvme_trans_log_sense()
1925 res = nvme_trans_log_info_exceptions(ns, hdr, alloc_len); in nvme_trans_log_sense()
1928 res = nvme_trans_log_temperature(ns, hdr, alloc_len); in nvme_trans_log_sense()
1931 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_log_sense()
1941 static int nvme_trans_mode_select(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_mode_select() argument
1964 return nvme_trans_modesel_data(ns, hdr, cmd, parm_list_len, in nvme_trans_mode_select()
1971 static int nvme_trans_mode_sense(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_mode_sense() argument
1987 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_mode_sense()
1995 res = nvme_trans_mode_page_create(ns, hdr, cmd, alloc_len, in nvme_trans_mode_sense()
2001 res = nvme_trans_mode_page_create(ns, hdr, cmd, alloc_len, in nvme_trans_mode_sense()
2007 res = nvme_trans_mode_page_create(ns, hdr, cmd, alloc_len, in nvme_trans_mode_sense()
2013 res = nvme_trans_mode_page_create(ns, hdr, cmd, alloc_len, in nvme_trans_mode_sense()
2019 res = nvme_trans_mode_page_create(ns, hdr, cmd, alloc_len, in nvme_trans_mode_sense()
2025 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_mode_sense()
2035 static int nvme_trans_read_capacity(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_read_capacity() argument
2056 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_read_capacity()
2068 res = nvme_trans_copy_to_user(hdr, response, xfer_len); in nvme_trans_read_capacity()
2076 static int nvme_trans_report_luns(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_report_luns() argument
2091 return nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_report_luns()
2098 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_report_luns()
2107 res = nvme_trans_completion(hdr, in nvme_trans_report_luns()
2135 res = nvme_trans_copy_to_user(hdr, response, xfer_len); in nvme_trans_report_luns()
2143 static int nvme_trans_request_sense(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_request_sense() argument
2185 res = nvme_trans_copy_to_user(hdr, response, xfer_len); in nvme_trans_request_sense()
2193 struct sg_io_hdr *hdr, in nvme_trans_security_protocol() argument
2196 return nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_security_protocol()
2202 struct sg_io_hdr *hdr) in nvme_trans_synchronize_cache() argument
2212 return nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_synchronize_cache()
2215 static int nvme_trans_start_stop(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_start_stop() argument
2227 return nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_start_stop()
2233 int res = nvme_trans_synchronize_cache(ns, hdr); in nvme_trans_start_stop()
2238 return nvme_trans_power_state(ns, hdr, pc, pcmod, start); in nvme_trans_start_stop()
2242 static int nvme_trans_format_unit(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_format_unit() argument
2262 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_format_unit()
2274 res = nvme_trans_fmt_get_parm_header(hdr, parm_hdr_len, in nvme_trans_format_unit()
2281 res = nvme_trans_send_activate_fw_cmd(ns, hdr, 0); in nvme_trans_format_unit()
2284 res = nvme_trans_fmt_set_blk_size_count(ns, hdr); in nvme_trans_format_unit()
2288 res = nvme_trans_fmt_send_cmd(ns, hdr, nvme_pf_code); in nvme_trans_format_unit()
2295 struct sg_io_hdr *hdr, in nvme_trans_test_unit_ready() argument
2301 return nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_test_unit_ready()
2305 return nvme_trans_completion(hdr, SAM_STAT_GOOD, NO_SENSE, 0, 0); in nvme_trans_test_unit_ready()
2308 static int nvme_trans_write_buffer(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_write_buffer() argument
2318 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_write_buffer()
2325 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_write_buffer()
2335 res = nvme_trans_send_download_fw_cmd(ns, hdr, nvme_admin_download_fw, in nvme_trans_write_buffer()
2340 res = nvme_trans_send_activate_fw_cmd(ns, hdr, buffer_id); in nvme_trans_write_buffer()
2343 res = nvme_trans_send_download_fw_cmd(ns, hdr, nvme_admin_download_fw, in nvme_trans_write_buffer()
2348 res = nvme_trans_send_activate_fw_cmd(ns, hdr, buffer_id); in nvme_trans_write_buffer()
2351 res = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_trans_write_buffer()
2374 static int nvme_trans_unmap(struct nvme_ns *ns, struct sg_io_hdr *hdr, in nvme_trans_unmap() argument
2391 res = nvme_trans_copy_from_user(hdr, plist, list_len); in nvme_trans_unmap()
2421 res = nvme_trans_status_code(hdr, nvme_sc); in nvme_trans_unmap()
2429 static int nvme_scsi_translate(struct nvme_ns *ns, struct sg_io_hdr *hdr) in nvme_scsi_translate() argument
2435 if (hdr->cmdp == NULL) in nvme_scsi_translate()
2437 if (copy_from_user(cmd, hdr->cmdp, hdr->cmd_len)) in nvme_scsi_translate()
2444 retcode = nvme_trans_status_code(hdr, NVME_SC_SUCCESS); in nvme_scsi_translate()
2455 retcode = nvme_trans_io(ns, hdr, 0, cmd); in nvme_scsi_translate()
2461 retcode = nvme_trans_io(ns, hdr, 1, cmd); in nvme_scsi_translate()
2464 retcode = nvme_trans_inquiry(ns, hdr, cmd); in nvme_scsi_translate()
2467 retcode = nvme_trans_log_sense(ns, hdr, cmd); in nvme_scsi_translate()
2471 retcode = nvme_trans_mode_select(ns, hdr, cmd); in nvme_scsi_translate()
2475 retcode = nvme_trans_mode_sense(ns, hdr, cmd); in nvme_scsi_translate()
2478 retcode = nvme_trans_read_capacity(ns, hdr, cmd, 0); in nvme_scsi_translate()
2483 retcode = nvme_trans_read_capacity(ns, hdr, cmd, 1); in nvme_scsi_translate()
2490 retcode = nvme_trans_report_luns(ns, hdr, cmd); in nvme_scsi_translate()
2493 retcode = nvme_trans_request_sense(ns, hdr, cmd); in nvme_scsi_translate()
2497 retcode = nvme_trans_security_protocol(ns, hdr, cmd); in nvme_scsi_translate()
2500 retcode = nvme_trans_start_stop(ns, hdr, cmd); in nvme_scsi_translate()
2503 retcode = nvme_trans_synchronize_cache(ns, hdr); in nvme_scsi_translate()
2506 retcode = nvme_trans_format_unit(ns, hdr, cmd); in nvme_scsi_translate()
2509 retcode = nvme_trans_test_unit_ready(ns, hdr, cmd); in nvme_scsi_translate()
2512 retcode = nvme_trans_write_buffer(ns, hdr, cmd); in nvme_scsi_translate()
2515 retcode = nvme_trans_unmap(ns, hdr, cmd); in nvme_scsi_translate()
2519 retcode = nvme_trans_completion(hdr, SAM_STAT_CHECK_CONDITION, in nvme_scsi_translate()
2529 struct sg_io_hdr hdr; in nvme_sg_io() local
2534 if (copy_from_user(&hdr, u_hdr, sizeof(hdr))) in nvme_sg_io()
2536 if (hdr.interface_id != 'S') in nvme_sg_io()
2538 if (hdr.cmd_len > BLK_MAX_CDB) in nvme_sg_io()
2545 retcode = nvme_scsi_translate(ns, &hdr); in nvme_sg_io()
2548 if (copy_to_user(u_hdr, &hdr, sizeof(sg_io_hdr_t)) > 0) in nvme_sg_io()
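
The entries above trace the full SG_IO path of the NVMe SCSI translation layer: nvme_sg_io() validates the user-supplied sg_io_hdr (interface_id 'S', cmd_len <= BLK_MAX_CDB, non-NULL cmdp), nvme_scsi_translate() dispatches on the CDB opcode, and the per-command helpers move data and sense bytes through nvme_trans_copy_to_user()/nvme_trans_copy_from_user() and nvme_trans_completion(). Below is a minimal userspace sketch of how such a request reaches this code; the device node /dev/nvme0n1, the 96-byte INQUIRY allocation length, and the trimmed error handling are illustrative assumptions, not part of the indexed source.

/*
 * Sketch only: issue a 6-byte INQUIRY via the SG_IO ioctl. On a kernel that
 * still carries this translation layer, the call lands in nvme_sg_io() ->
 * nvme_scsi_translate() -> nvme_trans_inquiry().
 */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <scsi/sg.h>

int main(void)
{
	unsigned char cdb[6]    = { 0x12, 0, 0, 0, 96, 0 }; /* INQUIRY, alloc len 96 (assumed) */
	unsigned char resp[96]  = { 0 };
	unsigned char sense[32] = { 0 };
	struct sg_io_hdr hdr;

	int fd = open("/dev/nvme0n1", O_RDONLY); /* assumed device node */
	if (fd < 0)
		return 1;

	memset(&hdr, 0, sizeof(hdr));
	hdr.interface_id    = 'S';              /* checked in nvme_sg_io() */
	hdr.cmd_len         = sizeof(cdb);      /* must not exceed BLK_MAX_CDB */
	hdr.cmdp            = cdb;              /* NULL cmdp is rejected */
	hdr.dxfer_direction = SG_DXFER_FROM_DEV;
	hdr.dxfer_len       = sizeof(resp);
	hdr.dxferp          = resp;             /* filled via nvme_trans_copy_to_user() */
	hdr.mx_sb_len       = sizeof(sense);
	hdr.sbp             = sense;            /* sense data from nvme_trans_completion() */

	if (ioctl(fd, SG_IO, &hdr) < 0) {
		close(fd);
		return 1;
	}
	printf("status=0x%x sb_len_wr=%u\n",
	       (unsigned)hdr.status, (unsigned)hdr.sb_len_wr);
	close(fd);
	return 0;
}

On return, the kernel copies the updated header back to userspace (line 2548 above), so status, masked_status, and sb_len_wr reflect what nvme_trans_completion() or nvme_trans_status_code() recorded for the translated command.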