
Search results for refs:nr_phys_segments in linux-4.4.14 (results 1 – 17 of 17), sorted by relevance

/linux-4.4.14/block/
blk-merge.c:271  rq->nr_phys_segments = __blk_recalc_rq_segments(rq->q, rq->bio,  in blk_recalc_rq_segments()
blk-merge.c:463  WARN_ON(nsegs > rq->nr_phys_segments);  in blk_rq_map_sg()
blk-merge.c:475  if (req->nr_phys_segments + nr_phys_segs > queue_max_segments(q))  in ll_new_hw_segment()
blk-merge.c:485  req->nr_phys_segments += nr_phys_segs;  in ll_new_hw_segment()
blk-merge.c:577  total_phys_segments = req->nr_phys_segments + next->nr_phys_segments;  in ll_merge_requests_fn()
blk-merge.c:579  if (req->nr_phys_segments == 1)  in ll_merge_requests_fn()
blk-merge.c:581  if (next->nr_phys_segments == 1)  in ll_merge_requests_fn()
blk-merge.c:593  req->nr_phys_segments = total_phys_segments;  in ll_merge_requests_fn()
bsg-lib.c:92  size_t sz = (sizeof(struct scatterlist) * req->nr_phys_segments);  in bsg_map_buffer()
bsg-lib.c:94  BUG_ON(!req->nr_phys_segments);  in bsg_map_buffer()
bsg-lib.c:99  sg_init_table(buf->sg_list, req->nr_phys_segments);  in bsg_map_buffer()
blk-core.c:1539  rq->nr_phys_segments = 1;  in blk_add_request_payload()
blk-core.c:2164  if (rq->nr_phys_segments > queue_max_segments(q)) {  in blk_cloned_rq_check_limits()
blk-core.c:2422  rq->nr_phys_segments++;  in blk_peek_request()
blk-core.c:2444  --rq->nr_phys_segments;  in blk_peek_request()
blk-core.c:2973  rq->nr_phys_segments = bio_phys_segments(q, bio);  in blk_rq_bio_prep()
blk-core.c:3059  dst->nr_phys_segments = src->nr_phys_segments;  in __blk_rq_prep_clone()
blk-mq.c:184  rq->nr_phys_segments = 0;  in blk_mq_rq_ctx_init()
blk-mq.c:433  rq->nr_phys_segments++;  in blk_mq_start_request()
blk-mq.c:446  rq->nr_phys_segments--;  in __blk_mq_requeue_request()
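
The ll_new_hw_segment() and ll_merge_requests_fn() hits above show the core accounting: when a bio or another request is merged in, the block layer sums the physical segment counts, collapses the two boundary segments into one when they are physically contiguous, and refuses the merge if the total would exceed queue_max_segments(q). The paired increment/decrement in blk_mq_start_request()/__blk_mq_requeue_request() (and in blk_peek_request() on the legacy path) reserves and then releases one extra segment for the queue's DMA drain buffer, so a requeued request is not counted twice. The following user-space sketch models only the merge arithmetic; toy_queue and toy_request are simplified stand-ins for the kernel's request_queue and request, and the single boundary_mergeable flag glosses over the real contiguity and segment-size checks.

/*
 * Toy model of the merge-time accounting seen in ll_merge_requests_fn();
 * not kernel code, just the arithmetic on nr_phys_segments.
 */
#include <stdbool.h>
#include <stdio.h>

struct toy_queue   { unsigned short max_segments; };
struct toy_request { unsigned short nr_phys_segments; };

/* Try to let 'req' absorb 'next'; returns false if the combined request
 * would exceed the queue's segment limit. */
static bool toy_merge_requests(struct toy_queue *q,
                               struct toy_request *req,
                               struct toy_request *next,
                               bool boundary_mergeable)
{
    unsigned int total = req->nr_phys_segments + next->nr_phys_segments;

    /* If req's last segment and next's first segment are physically
     * contiguous, they collapse into a single segment. */
    if (boundary_mergeable)
        total--;

    if (total > q->max_segments)
        return false;

    req->nr_phys_segments = total;
    return true;
}

int main(void)
{
    struct toy_queue q = { .max_segments = 128 };
    struct toy_request a = { .nr_phys_segments = 100 };
    struct toy_request b = { .nr_phys_segments = 29 };

    printf("merge allowed: %d\n", toy_merge_requests(&q, &a, &b, true));
    printf("merged nr_phys_segments: %u\n", (unsigned)a.nr_phys_segments);
    return 0;
}
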
/linux-4.4.14/Documentation/block/
request.txt:61  unsigned short nr_phys_segments   DB   Number of physical scatter gather
biodoc.txt:529  unsigned short nr_phys_segments;
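
As request.txt:61 describes, nr_phys_segments is the number of physical scatter-gather segments, i.e. the count after physically contiguous bio_vecs have been coalesced, which is why blk_rq_bio_prep() and the null_blk/lightnvm hits derive it via bio_phys_segments(). A minimal user-space illustration of that idea follows; the extent list is made up, and the real recalculation (blk-merge.c:271) also honours the queue's segment-size and boundary limits, which this sketch ignores.

/* Toy illustration of "physical" segments: adjacent extents coalesce,
 * so nr_phys_segments can be smaller than the number of bio_vecs. */
#include <stdio.h>

struct extent { unsigned long addr, len; };

static unsigned short count_phys_segments(const struct extent *v, int n)
{
    unsigned short segs = 0;
    int i;

    for (i = 0; i < n; i++) {
        /* Start a new segment unless this extent begins exactly
         * where the previous one ended. */
        if (i == 0 || v[i].addr != v[i - 1].addr + v[i - 1].len)
            segs++;
    }
    return segs;
}

int main(void)
{
    /* Three 4 KiB extents: the first two are contiguous. */
    struct extent vecs[] = {
        { 0x10000, 4096 }, { 0x11000, 4096 }, { 0x40000, 4096 },
    };

    printf("nr_phys_segments = %u\n",
           (unsigned)count_phys_segments(vecs, 3)); /* prints 2 */
    return 0;
}
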
/linux-4.4.14/drivers/block/
xen-blkfront.c:591  max_grefs = req->nr_phys_segments * GRANTS_PER_PSEG;  in blkif_queue_rw_req()
xen-blkfront.c:621  GREFS(req->nr_phys_segments) > BLKIF_MAX_SEGMENTS_PER_REQUEST);  in blkif_queue_rw_req()
xen-blkfront.c:623  GREFS(req->nr_phys_segments) > info->max_indirect_segments);  in blkif_queue_rw_req()
xen-blkfront.c:1725  BUG_ON(req->nr_phys_segments > segs);  in blkif_recover()
null_blk.c:461  rq->nr_phys_segments = bio_phys_segments(q, bio);  in null_lnvm_submit_io()
virtio_blk.c:172  BUG_ON(req->nr_phys_segments + 2 > vblk->sg_elems);  in virtio_queue_rq()
cpqarray.c:917  BUG_ON(creq->nr_phys_segments > SG_MAX);  in do_ida_request()
cciss.c:3321  BUG_ON(creq->nr_phys_segments > h->maxsgentries);  in do_cciss_request()
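
The driver hits above treat nr_phys_segments as the upper bound on the hardware resources a request will need: xen-blkfront multiplies it by GRANTS_PER_PSEG to size its grant references, cpqarray and cciss assert it against their controllers' SG table sizes, and virtio_blk asserts that the data segments plus two extra descriptors (the request header and the status byte) fit in vblk->sg_elems. A hedged sketch of that virtio-style capacity check follows; the names and the fixed descriptor budget are illustrative stand-ins, not the real virtqueue API.

/* Toy model of a driver-side capacity check like the one in
 * virtio_queue_rq(): data segments plus a request-header descriptor and a
 * status descriptor must fit in the ring. Names are illustrative only. */
#include <stdbool.h>
#include <stdio.h>

#define TOY_HEADER_DESCS 2  /* request header + status byte (assumption) */

struct toy_request { unsigned short nr_phys_segments; };

static bool toy_request_fits(const struct toy_request *req,
                             unsigned int sg_elems)
{
    return req->nr_phys_segments + TOY_HEADER_DESCS <= sg_elems;
}

int main(void)
{
    struct toy_request req = { .nr_phys_segments = 126 };

    printf("fits in 128-entry ring: %d\n", toy_request_fits(&req, 128));
    printf("fits in 64-entry ring:  %d\n", toy_request_fits(&req, 64));
    return 0;
}
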
/linux-4.4.14/drivers/nvme/host/
pci.c:452  if (rq->nr_phys_segments <= NVME_INT_PAGES &&  in nvme_alloc_iod()
pci.c:457  iod_init(iod, size, rq->nr_phys_segments,  in nvme_alloc_iod()
pci.c:462  return __nvme_alloc_iod(rq->nr_phys_segments, size, dev,  in nvme_alloc_iod()
pci.c:737  if (req->nr_phys_segments) {  in nvme_submit_priv()
pci.c:882  } else if (req->nr_phys_segments) {  in nvme_queue_rq()
pci.c:885  sg_init_table(iod->sg, req->nr_phys_segments);  in nvme_queue_rq()
lightnvm.c:488  rq->nr_phys_segments = bio_phys_segments(q, bio);  in nvme_nvm_submit_io()
/linux-4.4.14/drivers/scsi/
scsi_lib.c:1090  if (unlikely(scsi_alloc_sgtable(sdb, req->nr_phys_segments,  in scsi_init_sgtable()
scsi_lib.c:1123  BUG_ON(!rq->nr_phys_segments);  in scsi_init_io()
scsi_transport_fc.c:3675  size_t sz = (sizeof(struct scatterlist) * req->nr_phys_segments);  in fc_bsg_map_buffer()
scsi_transport_fc.c:3677  BUG_ON(!req->nr_phys_segments);  in fc_bsg_map_buffer()
scsi_transport_fc.c:3682  sg_init_table(buf->sg_list, req->nr_phys_segments);  in fc_bsg_map_buffer()
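
bsg_map_buffer() (block/bsg-lib.c above), fc_bsg_map_buffer() and scsi_init_sgtable() all follow the same consumer pattern, which also appears in the NVMe hits: the scatterlist for a request is sized and initialised from req->nr_phys_segments before the individual bio_vecs are mapped into it with blk_rq_map_sg(), which warns if it ever produces more entries than nr_phys_segments promised (blk-merge.c:463). Below is a user-space approximation of the sizing step only; toy_sg and toy_request are simplified stand-ins for the kernel structures.

/* Toy model of sizing an sg table from nr_phys_segments, as done in
 * bsg_map_buffer()/fc_bsg_map_buffer(). */
#include <stdio.h>
#include <stdlib.h>

struct toy_sg      { void *page; unsigned int offset, length; };
struct toy_request { unsigned short nr_phys_segments; };

static struct toy_sg *toy_map_buffer(const struct toy_request *req)
{
    struct toy_sg *sg_list;

    /* The kernel paths BUG() on a zero segment count instead. */
    if (!req->nr_phys_segments)
        return NULL;

    /* One scatterlist entry per physical segment of the request. */
    sg_list = calloc(req->nr_phys_segments, sizeof(*sg_list));
    if (!sg_list)
        return NULL;

    printf("allocated %zu bytes for %u segments\n",
           req->nr_phys_segments * sizeof(*sg_list),
           (unsigned)req->nr_phys_segments);
    return sg_list;
}

int main(void)
{
    struct toy_request req = { .nr_phys_segments = 4 };

    free(toy_map_buffer(&req));
    return 0;
}
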
/linux-4.4.14/drivers/mmc/card/
block.c:1681  phys_segments += cur->nr_phys_segments;  in mmc_blk_prep_packed_list()
block.c:1721  phys_segments += next->nr_phys_segments;  in mmc_blk_prep_packed_list()
/linux-4.4.14/include/linux/
blkdev.h:163  unsigned short nr_phys_segments;  (member declaration)
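
Finally, blkdev.h:163 is the declaration itself: nr_phys_segments is a member of struct request, and its value is bounded by the queue limit that blk_cloned_rq_check_limits() (blk-core.c:2164 above) enforces by comparing it against queue_max_segments(q). A toy version of that check, with simplified stand-in types and -EIO written out as a literal:

/* Toy model of the check at blk-core.c:2164: a request whose segment
 * count exceeds the destination queue's limit is rejected. */
#include <stdio.h>

#define TOY_EIO 5

struct toy_queue   { unsigned short max_segments; };
struct toy_request { unsigned short nr_phys_segments; };

static int toy_check_limits(const struct toy_queue *q,
                            const struct toy_request *rq)
{
    if (rq->nr_phys_segments > q->max_segments) {
        fprintf(stderr, "toy_check_limits: over max segments limit\n");
        return -TOY_EIO;
    }
    return 0;
}

int main(void)
{
    struct toy_queue q = { .max_segments = 64 };
    struct toy_request ok = { .nr_phys_segments = 64 };
    struct toy_request too_big = { .nr_phys_segments = 65 };

    printf("ok request:      %d\n", toy_check_limits(&q, &ok));
    printf("too-big request: %d\n", toy_check_limits(&q, &too_big));
    return 0;
}
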