Definitions of the parameter q in block/blk-settings.c (file line number, then the signature of the defining queue-setup helper):
 33  void blk_queue_prep_rq(struct request_queue *q, prep_rq_fn *pfn)
 50  void blk_queue_unprep_rq(struct request_queue *q, unprep_rq_fn *ufn)
 72  void blk_queue_merge_bvec(struct request_queue *q, merge_bvec_fn *mbfn)
 78  void blk_queue_softirq_done(struct request_queue *q, softirq_done_fn *fn)
 84  void blk_queue_rq_timeout(struct request_queue *q, unsigned int timeout)
 90  void blk_queue_rq_timed_out(struct request_queue *q, rq_timed_out_fn *fn)
 96  void blk_queue_lld_busy(struct request_queue *q, lld_busy_fn *fn)
176  void blk_queue_make_request(struct request_queue *q, make_request_fn *mfn)
208  void blk_queue_bounce_limit(struct request_queue *q, u64 max_addr)
272  void blk_queue_max_hw_sectors(struct request_queue *q, unsigned int max_hw_sectors)
291  void blk_queue_chunk_sectors(struct request_queue *q, unsigned int chunk_sectors)
303  void blk_queue_max_discard_sectors(struct request_queue *q, unsigned int max_discard_sectors)
315  void blk_queue_max_write_same_sectors(struct request_queue *q, unsigned int max_write_same_sectors)
331  void blk_queue_max_segments(struct request_queue *q, unsigned short max_segments)
352  void blk_queue_max_segment_size(struct request_queue *q, unsigned int max_size)
374  void blk_queue_logical_block_size(struct request_queue *q, unsigned short size)
396  void blk_queue_physical_block_size(struct request_queue *q, unsigned int size)
419  void blk_queue_alignment_offset(struct request_queue *q, unsigned int offset)
464  void blk_queue_io_min(struct request_queue *q, unsigned int min)
502  void blk_queue_io_opt(struct request_queue *q, unsigned int opt)
713  void blk_queue_dma_pad(struct request_queue *q, unsigned int mask)
729  void blk_queue_update_dma_pad(struct request_queue *q, unsigned int mask)
757  int blk_queue_dma_drain(struct request_queue *q, dma_drain_needed_fn *dma_drain_needed, void *buf, unsigned int size)
778  void blk_queue_segment_boundary(struct request_queue *q, unsigned long mask)
800  void blk_queue_dma_alignment(struct request_queue *q, int mask)
820  void blk_queue_update_dma_alignment(struct request_queue *q, int mask)
838  void blk_queue_flush(struct request_queue *q, unsigned int flush)
849  void blk_queue_flush_queueable(struct request_queue *q, bool queueable)
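
Most of these helpers are meant to be called once from a driver's probe path, after the request queue has been allocated and before the disk is registered. Below is a minimal sketch of that setup for a hypothetical request-based driver; the mydrv_* name and every numeric limit are illustrative assumptions rather than values taken from the listing, and it targets a kernel of the same vintage as this file (one that still has blk_queue_flush()).

#include <linux/blkdev.h>
#include <linux/dma-mapping.h>

/*
 * Hypothetical one-time queue configuration, called at probe time
 * after the queue is allocated and before add_disk().  All values
 * below are illustrative.
 */
static void mydrv_configure_queue(struct request_queue *q)
{
        /* Addressing: 512-byte logical sectors on 4 KiB physical sectors. */
        blk_queue_logical_block_size(q, 512);
        blk_queue_physical_block_size(q, 4096);
        blk_queue_io_min(q, 4096);               /* smallest efficient I/O */
        blk_queue_io_opt(q, 64 * 1024);          /* optimal I/O size */

        /* Per-request transfer limits the hardware can handle. */
        blk_queue_max_hw_sectors(q, 1024);       /* 512 KiB, in 512-byte sectors */
        blk_queue_max_segments(q, 128);          /* scatter/gather entries */
        blk_queue_max_segment_size(q, 64 * 1024);

        /*
         * DMA constraints: bounce pages above 4 GiB, keep segments from
         * crossing a 64 KiB boundary, and require 512-byte buffer
         * alignment (the mask is alignment minus one).
         */
        blk_queue_bounce_limit(q, DMA_BIT_MASK(32));
        blk_queue_segment_boundary(q, 0xffff);
        blk_queue_dma_alignment(q, 511);

        /* Time out requests the hardware has not completed within 30 s. */
        blk_queue_rq_timeout(q, 30 * HZ);

        /* Advertise a volatile write cache that also supports FUA. */
        blk_queue_flush(q, REQ_FLUSH | REQ_FUA);
}

Note that blk_queue_max_hw_sectors() caps what the hardware will accept; the block core derives the softer, user-visible max_sectors limit from it. The block-size and io_min/io_opt values also feed the I/O topology the queue exports through sysfs, which is what lets mkfs and partitioning tools align to the device.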
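
The *_fn helpers at the top of the listing install per-queue callbacks rather than numeric limits. As one example, a prep_rq_fn registered with blk_queue_prep_rq() is invoked by the block core just before each request is handed to the driver. The sketch below is hypothetical: struct mydrv and its free_slots field are assumptions made purely for illustration.

#include <linux/blkdev.h>

struct mydrv {
        unsigned int free_slots;        /* illustrative resource counter */
};

/*
 * Hypothetical prep callback.  BLKPREP_OK releases the request to the
 * driver, BLKPREP_DEFER requeues it to be retried later, and
 * BLKPREP_KILL (not used here) would fail it with an error.
 */
static int mydrv_prep_rq_fn(struct request_queue *q, struct request *rq)
{
        struct mydrv *dev = q->queuedata;

        if (dev->free_slots == 0)
                return BLKPREP_DEFER;   /* no hardware slot free; try later */

        return BLKPREP_OK;
}

static void mydrv_install_callbacks(struct request_queue *q)
{
        blk_queue_prep_rq(q, mydrv_prep_rq_fn);
}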