Searched refs:wb_offset (Results 1 – 11 of 11) sorted by relevance
192 u32 reg_offset, wb_offset; in cayman_dma_resume() local
199 wb_offset = R600_WB_DMA_RPTR_OFFSET; in cayman_dma_resume()
203 wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET; in cayman_dma_resume()
223 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume()
225 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume()
370 u32 reg_offset, wb_offset; in cik_sdma_gfx_resume() local
377 wb_offset = R600_WB_DMA_RPTR_OFFSET; in cik_sdma_gfx_resume()
381 wb_offset = CAYMAN_WB_DMA1_RPTR_OFFSET; in cik_sdma_gfx_resume()
401 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
403 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
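The radeon hits above (cayman_dma_resume() and the radeon version of cik_sdma_gfx_resume()) share one pattern: the hardware writes the DMA ring's read pointer back to rdev->wb.gpu_addr + wb_offset, and that 64-bit address is programmed into a HI/LO register pair, with the low dword masked to 4-byte alignment (cayman additionally masks the high half to 8 bits). A minimal user-space sketch of that split; program_rptr_addr() and the example address/offset are illustrative, not kernel code:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* User-space stand-ins for the kernel helpers of the same name. */
static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }
static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }

/* Split the write-back address into HI/LO register values, forcing the
 * low dword to 4-byte alignment as the & 0xFFFFFFFC masks above do. */
static void program_rptr_addr(uint64_t wb_gpu_addr, uint32_t wb_offset)
{
        uint64_t rptr_addr = wb_gpu_addr + wb_offset;
        uint32_t hi = upper_32_bits(rptr_addr);
        uint32_t lo = lower_32_bits(rptr_addr) & 0xFFFFFFFC;

        printf("RPTR_ADDR_HI=0x%08" PRIx32 " RPTR_ADDR_LO=0x%08" PRIx32 "\n",
               hi, lo);
}

int main(void)
{
        /* Example write-back buffer address and byte offset, not kernel values. */
        program_rptr_addr(0x0000000812345000ULL, 1792);
        return 0;
}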
45 unsigned int wb_offset, /* Offset & ~PAGE_CACHE_MASK */ member
187 return (((loff_t)req->wb_index) << PAGE_CACHE_SHIFT) + req->wb_offset; in req_offset()
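These two hits show that wb_offset is the byte offset of a request inside its page, while wb_index is the page's index within the file, so req_offset() rebuilds the absolute file offset as (wb_index << PAGE_CACHE_SHIFT) + wb_offset. A self-contained sketch of that calculation, assuming 4 KiB pages and using an illustrative struct in place of struct nfs_page:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_CACHE_SHIFT 12     /* assumption: 4 KiB pages */

/* Illustrative subset of struct nfs_page, not the kernel definition. */
struct nfs_page_sketch {
        uint64_t wb_index;      /* index of the page within the file */
        unsigned int wb_offset; /* byte offset within that page */
        unsigned int wb_bytes;  /* length of the request */
};

/* Same calculation as the req_offset() hit above. */
static int64_t req_offset(const struct nfs_page_sketch *req)
{
        return ((int64_t)req->wb_index << PAGE_CACHE_SHIFT) + req->wb_offset;
}

int main(void)
{
        struct nfs_page_sketch req = { .wb_index = 3, .wb_offset = 512, .wb_bytes = 1024 };

        /* page 3 * 4096 + 512 = 12800 */
        printf("file offset = %" PRId64 "\n", req_offset(&req));
        return 0;
}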
478 if (subreq->wb_offset == (head->wb_offset + total_bytes)) { in nfs_lock_and_join_requests()
481 } else if (WARN_ON_ONCE(subreq->wb_offset < head->wb_offset || in nfs_lock_and_join_requests()
482 ((subreq->wb_offset + subreq->wb_bytes) > in nfs_lock_and_join_requests()
483 (head->wb_offset + total_bytes)))) { in nfs_lock_and_join_requests()
1026 rqend = req->wb_offset + req->wb_bytes; in nfs_try_to_update_request()
1034 || end < req->wb_offset) in nfs_try_to_update_request()
1050 if (offset < req->wb_offset) { in nfs_try_to_update_request()
1051 req->wb_offset = offset; in nfs_try_to_update_request()
1055 req->wb_bytes = end - req->wb_offset; in nfs_try_to_update_request()
1057 req->wb_bytes = rqend - req->wb_offset; in nfs_try_to_update_request()
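These hits show the two places the byte range matters: nfs_lock_and_join_requests() only joins a subrequest that starts exactly at head->wb_offset + total_bytes and stays inside the head's range, and nfs_try_to_update_request() grows an existing request to cover a new write only when the two ranges touch or overlap. A simplified sketch of that grow-if-contiguous logic; the struct and function name are illustrative, not the kernel's:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative request covering [wb_offset, wb_offset + wb_bytes) in a page. */
struct range_req {
        unsigned int wb_offset;
        unsigned int wb_bytes;
};

/* Grow an existing request to also cover [offset, offset + bytes), but only
 * if the two ranges touch or overlap; otherwise the caller must flush first. */
static bool try_to_update_range(struct range_req *req,
                                unsigned int offset, unsigned int bytes)
{
        unsigned int end = offset + bytes;
        unsigned int rqend = req->wb_offset + req->wb_bytes;

        if (offset > rqend || end < req->wb_offset)
                return false;                           /* disjoint: cannot merge */

        if (offset < req->wb_offset)
                req->wb_offset = offset;                /* extend downwards */
        if (end > rqend)
                req->wb_bytes = end - req->wb_offset;   /* extend upwards */
        else
                req->wb_bytes = rqend - req->wb_offset;
        return true;
}

int main(void)
{
        struct range_req req = { .wb_offset = 512, .wb_bytes = 512 };

        if (try_to_update_range(&req, 1024, 256))       /* contiguous: merges */
                printf("merged: wb_offset=%u wb_bytes=%u\n",
                       req.wb_offset, req.wb_bytes);
        return 0;
}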
381 req->wb_offset = offset; in nfs_create_request()
1040 offset = subreq->wb_offset; in __nfs_pageio_add_request()
1047 WARN_ON_ONCE(subreq->wb_offset != offset); in __nfs_pageio_add_request()
1078 subreq->wb_offset = offset; in __nfs_pageio_add_request()
1146 offset = req->wb_offset; in nfs_pageio_add_request()
1171 dupreq->wb_offset = offset; in nfs_pageio_add_request()
515 req->wb_offset = pos & ~PAGE_MASK; in nfs_direct_read_schedule_iovec()
906 req->wb_offset = pos & ~PAGE_MASK; in nfs_direct_write_schedule_iovec()
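In the direct-I/O scheduling hits, wb_offset is derived straight from the file position: pos & ~PAGE_MASK is the offset of pos within its page. A tiny sketch with assumed 4 KiB-page constants:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096ULL               /* assumption: 4 KiB pages */
#define PAGE_MASK (~(PAGE_SIZE - 1))

int main(void)
{
        uint64_t pos = 12800;           /* arbitrary file position */

        /* Offset of pos within its page, as in the two hits above. */
        unsigned int wb_offset = (unsigned int)(pos & ~PAGE_MASK);

        printf("wb_offset = %u\n", wb_offset);  /* 12800 mod 4096 = 512 */
        return 0;
}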
402 u32 wb_offset; in cik_sdma_gfx_resume() local
407 wb_offset = (ring->rptr_offs * 4); in cik_sdma_gfx_resume()
438 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
440 ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
445 u32 wb_offset; in sdma_v2_4_gfx_resume() local
450 wb_offset = (ring->rptr_offs * 4); in sdma_v2_4_gfx_resume()
481 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v2_4_gfx_resume()
483 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v2_4_gfx_resume()
581 u32 wb_offset; in sdma_v3_0_gfx_resume() local
587 wb_offset = (ring->rptr_offs * 4); in sdma_v3_0_gfx_resume()
618 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
620 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
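The amdgpu variants above differ mainly in how the offset is obtained: each ring owns a dword slot in the shared write-back buffer, so wb_offset = ring->rptr_offs * 4 converts the slot index to a byte offset before the same HI/LO register split. A sketch of that slot-to-address mapping; wb_buffer and ring_sketch are illustrative stand-ins, not kernel types:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins; the real adev->wb and ring structures are richer. */
struct wb_buffer {
        uint64_t gpu_addr;              /* GPU address of the write-back buffer */
        volatile uint32_t *cpu_addr;    /* CPU mapping of the same buffer */
};

struct ring_sketch {
        unsigned int rptr_offs;         /* dword slot allocated to this ring */
};

int main(void)
{
        static uint32_t backing[256];   /* stands in for the mapped wb buffer */
        struct wb_buffer wb = { .gpu_addr = 0x800000000ULL, .cpu_addr = backing };
        struct ring_sketch ring = { .rptr_offs = 16 };

        uint32_t wb_offset = ring.rptr_offs * 4;        /* dword slot -> byte offset */
        uint64_t rptr_gpu = wb.gpu_addr + wb_offset;    /* goes into RPTR_ADDR_HI/LO */

        /* The driver can then read the ring's current rptr from the same slot. */
        uint32_t rptr = wb.cpu_addr[ring.rptr_offs];

        printf("rptr write-back address = 0x%" PRIx64 ", rptr = %" PRIu32 "\n",
               rptr_gpu, rptr);
        return 0;
}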
593 if (req->wb_offset || in objio_init_write()
755 if (!IS_ALIGNED(req->wb_offset, alignment)) in is_aligned_req()
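The last two hits are pNFS layout drivers gating requests on their in-page offset: objio_init_write() tests for a non-zero wb_offset, and the block-layout check rejects requests whose wb_offset is not IS_ALIGNED to the layout's alignment. A simplified user-space stand-in for such a check (req_is_aligned() and the struct are illustrative, not the kernel implementation):

#include <stdbool.h>
#include <stdio.h>

/* User-space stand-in for the kernel's IS_ALIGNED() macro. */
#define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

/* Illustrative request; only the fields the check needs. */
struct req_sketch {
        unsigned int wb_offset; /* offset within the page */
        unsigned int wb_bytes;  /* request length */
};

/* Simplified alignment gate: both the start and the length of the request
 * must line up with the layout's alignment for the driver to accept it. */
static bool req_is_aligned(const struct req_sketch *req, unsigned int alignment)
{
        return IS_ALIGNED(req->wb_offset, alignment) &&
               IS_ALIGNED(req->wb_bytes, alignment);
}

int main(void)
{
        struct req_sketch ok = { .wb_offset = 0, .wb_bytes = 4096 };
        struct req_sketch bad = { .wb_offset = 512, .wb_bytes = 512 };

        printf("ok aligned: %d, bad aligned: %d\n",
               req_is_aligned(&ok, 4096), req_is_aligned(&bad, 4096));
        return 0;
}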