Lines Matching refs:src_reloc
2748 struct radeon_bo_list *src_reloc, *dst_reloc, *dst2_reloc; in evergreen_dma_cs_parse() local
2803 r = r600_dma_cs_next_reloc(p, &src_reloc); in evergreen_dma_cs_parse()
2821 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
2823 src_offset + (count * 4), radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
2832 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
2834 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
2844 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); in evergreen_dma_cs_parse()
2854 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
2855 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
2861 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
2863 src_offset + (count * 4), radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
2880 if ((src_offset + count) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
2882 src_offset + count, radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
2891 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xffffffff); in evergreen_dma_cs_parse()
2893 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
2903 ib[idx+1] += (u32)(src_reloc->gpu_offset & 0xffffffff); in evergreen_dma_cs_parse()
2904 ib[idx+2] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
2924 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
2926 src_offset + (count * 4), radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
2941 ib[idx+3] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
2944 ib[idx+6] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
2964 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
2966 src_offset + (count * 4), radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
2981 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
2982 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
2995 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); in evergreen_dma_cs_parse()
3001 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
3002 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
3026 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
3028 src_offset + (count * 4), radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
3043 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
3044 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
3055 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); in evergreen_dma_cs_parse()
3065 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
3066 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
3072 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
3074 src_offset + (count * 4), radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
3091 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); in evergreen_dma_cs_parse()
3113 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) { in evergreen_dma_cs_parse()
3115 src_offset + (count * 4), radeon_bo_size(src_reloc->robj)); in evergreen_dma_cs_parse()
3130 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc); in evergreen_dma_cs_parse()
3131 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff; in evergreen_dma_cs_parse()
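Every matching line above follows one of three recurring patterns in the DMA CS parser: a bounds check that the copy (count dwords or bytes) stays inside the relocated source buffer, a linear-address patch that splits the 64-bit relocated GPU offset into a masked low dword plus bits 39:32 in a separate "high" word, and a tiled-address patch that stores the base pre-shifted by 8 (256-byte aligned). The sketch below is a minimal, self-contained illustration of those three patterns only, not kernel code: struct fake_reloc, bo_size, and the patch_* helpers are hypothetical stand-ins for struct radeon_bo_list, radeon_bo_size(), and the inline patching that evergreen_dma_cs_parse() does directly on the indirect buffer.

/*
 * Standalone sketch of the patching pattern seen in the listing above.
 * upper_32_bits() matches the kernel macro; everything else here is an
 * illustrative stand-in, not the radeon driver's real API.
 */
#include <stdint.h>
#include <stdio.h>

#define upper_32_bits(n) ((uint32_t)((n) >> 32))  /* same as the kernel macro */

struct fake_reloc {               /* stand-in for struct radeon_bo_list */
	uint64_t gpu_offset;      /* relocated GPU address of the buffer   */
	uint64_t bo_size;         /* stand-in for radeon_bo_size(robj)     */
};

/* Linear (dword-aligned) source: patch low dword at ib[idx], high byte at ib[idx+hi]. */
static int patch_l2l_src(uint32_t *ib, int idx, int hi,
			 uint64_t src_offset, unsigned count,
			 const struct fake_reloc *src_reloc)
{
	/* reject copies that would read past the end of the source BO */
	if (src_offset + (uint64_t)count * 4 > src_reloc->bo_size) {
		fprintf(stderr, "DMA copy src too big: %llu > %llu\n",
			(unsigned long long)(src_offset + (uint64_t)count * 4),
			(unsigned long long)src_reloc->bo_size);
		return -1;
	}
	/* low 32 bits, dword aligned (0xfffffffc), like ib[idx+2] above */
	ib[idx] += (uint32_t)(src_reloc->gpu_offset & 0xfffffffc);
	/* bits 39:32 of the 40-bit DMA address, like ib[idx+4] above */
	ib[idx + hi] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
	return 0;
}

/* Tiled source: the base address is stored pre-shifted by 8 (256-byte aligned). */
static void patch_tiled_src(uint32_t *ib, int idx,
			    const struct fake_reloc *src_reloc)
{
	ib[idx] += (uint32_t)(src_reloc->gpu_offset >> 8);
}

int main(void)
{
	uint32_t ib[16] = { 0 };
	struct fake_reloc src = { .gpu_offset = 0x1234567890ULL, .bo_size = 1 << 20 };

	patch_l2l_src(ib, 2, 2, 0, 64, &src);  /* patches ib[2] and ib[4] */
	patch_tiled_src(ib, 1, &src);          /* patches ib[1]           */
	printf("ib[2]=0x%08x ib[4]=0x%08x ib[1]=0x%08x\n", ib[2], ib[4], ib[1]);
	return 0;
}

The byte-aligned variants in the listing (the 0xffffffff masks at 2891 and 2903) differ from the sketch only in the mask: byte-addressed copies keep all 32 low bits instead of forcing dword alignment.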