ndw 65 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw)
ndw 69 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask;
ndw 74 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c if (WARN_ON_ONCE(ndw > ring->max_dw))
ndw 77 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.c ring->count_dw = ndw;
ndw 256 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw);
ndw 64 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c unsigned int ndw = AMDGPU_VM_SDMA_MIN_NUM_DW;
ndw 67 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c r = amdgpu_job_alloc_with_ib(p->adev, ndw * 4, &p->job);
ndw 80 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c p->num_dw_left = ndw;
ndw 200 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c unsigned int i, ndw, nptes;
ndw 205 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw = p->num_dw_left;
ndw 206 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw -= p->job->ibs->length_dw;
ndw 208 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c if (ndw < 32) {
ndw 214 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw = 32;
ndw 216 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw += count * 2;
ndw 217 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw = max(ndw, AMDGPU_VM_SDMA_MIN_NUM_DW);
ndw 218 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw = min(ndw, AMDGPU_VM_SDMA_MAX_NUM_DW);
ndw 220 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c r = amdgpu_job_alloc_with_ib(p->adev, ndw * 4, &p->job);
ndw 224 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c p->num_dw_left = ndw;
ndw 238 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw -= p->adev->vm_manager.vm_pte_funcs->copy_pte_num_dw *
ndw 242 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c ndw -= 7;
ndw 244 drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c nptes = min(count, ndw / 2);
ndw 759 drivers/gpu/drm/amd/amdgpu/cik_sdma.c unsigned ndw = count * 2;
ndw 765 drivers/gpu/drm/amd/amdgpu/cik_sdma.c ib->ptr[ib->length_dw++] = ndw;
ndw 766 drivers/gpu/drm/amd/amdgpu/cik_sdma.c for (; ndw > 0; ndw -= 2) {
ndw 698 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c unsigned ndw = count * 2;
ndw 704 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c ib->ptr[ib->length_dw++] = ndw;
ndw 705 drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c for (; ndw > 0; ndw -= 2) {
ndw 969 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c unsigned ndw = count * 2;
ndw 975 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c ib->ptr[ib->length_dw++] = ndw;
ndw 976 drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c for (; ndw > 0; ndw -= 2) {
ndw 1526 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c unsigned ndw = count * 2;
ndw 1532 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c ib->ptr[ib->length_dw++] = ndw - 1;
ndw 1533 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c for (; ndw > 0; ndw -= 2) {
ndw 1063 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c unsigned ndw = count * 2;
ndw 1069 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c ib->ptr[ib->length_dw++] = ndw - 1;
ndw 1070 drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c for (; ndw > 0; ndw -= 2) {
ndw 343 drivers/gpu/drm/amd/amdgpu/si_dma.c unsigned ndw = count * 2;
ndw 345 drivers/gpu/drm/amd/amdgpu/si_dma.c ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);
ndw 348 drivers/gpu/drm/amd/amdgpu/si_dma.c for (; ndw > 0; ndw -= 2) {
ndw 373 drivers/gpu/drm/amd/amdgpu/si_dma.c unsigned ndw;
ndw 376 drivers/gpu/drm/amd/amdgpu/si_dma.c ndw = count * 2;
ndw 377 drivers/gpu/drm/amd/amdgpu/si_dma.c if (ndw > 0xFFFFE)
ndw 378 drivers/gpu/drm/amd/amdgpu/si_dma.c ndw = 0xFFFFE;
ndw 386 drivers/gpu/drm/amd/amdgpu/si_dma.c ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
ndw 395 drivers/gpu/drm/amd/amdgpu/si_dma.c pe += ndw * 4;
ndw 396 drivers/gpu/drm/amd/amdgpu/si_dma.c addr += (ndw / 2) * incr;
ndw 397 drivers/gpu/drm/amd/amdgpu/si_dma.c count -= ndw / 2;
ndw 848 drivers/gpu/drm/radeon/cik_sdma.c unsigned ndw;
ndw 851 drivers/gpu/drm/radeon/cik_sdma.c ndw = count * 2;
ndw 852 drivers/gpu/drm/radeon/cik_sdma.c if (ndw > 0xFFFFE)
ndw 853 drivers/gpu/drm/radeon/cik_sdma.c ndw = 0xFFFFE;
ndw 860 drivers/gpu/drm/radeon/cik_sdma.c ib->ptr[ib->length_dw++] = ndw;
ndw 861 drivers/gpu/drm/radeon/cik_sdma.c for (; ndw > 0; ndw -= 2, --count, pe += 8) {
ndw 897 drivers/gpu/drm/radeon/cik_sdma.c unsigned ndw;
ndw 900 drivers/gpu/drm/radeon/cik_sdma.c ndw = count;
ndw 901 drivers/gpu/drm/radeon/cik_sdma.c if (ndw > 0x7FFFF)
ndw 902 drivers/gpu/drm/radeon/cik_sdma.c ndw = 0x7FFFF;
ndw 919 drivers/gpu/drm/radeon/cik_sdma.c ib->ptr[ib->length_dw++] = ndw; /* number of entries */
ndw 921 drivers/gpu/drm/radeon/cik_sdma.c pe += ndw * 8;
ndw 922 drivers/gpu/drm/radeon/cik_sdma.c addr += ndw * incr;
ndw 923 drivers/gpu/drm/radeon/cik_sdma.c count -= ndw;
ndw 320 drivers/gpu/drm/radeon/ni_dma.c unsigned ndw;
ndw 323 drivers/gpu/drm/radeon/ni_dma.c ndw = count * 2;
ndw 324 drivers/gpu/drm/radeon/ni_dma.c if (ndw > 0xFFFFE)
ndw 325 drivers/gpu/drm/radeon/ni_dma.c ndw = 0xFFFFE;
ndw 328 drivers/gpu/drm/radeon/ni_dma.c 0, 0, ndw);
ndw 334 drivers/gpu/drm/radeon/ni_dma.c pe += ndw * 4;
ndw 335 drivers/gpu/drm/radeon/ni_dma.c src += ndw * 4;
ndw 336 drivers/gpu/drm/radeon/ni_dma.c count -= ndw / 2;
ndw 360 drivers/gpu/drm/radeon/ni_dma.c unsigned ndw;
ndw 363 drivers/gpu/drm/radeon/ni_dma.c ndw = count * 2;
ndw 364 drivers/gpu/drm/radeon/ni_dma.c if (ndw > 0xFFFFE)
ndw 365 drivers/gpu/drm/radeon/ni_dma.c ndw = 0xFFFFE;
ndw 369 drivers/gpu/drm/radeon/ni_dma.c 0, 0, ndw);
ndw 372 drivers/gpu/drm/radeon/ni_dma.c for (; ndw > 0; ndw -= 2, --count, pe += 8) {
ndw 408 drivers/gpu/drm/radeon/ni_dma.c unsigned ndw;
ndw 411 drivers/gpu/drm/radeon/ni_dma.c ndw = count * 2;
ndw 412 drivers/gpu/drm/radeon/ni_dma.c if (ndw > 0xFFFFE)
ndw 413 drivers/gpu/drm/radeon/ni_dma.c ndw = 0xFFFFE;
ndw 421 drivers/gpu/drm/radeon/ni_dma.c ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
ndw 431 drivers/gpu/drm/radeon/ni_dma.c pe += ndw * 4;
ndw 432 drivers/gpu/drm/radeon/ni_dma.c addr += (ndw / 2) * incr;
ndw 433 drivers/gpu/drm/radeon/ni_dma.c count -= ndw / 2;
ndw 902 drivers/gpu/drm/radeon/r100.c unsigned ndw;
ndw 914 drivers/gpu/drm/radeon/r100.c ndw = 64 + (10 * num_loops);
ndw 915 drivers/gpu/drm/radeon/r100.c r = radeon_ring_lock(rdev, ring, ndw);
ndw 917 drivers/gpu/drm/radeon/r100.c DRM_ERROR("radeon: moving bo (%d) asking for %u dw.\n", r, ndw);
ndw 1018 drivers/gpu/drm/radeon/radeon.h int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
ndw 1019 drivers/gpu/drm/radeon/radeon.h int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
ndw 108 drivers/gpu/drm/radeon/radeon_ring.c int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)
ndw 113 drivers/gpu/drm/radeon/radeon_ring.c if (ndw > (ring->ring_size / 4))
ndw 118 drivers/gpu/drm/radeon/radeon_ring.c ndw = (ndw + ring->align_mask) & ~ring->align_mask;
ndw 119 drivers/gpu/drm/radeon/radeon_ring.c while (ndw > (ring->ring_free_dw - 1)) {
ndw 121 drivers/gpu/drm/radeon/radeon_ring.c if (ndw < ring->ring_free_dw) {
ndw 128 drivers/gpu/drm/radeon/radeon_ring.c ring->count_dw = ndw;
ndw 144 drivers/gpu/drm/radeon/radeon_ring.c int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw)
ndw 149 drivers/gpu/drm/radeon/radeon_ring.c r = radeon_ring_alloc(rdev, ring, ndw);
ndw 648 drivers/gpu/drm/radeon/radeon_vm.c unsigned count = 0, pt_idx, ndw;
ndw 653 drivers/gpu/drm/radeon/radeon_vm.c ndw = 64;
ndw 656 drivers/gpu/drm/radeon/radeon_vm.c ndw += vm->max_pde_used * 6;
ndw 659 drivers/gpu/drm/radeon/radeon_vm.c if (ndw > 0xfffff)
ndw 662 drivers/gpu/drm/radeon/radeon_vm.c r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4);
ndw 706 drivers/gpu/drm/radeon/radeon_vm.c WARN_ON(ib.length_dw > ndw);
ndw 918 drivers/gpu/drm/radeon/radeon_vm.c unsigned nptes, ncmds, ndw;
ndw 974 drivers/gpu/drm/radeon/radeon_vm.c ndw = 64;
ndw 979 drivers/gpu/drm/radeon/radeon_vm.c ndw += ncmds * 7;
ndw 983 drivers/gpu/drm/radeon/radeon_vm.c ndw += ncmds * 4;
ndw 986 drivers/gpu/drm/radeon/radeon_vm.c ndw += nptes * 2;
ndw 990 drivers/gpu/drm/radeon/radeon_vm.c ndw += ncmds * 10;
ndw 993 drivers/gpu/drm/radeon/radeon_vm.c ndw += 2 * 10;
ndw 997 drivers/gpu/drm/radeon/radeon_vm.c if (ndw > 0xfffff)
ndw 1000 drivers/gpu/drm/radeon/radeon_vm.c r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4);
ndw 1021 drivers/gpu/drm/radeon/radeon_vm.c WARN_ON(ib.length_dw > ndw);
ndw 112 drivers/gpu/drm/radeon/si_dma.c unsigned ndw;
ndw 115 drivers/gpu/drm/radeon/si_dma.c ndw = count * 2;
ndw 116 drivers/gpu/drm/radeon/si_dma.c if (ndw > 0xFFFFE)
ndw 117 drivers/gpu/drm/radeon/si_dma.c ndw = 0xFFFFE;
ndw 120 drivers/gpu/drm/radeon/si_dma.c ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);
ndw 123 drivers/gpu/drm/radeon/si_dma.c for (; ndw > 0; ndw -= 2, --count, pe += 8) {
ndw 159 drivers/gpu/drm/radeon/si_dma.c unsigned ndw;
ndw 162 drivers/gpu/drm/radeon/si_dma.c ndw = count * 2;
ndw 163 drivers/gpu/drm/radeon/si_dma.c if (ndw > 0xFFFFE)
ndw 164 drivers/gpu/drm/radeon/si_dma.c ndw = 0xFFFFE;
ndw 172 drivers/gpu/drm/radeon/si_dma.c ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw);
ndw 181 drivers/gpu/drm/radeon/si_dma.c pe += ndw * 4;
ndw 182 drivers/gpu/drm/radeon/si_dma.c addr += (ndw / 2) * incr;
ndw 183 drivers/gpu/drm/radeon/si_dma.c count -= ndw / 2;
ndw 2161 kernel/rcu/tree_plugin.h int ndw;
ndw 2168 kernel/rcu/tree_plugin.h ndw = READ_ONCE(rdp->nocb_defer_wakeup);
ndw 2170 kernel/rcu/tree_plugin.h wake_nocb_gp(rdp, ndw == RCU_NOCB_WAKE_FORCE, flags);
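The listing above repeats one sizing pattern: ndw counts the dwords a request needs, is clamped to a per-packet hardware limit (0xFFFFE in the SI/NI DMA call sites), and is rounded up to the ring's alignment before ring space is reserved. The standalone sketch below is not driver code; the constants and helper names are illustrative assumptions that only restate that pattern.

#include <stdio.h>

/* Illustrative constants; real limits and masks are per-engine. */
#define DMA_MAX_NDW     0xFFFFE  /* per-packet dword limit seen in si_dma/ni_dma */
#define RING_ALIGN_MASK 0xF      /* stand-in for ring->align_mask */

/* Two dwords per PTE, clamped to what one DMA packet can carry. */
static unsigned int pte_write_ndw(unsigned int count)
{
	unsigned int ndw = count * 2;

	if (ndw > DMA_MAX_NDW)
		ndw = DMA_MAX_NDW;
	return ndw;
}

/* Round the request up so the ring write pointer stays aligned,
 * mirroring the (ndw + align_mask) & ~align_mask step in the
 * ring-alloc call sites listed above. */
static unsigned int ring_request_ndw(unsigned int ndw)
{
	return (ndw + RING_ALIGN_MASK) & ~RING_ALIGN_MASK;
}

int main(void)
{
	unsigned int count = 1u << 20;   /* more PTEs than one packet holds */
	unsigned int ndw = pte_write_ndw(count);

	printf("ndw per packet: %u, aligned ring request: %u\n",
	       ndw, ring_request_ndw(ndw));
	return 0;
}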