Lines matching refs: rdev (radeon_uvd.c). Each entry shows the file line number, the matching source line, the enclosing function, and, where noted, whether rdev is a function argument or a local variable there.
63 int radeon_uvd_init(struct radeon_device *rdev) in radeon_uvd_init() argument
69 INIT_DELAYED_WORK(&rdev->uvd.idle_work, radeon_uvd_idle_work_handler); in radeon_uvd_init()
71 switch (rdev->family) { in radeon_uvd_init()
133 r = request_firmware(&rdev->uvd_fw, fw_name, rdev->dev); in radeon_uvd_init()
135 dev_err(rdev->dev, "radeon_uvd: Can't load firmware \"%s\"\n", in radeon_uvd_init()
140 bo_size = RADEON_GPU_PAGE_ALIGN(rdev->uvd_fw->size + 8) + in radeon_uvd_init()
143 r = radeon_bo_create(rdev, bo_size, PAGE_SIZE, true, in radeon_uvd_init()
145 NULL, &rdev->uvd.vcpu_bo); in radeon_uvd_init()
147 dev_err(rdev->dev, "(%d) failed to allocate UVD bo\n", r); in radeon_uvd_init()
151 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, false); in radeon_uvd_init()
153 radeon_bo_unref(&rdev->uvd.vcpu_bo); in radeon_uvd_init()
154 dev_err(rdev->dev, "(%d) failed to reserve UVD bo\n", r); in radeon_uvd_init()
158 r = radeon_bo_pin(rdev->uvd.vcpu_bo, RADEON_GEM_DOMAIN_VRAM, in radeon_uvd_init()
159 &rdev->uvd.gpu_addr); in radeon_uvd_init()
161 radeon_bo_unreserve(rdev->uvd.vcpu_bo); in radeon_uvd_init()
162 radeon_bo_unref(&rdev->uvd.vcpu_bo); in radeon_uvd_init()
163 dev_err(rdev->dev, "(%d) UVD bo pin failed\n", r); in radeon_uvd_init()
167 r = radeon_bo_kmap(rdev->uvd.vcpu_bo, &rdev->uvd.cpu_addr); in radeon_uvd_init()
169 dev_err(rdev->dev, "(%d) UVD map failed\n", r); in radeon_uvd_init()
173 radeon_bo_unreserve(rdev->uvd.vcpu_bo); in radeon_uvd_init()
176 atomic_set(&rdev->uvd.handles[i], 0); in radeon_uvd_init()
177 rdev->uvd.filp[i] = NULL; in radeon_uvd_init()
178 rdev->uvd.img_size[i] = 0; in radeon_uvd_init()
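
The init path above is the usual kernel ladder: request the firmware, create and pin the vcpu buffer object, kmap it, then zero the session bookkeeping (lines 176-178). The handle table is a fixed array of atomics so the command-stream parser can claim sessions without taking a lock. Below is a minimal userspace sketch of that bookkeeping, using C11 atomics in place of the kernel's atomic_t; MAX_UVD_HANDLES, struct uvd_state and uvd_state_init are hypothetical stand-ins for the rdev->uvd fields.

#include <stdatomic.h>
#include <stddef.h>
#include <stdint.h>

#define MAX_UVD_HANDLES 10  /* illustrative stand-in for RADEON_MAX_UVD_HANDLES */

struct uvd_state {
	atomic_uint handles[MAX_UVD_HANDLES]; /* 0 means the slot is free */
	void *filp[MAX_UVD_HANDLES];          /* owning file, checked on destroy */
	uint32_t img_size[MAX_UVD_HANDLES];   /* per-session image size, used for SD/HD counting */
};

static void uvd_state_init(struct uvd_state *s)
{
	for (size_t i = 0; i < MAX_UVD_HANDLES; ++i) {
		atomic_store(&s->handles[i], 0); /* mirrors atomic_set(&rdev->uvd.handles[i], 0) */
		s->filp[i] = NULL;
		s->img_size[i] = 0;
	}
}

int main(void)
{
	struct uvd_state s;

	uvd_state_init(&s);
	return 0;
}
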
184 void radeon_uvd_fini(struct radeon_device *rdev) in radeon_uvd_fini() argument
188 if (rdev->uvd.vcpu_bo == NULL) in radeon_uvd_fini()
191 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, false); in radeon_uvd_fini()
193 radeon_bo_kunmap(rdev->uvd.vcpu_bo); in radeon_uvd_fini()
194 radeon_bo_unpin(rdev->uvd.vcpu_bo); in radeon_uvd_fini()
195 radeon_bo_unreserve(rdev->uvd.vcpu_bo); in radeon_uvd_fini()
198 radeon_bo_unref(&rdev->uvd.vcpu_bo); in radeon_uvd_fini()
200 radeon_ring_fini(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX]); in radeon_uvd_fini()
202 release_firmware(rdev->uvd_fw); in radeon_uvd_fini()
205 int radeon_uvd_suspend(struct radeon_device *rdev) in radeon_uvd_suspend() argument
209 if (rdev->uvd.vcpu_bo == NULL) in radeon_uvd_suspend()
213 uint32_t handle = atomic_read(&rdev->uvd.handles[i]); in radeon_uvd_suspend()
217 radeon_uvd_note_usage(rdev); in radeon_uvd_suspend()
219 r = radeon_uvd_get_destroy_msg(rdev, in radeon_uvd_suspend()
229 rdev->uvd.filp[i] = NULL; in radeon_uvd_suspend()
230 atomic_set(&rdev->uvd.handles[i], 0); in radeon_uvd_suspend()
237 int radeon_uvd_resume(struct radeon_device *rdev) in radeon_uvd_resume() argument
242 if (rdev->uvd.vcpu_bo == NULL) in radeon_uvd_resume()
245 memcpy(rdev->uvd.cpu_addr, rdev->uvd_fw->data, rdev->uvd_fw->size); in radeon_uvd_resume()
247 size = radeon_bo_size(rdev->uvd.vcpu_bo); in radeon_uvd_resume()
248 size -= rdev->uvd_fw->size; in radeon_uvd_resume()
250 ptr = rdev->uvd.cpu_addr; in radeon_uvd_resume()
251 ptr += rdev->uvd_fw->size; in radeon_uvd_resume()
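
Resume (lines 245-251) reloads the firmware image to the start of the pinned buffer; the size and ptr computed on lines 247-251 then set up a clear of everything after the firmware image (the message/heap area). A hedged sketch of that layout arithmetic in plain C: uvd_resume_copy is a hypothetical stand-in, with bo_size and fw_size standing in for radeon_bo_size() and rdev->uvd_fw->size.

#include <stdint.h>
#include <string.h>

/* Firmware at offset 0, everything after it cleared: the layout the
 * size/ptr arithmetic above sets up. */
static void uvd_resume_copy(void *cpu_addr, size_t bo_size,
			    const void *fw_data, size_t fw_size)
{
	uint8_t *ptr = cpu_addr;

	memcpy(ptr, fw_data, fw_size);      /* firmware image first */
	ptr += fw_size;
	memset(ptr, 0, bo_size - fw_size);  /* scrub the remainder of the BO */
}

int main(void)
{
	unsigned char bo[4096], fw[1024] = { 0xAA };

	uvd_resume_copy(bo, sizeof(bo), fw, sizeof(fw));
	return 0;
}
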
284 void radeon_uvd_free_handles(struct radeon_device *rdev, struct drm_file *filp) in radeon_uvd_free_handles() argument
288 uint32_t handle = atomic_read(&rdev->uvd.handles[i]); in radeon_uvd_free_handles()
289 if (handle != 0 && rdev->uvd.filp[i] == filp) { in radeon_uvd_free_handles()
292 radeon_uvd_note_usage(rdev); in radeon_uvd_free_handles()
294 r = radeon_uvd_get_destroy_msg(rdev, in radeon_uvd_free_handles()
304 rdev->uvd.filp[i] = NULL; in radeon_uvd_free_handles()
305 atomic_set(&rdev->uvd.handles[i], 0); in radeon_uvd_free_handles()
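
radeon_uvd_free_handles() (like the suspend path above, which runs the same sweep for every owner) walks the handle table, sends a destroy message for each live session owned by the closing file so the hardware state is torn down cleanly, and only then releases the slot. A self-contained C11 analogue; uvd_send_destroy() is a hypothetical stand-in for radeon_uvd_get_destroy_msg().

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_UVD_HANDLES 10

static atomic_uint handles[MAX_UVD_HANDLES];
static void *filp_tab[MAX_UVD_HANDLES];

/* Hypothetical stand-in for radeon_uvd_get_destroy_msg(). */
static void uvd_send_destroy(uint32_t handle)
{
	printf("destroy session %u\n", (unsigned)handle);
}

static void uvd_free_handles(void *filp)
{
	for (int i = 0; i < MAX_UVD_HANDLES; ++i) {
		uint32_t handle = atomic_load(&handles[i]);

		if (handle != 0 && filp_tab[i] == filp) {
			uvd_send_destroy(handle);     /* tear down the HW session first */
			filp_tab[i] = NULL;
			atomic_store(&handles[i], 0); /* then mark the slot free */
		}
	}
}

int main(void)
{
	int me;

	atomic_store(&handles[3], 42);
	filp_tab[3] = &me;
	uvd_free_handles(&me); /* prints "destroy session 42" */
	return 0;
}
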
410 if (p->rdev->family >= CHIP_PALM) in radeon_uvd_validate_codec()
473 if (atomic_read(&p->rdev->uvd.handles[i]) == handle) { in radeon_uvd_cs_msg()
478 if (!atomic_cmpxchg(&p->rdev->uvd.handles[i], 0, handle)) { in radeon_uvd_cs_msg()
479 p->rdev->uvd.filp[i] = p->filp; in radeon_uvd_cs_msg()
480 p->rdev->uvd.img_size[i] = img_size; in radeon_uvd_cs_msg()
499 if (atomic_read(&p->rdev->uvd.handles[i]) == handle) { in radeon_uvd_cs_msg()
500 if (p->rdev->uvd.filp[i] != p->filp) { in radeon_uvd_cs_msg()
514 atomic_cmpxchg(&p->rdev->uvd.handles[i], handle, 0); in radeon_uvd_cs_msg()
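
The handle management in radeon_uvd_cs_msg() is lock-free: a create message claims the first free slot with atomic_cmpxchg(&handles[i], 0, handle) on line 478, so two parsers racing for the table can never take the same slot, and a destroy message releases it with the reverse exchange on line 514. The filp check on line 500 keeps one process from destroying another's session. A runnable C11 sketch of the claim/release discipline; uvd_claim and uvd_release are illustrative names.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_UVD_HANDLES 10

static atomic_uint handles[MAX_UVD_HANDLES];

/* Claim the first free slot for 'handle'; mirrors the create-message path. */
static bool uvd_claim(uint32_t handle)
{
	for (int i = 0; i < MAX_UVD_HANDLES; ++i) {
		unsigned expected = 0;

		/* Succeeds only if the slot still holds 0, like
		 * atomic_cmpxchg(&handles[i], 0, handle) in the driver. */
		if (atomic_compare_exchange_strong(&handles[i], &expected, handle))
			return true;
	}
	return false; /* table full: no free session slots */
}

/* Release a claimed handle; mirrors the destroy-message path. */
static void uvd_release(uint32_t handle)
{
	for (int i = 0; i < MAX_UVD_HANDLES; ++i) {
		unsigned expected = handle;

		if (atomic_compare_exchange_strong(&handles[i], &expected, 0))
			return;
	}
}

int main(void)
{
	if (uvd_claim(0x1234))
		puts("claimed session 0x1234");
	uvd_release(0x1234);
	return 0;
}
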
581 (start >> 28) != (p->rdev->uvd.gpu_addr >> 28)) { in radeon_uvd_cs_reloc()
695 static int radeon_uvd_send_msg(struct radeon_device *rdev, in radeon_uvd_send_msg() argument
702 r = radeon_ib_get(rdev, ring, &ib, NULL, 64); in radeon_uvd_send_msg()
716 r = radeon_ib_schedule(rdev, &ib, NULL, false); in radeon_uvd_send_msg()
721 radeon_ib_free(rdev, &ib); in radeon_uvd_send_msg()
728 int radeon_uvd_get_create_msg(struct radeon_device *rdev, int ring, in radeon_uvd_get_create_msg() argument
732 uint64_t offs = radeon_bo_size(rdev->uvd.vcpu_bo) - in radeon_uvd_get_create_msg()
735 uint32_t *msg = rdev->uvd.cpu_addr + offs; in radeon_uvd_get_create_msg()
736 uint64_t addr = rdev->uvd.gpu_addr + offs; in radeon_uvd_get_create_msg()
740 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, true); in radeon_uvd_get_create_msg()
759 r = radeon_uvd_send_msg(rdev, ring, addr, fence); in radeon_uvd_get_create_msg()
760 radeon_bo_unreserve(rdev->uvd.vcpu_bo); in radeon_uvd_get_create_msg()
764 int radeon_uvd_get_destroy_msg(struct radeon_device *rdev, int ring, in radeon_uvd_get_destroy_msg() argument
768 uint64_t offs = radeon_bo_size(rdev->uvd.vcpu_bo) - in radeon_uvd_get_destroy_msg()
771 uint32_t *msg = rdev->uvd.cpu_addr + offs; in radeon_uvd_get_destroy_msg()
772 uint64_t addr = rdev->uvd.gpu_addr + offs; in radeon_uvd_get_destroy_msg()
776 r = radeon_bo_reserve(rdev->uvd.vcpu_bo, true); in radeon_uvd_get_destroy_msg()
788 r = radeon_uvd_send_msg(rdev, ring, addr, fence); in radeon_uvd_get_destroy_msg()
789 radeon_bo_unreserve(rdev->uvd.vcpu_bo); in radeon_uvd_get_destroy_msg()
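
Both message helpers carve their scratch message out of the tail of the vcpu BO: a single offset is applied to the CPU mapping (where the message words are written) and to the GPU address (which the ring then consumes), so the two views stay in step. A small sketch of that pairing; uvd_msg_slots and its parameters are hypothetical.

#include <stdint.h>

/* The driver derives offs from radeon_bo_size() minus a reservation
 * at the buffer's tail; the sizes here are placeholders. */
static void uvd_msg_slots(uint64_t bo_size, uint64_t msg_bytes,
			  uint8_t *cpu_base, uint64_t gpu_base,
			  uint32_t **msg_cpu, uint64_t *msg_gpu)
{
	uint64_t offs = bo_size - msg_bytes;

	*msg_cpu = (uint32_t *)(cpu_base + offs); /* where the CPU writes the words */
	*msg_gpu = gpu_base + offs;               /* what the ring points the HW at */
}

int main(void)
{
	static uint8_t bo[4096];
	uint32_t *msg;
	uint64_t gpu;

	uvd_msg_slots(sizeof(bo), 2048, bo, 0x100000, &msg, &gpu);
	msg[0] = 0; /* this write is visible to the GPU at address 'gpu' */
	return 0;
}
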
802 static void radeon_uvd_count_handles(struct radeon_device *rdev, in radeon_uvd_count_handles() argument
811 if (!atomic_read(&rdev->uvd.handles[i])) in radeon_uvd_count_handles()
814 if (rdev->uvd.img_size[i] >= 720*576) in radeon_uvd_count_handles()
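
radeon_uvd_count_handles() feeds power management: every active session with an image of at least 720x576 pixels (a full PAL SD frame) lands in the hd count, anything smaller in sd, and DPM picks clocks from the two totals. The same classification, standalone:

#include <stdatomic.h>
#include <stdint.h>

#define MAX_UVD_HANDLES 10

static atomic_uint handles[MAX_UVD_HANDLES];
static uint32_t img_size[MAX_UVD_HANDLES];

static void uvd_count_handles(unsigned *sd, unsigned *hd)
{
	*sd = *hd = 0;
	for (int i = 0; i < MAX_UVD_HANDLES; ++i) {
		if (!atomic_load(&handles[i]))
			continue;                 /* slot unused */
		if (img_size[i] >= 720 * 576) /* at least a PAL SD frame: "HD" bucket */
			++*hd;
		else
			++*sd;
	}
}

int main(void)
{
	unsigned sd, hd;

	atomic_store(&handles[0], 7);
	img_size[0] = 1920 * 1080;
	uvd_count_handles(&sd, &hd); /* sd == 0, hd == 1 */
	return 0;
}
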
823 struct radeon_device *rdev = in radeon_uvd_idle_work_handler() local
826 if (radeon_fence_count_emitted(rdev, R600_RING_TYPE_UVD_INDEX) == 0) { in radeon_uvd_idle_work_handler()
827 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in radeon_uvd_idle_work_handler()
828 radeon_uvd_count_handles(rdev, &rdev->pm.dpm.sd, in radeon_uvd_idle_work_handler()
829 &rdev->pm.dpm.hd); in radeon_uvd_idle_work_handler()
830 radeon_dpm_enable_uvd(rdev, false); in radeon_uvd_idle_work_handler()
832 radeon_set_uvd_clocks(rdev, 0, 0); in radeon_uvd_idle_work_handler()
835 schedule_delayed_work(&rdev->uvd.idle_work, in radeon_uvd_idle_work_handler()
840 void radeon_uvd_note_usage(struct radeon_device *rdev) in radeon_uvd_note_usage() argument
843 bool set_clocks = !cancel_delayed_work_sync(&rdev->uvd.idle_work); in radeon_uvd_note_usage()
844 set_clocks &= schedule_delayed_work(&rdev->uvd.idle_work, in radeon_uvd_note_usage()
847 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in radeon_uvd_note_usage()
849 radeon_uvd_count_handles(rdev, &sd, &hd); in radeon_uvd_note_usage()
850 if ((rdev->pm.dpm.sd != sd) || in radeon_uvd_note_usage()
851 (rdev->pm.dpm.hd != hd)) { in radeon_uvd_note_usage()
852 rdev->pm.dpm.sd = sd; in radeon_uvd_note_usage()
853 rdev->pm.dpm.hd = hd; in radeon_uvd_note_usage()
860 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in radeon_uvd_note_usage()
861 radeon_dpm_enable_uvd(rdev, true); in radeon_uvd_note_usage()
863 radeon_set_uvd_clocks(rdev, 53300, 40000); in radeon_uvd_note_usage()
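
note_usage() and the idle worker form a classic idle timer: each use cancels the pending idle work and re-arms it, and when cancel_delayed_work_sync() returns false the worker has already run and dropped the UVD clocks, so set_clocks turns true and the clocks come back up. A single-threaded analogue using a deadline instead of a workqueue; every name here is hypothetical and the timeout is arbitrary.

#include <stdbool.h>
#include <time.h>

#define UVD_IDLE_TIMEOUT_SEC 1 /* arbitrary; the driver uses its own delay */

static time_t idle_deadline;
static bool uvd_powered;

static void uvd_note_usage(void)
{
	/* "cancel + re-arm": every use pushes the power-down deadline out. */
	bool was_idle = !uvd_powered;

	idle_deadline = time(NULL) + UVD_IDLE_TIMEOUT_SEC;
	if (was_idle)
		uvd_powered = true;  /* like radeon_dpm_enable_uvd(rdev, true) */
}

static void uvd_idle_tick(void)
{
	/* Power down only once the deadline passes with no intervening
	 * uvd_note_usage() call, just as the worker only fires if the
	 * delayed work was never cancelled and re-queued. */
	if (uvd_powered && time(NULL) >= idle_deadline)
		uvd_powered = false; /* like radeon_dpm_enable_uvd(rdev, false) */
}

int main(void)
{
	uvd_note_usage(); /* first use powers UVD up and arms the deadline */
	uvd_idle_tick();  /* too early, so UVD stays powered */
	return 0;
}
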
910 int radeon_uvd_calc_upll_dividers(struct radeon_device *rdev, in radeon_uvd_calc_upll_dividers() argument
920 unsigned vco_freq, ref_freq = rdev->clock.spll.reference_freq; in radeon_uvd_calc_upll_dividers()
973 int radeon_uvd_send_upll_ctlreq(struct radeon_device *rdev, in radeon_uvd_send_upll_ctlreq() argument
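
radeon_uvd_calc_upll_dividers() closes the loop on the clock requests above: the VCO runs at the SPLL reference frequency scaled by a feedback divider, vclk and dclk are the VCO divided by per-clock post dividers, and the routine searches the candidates for the smallest combined frequency error before radeon_uvd_send_upll_ctlreq() hands the result to the hardware. The sketch below is a deliberately simplified search under those assumptions; the kernel version additionally honors divider masks, even-divider constraints, and VCO min/max bounds, and the 27 MHz reference and unit convention here are illustrative.

#include <stdint.h>
#include <stdio.h>

/* Score one VCO candidate: combined distance of vclk and dclk from their
 * targets with the nearest integer post dividers for this VCO frequency. */
static unsigned pll_score(unsigned vco, unsigned vclk, unsigned dclk,
			  unsigned *vdiv, unsigned *ddiv)
{
	unsigned vout, dout;

	*vdiv = (vco + vclk / 2) / vclk;
	*ddiv = (vco + dclk / 2) / dclk;
	if (*vdiv == 0)
		*vdiv = 1;
	if (*ddiv == 0)
		*ddiv = 1;
	vout = vco / *vdiv;
	dout = vco / *ddiv;
	return (vout > vclk ? vout - vclk : vclk - vout) +
	       (dout > dclk ? dout - dclk : dclk - dout);
}

int main(void)
{
	unsigned ref_freq = 2700;            /* hypothetical 27 MHz reference, same units as below */
	unsigned vclk = 53300, dclk = 40000; /* the targets note_usage() requests above */
	unsigned best_score = ~0u, best_fb = 0, best_vdiv = 0, best_ddiv = 0;

	/* Scan feedback dividers; vco = ref_freq * fb_div. */
	for (unsigned fb_div = 1; fb_div <= 64; ++fb_div) {
		unsigned vco = ref_freq * fb_div;
		unsigned vdiv, ddiv;
		unsigned score = pll_score(vco, vclk, dclk, &vdiv, &ddiv);

		if (score < best_score) {
			best_score = score;
			best_fb = fb_div;
			best_vdiv = vdiv;
			best_ddiv = ddiv;
		}
	}
	printf("fb_div=%u vclk_div=%u dclk_div=%u (error %u)\n",
	       best_fb, best_vdiv, best_ddiv, best_score);
	return 0;
}
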