Lines matching refs: r (references to the error-return variable r; all hits are in amdgpu_cs.c)

347 	int r;  in amdgpu_cs_list_validate()  local
374 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in amdgpu_cs_list_validate()
378 if (unlikely(r)) { in amdgpu_cs_list_validate()
379 if (r != -ERESTARTSYS && domain != lobj->allowed_domains) { in amdgpu_cs_list_validate()
383 return r; in amdgpu_cs_list_validate()
397 int i, r; in amdgpu_cs_parser_relocs() local
419 r = ttm_eu_reserve_buffers(&p->ticket, &p->validated, true, &duplicates); in amdgpu_cs_parser_relocs()
420 if (unlikely(r != 0)) in amdgpu_cs_parser_relocs()
423 r = amdgpu_cs_list_validate(p->adev, &fpriv->vm, &p->validated); in amdgpu_cs_parser_relocs()
424 if (r) in amdgpu_cs_parser_relocs()
427 r = amdgpu_cs_list_validate(p->adev, &fpriv->vm, &duplicates); in amdgpu_cs_parser_relocs()
430 if (r) in amdgpu_cs_parser_relocs()
437 return r; in amdgpu_cs_parser_relocs()
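
Taken together, the amdgpu_cs_parser_relocs() hits at 419-437 show the reserve-then-validate pattern: every BO in the submission is reserved under one ww_acquire ticket, then the main list and the duplicates list are both moved into allowed placements. A minimal sketch of that pattern; passing fpriv explicitly and dropping the error back-off are simplifications, the real function takes only the parser:

static int reserve_and_validate_sketch(struct amdgpu_cs_parser *p,
                                       struct amdgpu_fpriv *fpriv)
{
        struct list_head duplicates;    /* BOs listed more than once in the submission */
        int r;

        INIT_LIST_HEAD(&duplicates);

        /* 419: reserve the whole buffer list atomically (interruptible) */
        r = ttm_eu_reserve_buffers(&p->ticket, &p->validated, true, &duplicates);
        if (unlikely(r != 0))
                return r;

        /* 423/427: validate placements for the main list, then the duplicates */
        r = amdgpu_cs_list_validate(p->adev, &fpriv->vm, &p->validated);
        if (!r)
                r = amdgpu_cs_list_validate(p->adev, &fpriv->vm, &duplicates);

        return r;       /* 437: the real code backs off the reservation on failure first */
}
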
443 int r; in amdgpu_cs_sync_rings() local
447 r = amdgpu_sync_resv(p->adev, &p->ibs[0].sync, resv, p->filp); in amdgpu_cs_sync_rings()
449 if (r) in amdgpu_cs_sync_rings()
450 return r; in amdgpu_cs_sync_rings()
522 int i, r; in amdgpu_bo_vm_update_pte() local
524 r = amdgpu_vm_update_page_directory(adev, vm); in amdgpu_bo_vm_update_pte()
525 if (r) in amdgpu_bo_vm_update_pte()
526 return r; in amdgpu_bo_vm_update_pte()
528 r = amdgpu_sync_fence(adev, &p->ibs[0].sync, vm->page_directory_fence); in amdgpu_bo_vm_update_pte()
529 if (r) in amdgpu_bo_vm_update_pte()
530 return r; in amdgpu_bo_vm_update_pte()
532 r = amdgpu_vm_clear_freed(adev, vm); in amdgpu_bo_vm_update_pte()
533 if (r) in amdgpu_bo_vm_update_pte()
534 return r; in amdgpu_bo_vm_update_pte()
549 r = amdgpu_vm_bo_update(adev, bo_va, &bo->tbo.mem); in amdgpu_bo_vm_update_pte()
550 if (r) in amdgpu_bo_vm_update_pte()
551 return r; in amdgpu_bo_vm_update_pte()
554 r = amdgpu_sync_fence(adev, &p->ibs[0].sync, f); in amdgpu_bo_vm_update_pte()
555 if (r) in amdgpu_bo_vm_update_pte()
556 return r; in amdgpu_bo_vm_update_pte()
561 r = amdgpu_vm_clear_invalids(adev, vm, &p->ibs[0].sync); in amdgpu_bo_vm_update_pte()
575 return r; in amdgpu_bo_vm_update_pte()
584 int i, r; in amdgpu_cs_ib_vm_chunk() local
593 r = amdgpu_ring_parse_cs(ring, parser, i); in amdgpu_cs_ib_vm_chunk()
594 if (r) in amdgpu_cs_ib_vm_chunk()
595 return r; in amdgpu_cs_ib_vm_chunk()
599 r = amdgpu_bo_vm_update_pte(parser, vm); in amdgpu_cs_ib_vm_chunk()
600 if (!r) in amdgpu_cs_ib_vm_chunk()
603 return r; in amdgpu_cs_ib_vm_chunk()
606 static int amdgpu_cs_handle_lockup(struct amdgpu_device *adev, int r) in amdgpu_cs_handle_lockup() argument
608 if (r == -EDEADLK) { in amdgpu_cs_handle_lockup()
609 r = amdgpu_gpu_reset(adev); in amdgpu_cs_handle_lockup()
610 if (!r) in amdgpu_cs_handle_lockup()
611 r = -EAGAIN; in amdgpu_cs_handle_lockup()
613 return r; in amdgpu_cs_handle_lockup()
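
The hits at 606-613 cover the whole lockup helper: a -EDEADLK from submission triggers amdgpu_gpu_reset(), and a successful reset is rewritten to -EAGAIN so userspace resubmits. A caller-side sketch of that convention; do_submit() is a hypothetical stand-in for the real parse/validate/schedule path:

static int submit_and_handle_lockup(struct amdgpu_device *adev)
{
        int r;

        r = do_submit(adev);    /* hypothetical: stands in for the real submission steps */

        /* 608-611: -EDEADLK -> try amdgpu_gpu_reset(); if the reset worked, report -EAGAIN */
        r = amdgpu_cs_handle_lockup(adev, r);
        return r;       /* the ioctls at 824 and 911 return this value to userspace unchanged */
}
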
622 int r; in amdgpu_cs_ib_fill() local
637 r = amdgpu_cs_get_ring(adev, chunk_ib->ip_type, in amdgpu_cs_ib_fill()
640 if (r) in amdgpu_cs_ib_fill()
641 return r; in amdgpu_cs_ib_fill()
663 r = amdgpu_bo_kmap(aobj, (void **)&kptr); in amdgpu_cs_ib_fill()
664 if (r) { in amdgpu_cs_ib_fill()
665 return r; in amdgpu_cs_ib_fill()
671 r = amdgpu_ib_get(ring, NULL, chunk_ib->ib_bytes, ib); in amdgpu_cs_ib_fill()
672 if (r) { in amdgpu_cs_ib_fill()
674 return r; in amdgpu_cs_ib_fill()
680 r = amdgpu_ib_get(ring, vm, 0, ib); in amdgpu_cs_ib_fill()
681 if (r) { in amdgpu_cs_ib_fill()
683 return r; in amdgpu_cs_ib_fill()
738 int i, j, r; in amdgpu_cs_dependencies() local
764 r = amdgpu_cs_get_ring(adev, deps[j].ip_type, in amdgpu_cs_dependencies()
767 if (r) in amdgpu_cs_dependencies()
768 return r; in amdgpu_cs_dependencies()
777 r = PTR_ERR(fence); in amdgpu_cs_dependencies()
779 return r; in amdgpu_cs_dependencies()
782 r = amdgpu_sync_fence(adev, &ib->sync, fence); in amdgpu_cs_dependencies()
785 if (r) in amdgpu_cs_dependencies()
786 return r; in amdgpu_cs_dependencies()
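
The amdgpu_cs_dependencies() hits show the usual ERR_PTR folding for a looked-up dependency fence: a lookup error becomes the integer return, and a real fence is added to the IB's sync object so the submission waits for it. A helper-style sketch of that idiom, with the fence lookup itself omitted and the final fence_put() assumed to match the real code:

static int add_dependency_sketch(struct amdgpu_device *adev,
                                 struct amdgpu_ib *ib, struct fence *fence)
{
        int r;

        if (IS_ERR(fence))
                return PTR_ERR(fence);  /* 777-779: lookup failure becomes the return value */
        if (!fence)
                return 0;               /* nothing to wait on */

        r = amdgpu_sync_fence(adev, &ib->sync, fence);  /* 782: make the IB wait on the fence */
        fence_put(fence);               /* drop the lookup reference (assumed) */
        return r;                       /* 785-786 */
}
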
812 int i, r; in amdgpu_cs_ioctl() local
820 r = amdgpu_cs_parser_init(&parser, data); in amdgpu_cs_ioctl()
821 if (r) { in amdgpu_cs_ioctl()
823 amdgpu_cs_parser_fini(&parser, r, false); in amdgpu_cs_ioctl()
824 r = amdgpu_cs_handle_lockup(adev, r); in amdgpu_cs_ioctl()
825 return r; in amdgpu_cs_ioctl()
827 r = amdgpu_cs_parser_relocs(&parser); in amdgpu_cs_ioctl()
828 if (r == -ENOMEM) in amdgpu_cs_ioctl()
830 else if (r && r != -ERESTARTSYS) in amdgpu_cs_ioctl()
831 DRM_ERROR("Failed to process the buffer list %d!\n", r); in amdgpu_cs_ioctl()
832 else if (!r) { in amdgpu_cs_ioctl()
834 r = amdgpu_cs_ib_fill(adev, &parser); in amdgpu_cs_ioctl()
837 if (!r) { in amdgpu_cs_ioctl()
838 r = amdgpu_cs_dependencies(adev, &parser); in amdgpu_cs_ioctl()
839 if (r) in amdgpu_cs_ioctl()
840 DRM_ERROR("Failed in the dependencies handling %d!\n", r); in amdgpu_cs_ioctl()
843 if (r) in amdgpu_cs_ioctl()
849 r = amdgpu_cs_ib_vm_chunk(adev, &parser); in amdgpu_cs_ioctl()
850 if (r) in amdgpu_cs_ioctl()
860 r = -ENOMEM; in amdgpu_cs_ioctl()
884 r = -ENOMEM; in amdgpu_cs_ioctl()
902 r = amdgpu_ib_schedule(adev, parser.num_ibs, parser.ibs, in amdgpu_cs_ioctl()
910 amdgpu_cs_parser_fini(&parser, r, reserved_buffers); in amdgpu_cs_ioctl()
911 r = amdgpu_cs_handle_lockup(adev, r); in amdgpu_cs_ioctl()
912 return r; in amdgpu_cs_ioctl()
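
Read in order, the amdgpu_cs_ioctl() hits at 820-911 outline the whole submission path. A condensed sketch of that control flow, simplified: the -ENOMEM retry, error labels and fence bookkeeping are left out, and reserved-buffer tracking is reduced to a single flag:

static int cs_ioctl_flow_sketch(struct amdgpu_device *adev, void *data)
{
        struct amdgpu_cs_parser parser;
        bool reserved_buffers = false;
        int r;

        r = amdgpu_cs_parser_init(&parser, data);               /* 820 */
        if (!r) {
                r = amdgpu_cs_parser_relocs(&parser);           /* 827: reserve + validate the BO list */
                if (!r)
                        reserved_buffers = true;
        }
        if (!r)
                r = amdgpu_cs_ib_fill(adev, &parser);           /* 834: build the IBs from the chunks */
        if (!r)
                r = amdgpu_cs_dependencies(adev, &parser);      /* 838: sync to dependency fences */
        if (!r)
                r = amdgpu_cs_ib_vm_chunk(adev, &parser);       /* 849: ring CS parse + VM page tables */

        /* 902: amdgpu_ib_schedule() and the fence/sequence handling follow in the real ioctl */

        amdgpu_cs_parser_fini(&parser, r, reserved_buffers);    /* 910: unreserve and clean up */
        return amdgpu_cs_handle_lockup(adev, r);                /* 911 */
}
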
933 long r; in amdgpu_cs_wait_ioctl() local
935 r = amdgpu_cs_get_ring(adev, wait->in.ip_type, wait->in.ip_instance, in amdgpu_cs_wait_ioctl()
937 if (r) in amdgpu_cs_wait_ioctl()
938 return r; in amdgpu_cs_wait_ioctl()
946 r = PTR_ERR(fence); in amdgpu_cs_wait_ioctl()
948 r = fence_wait_timeout(fence, true, timeout); in amdgpu_cs_wait_ioctl()
951 r = 1; in amdgpu_cs_wait_ioctl()
954 if (r < 0) in amdgpu_cs_wait_ioctl()
955 return r; in amdgpu_cs_wait_ioctl()
958 wait->out.status = (r == 0); in amdgpu_cs_wait_ioctl()
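
Finally, in amdgpu_cs_wait_ioctl() the value of r does triple duty: negative means an error, zero means the wait timed out, positive means the fence signaled (or there was no fence at all). A sketch of how that maps onto the ioctl's output, assuming the uapi union drm_amdgpu_wait_cs and a fence already looked up by the caller:

static long wait_status_sketch(struct fence *fence, unsigned long timeout,
                               union drm_amdgpu_wait_cs *wait)
{
        long r;

        if (IS_ERR(fence))
                r = PTR_ERR(fence);                             /* 946: lookup failed */
        else if (fence) {
                r = fence_wait_timeout(fence, true, timeout);   /* 948: >0 signaled, 0 timed out, <0 error */
                fence_put(fence);
        } else
                r = 1;                                          /* 951: no fence, treat as signaled */

        if (r < 0)
                return r;                                       /* 954-955 */

        wait->out.status = (r == 0);                            /* 958: status==1 means still running */
        return 0;
}
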