bos 63 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c struct list_head bos;
bos 85 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c list_for_each_entry_safe(bo, next_bo, &node->bos, mn_list) {
bos 177 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c list_for_each_entry(bo, &node->bos, mn_list) {
bos 276 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c list_for_each_entry(bo, &node->bos, mn_list) {
bos 383 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c struct list_head bos;
bos 394 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c INIT_LIST_HEAD(&bos);
bos 404 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c list_splice(&node->bos, &bos);
bos 416 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c INIT_LIST_HEAD(&node->bos);
bos 417 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c list_splice(&bos, &node->bos);
bos 418 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c list_add(&bo->mn_list, &node->bos);
bos 459 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c node = container_of(head, struct amdgpu_mn_node, bos);
bos 141 drivers/gpu/drm/etnaviv/etnaviv_dump.c obj = submit->bos[i].obj;
bos 201 drivers/gpu/drm/etnaviv/etnaviv_dump.c obj = submit->bos[i].obj;
bos 202 drivers/gpu/drm/etnaviv/etnaviv_dump.c vram = submit->bos[i].mapping;
bos 108 drivers/gpu/drm/etnaviv/etnaviv_gem.h struct etnaviv_gem_submit_bo bos[0];
bos 35 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c size_t sz = size_vstruct(nr_bos, sizeof(submit->bos[0]), sizeof(*submit));
bos 74 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].flags = bo->flags;
bos 81 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].va = bo->presumed;
bos 101 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].obj = to_etnaviv_bo(obj);
bos 113 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (submit->bos[i].flags & BO_LOCKED) {
bos 114 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj = &submit->bos[i].obj->base;
bos 117 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].flags &= ~BO_LOCKED;
bos 128 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj = &submit->bos[i].obj->base;
bos 135 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (!(submit->bos[i].flags & BO_LOCKED)) {
bos 143 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].flags |= BO_LOCKED;
bos 161 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c obj = &submit->bos[contended].obj->base;
bos 167 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[contended].flags |= BO_LOCKED;
bos 181 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct etnaviv_gem_submit_bo *bo = &submit->bos[i];
bos 213 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_gem_object *obj = &submit->bos[i].obj->base;
bos 215 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (submit->bos[i].flags & ETNA_SUBMIT_BO_WRITE)
bos 231 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct etnaviv_gem_object *etnaviv_obj = submit->bos[i].obj;
bos 236 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].va);
bos 243 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].va != mapping->iova) {
bos 250 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].flags |= BO_PINNED;
bos 251 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].mapping = mapping;
bos 266 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c *bo = &submit->bos[idx];
bos 391 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct etnaviv_gem_object *etnaviv_obj = submit->bos[i].obj;
bos 394 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (submit->bos[i].flags & BO_PINNED) {
bos 395 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c etnaviv_gem_mapping_unreference(submit->bos[i].mapping);
bos 397 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].mapping = NULL;
bos 398 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].flags &= ~BO_PINNED;
bos 434 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c struct drm_etnaviv_gem_submit_bo *bos;
bos 478 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c bos = kvmalloc_array(args->nr_bos, sizeof(*bos), GFP_KERNEL);
bos 482 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (!bos || !relocs || !pmrs || !stream) {
bos 487 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ret = copy_from_user(bos, u64_to_user_ptr(args->bos),
bos 488 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c args->nr_bos * sizeof(*bos));
bos 542 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c ret = submit_lookup_objects(submit, file, bos, args->nr_bos);
bos 619 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c if (bos)
bos 620 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c kvfree(bos);
bos 39 drivers/gpu/drm/etnaviv/etnaviv_sched.c struct etnaviv_gem_submit_bo *bo = &submit->bos[i];
bos 93 drivers/gpu/drm/lima/lima_drv.c struct drm_lima_gem_submit_bo *bos;
bos 111 drivers/gpu/drm/lima/lima_drv.c bos = kvcalloc(args->nr_bos, sizeof(*submit.bos) + sizeof(*submit.lbos), GFP_KERNEL);
bos 112 drivers/gpu/drm/lima/lima_drv.c if (!bos)
bos 115 drivers/gpu/drm/lima/lima_drv.c size = args->nr_bos * sizeof(*submit.bos);
bos 116 drivers/gpu/drm/lima/lima_drv.c if (copy_from_user(bos, u64_to_user_ptr(args->bos), size)) {
bos 144 drivers/gpu/drm/lima/lima_drv.c submit.bos = bos;
bos 145 drivers/gpu/drm/lima/lima_drv.c submit.lbos = (void *)bos + size;
bos 161 drivers/gpu/drm/lima/lima_drv.c kvfree(bos);
bos 29 drivers/gpu/drm/lima/lima_drv.h struct drm_lima_gem_submit_bo *bos;
bos 151 drivers/gpu/drm/lima/lima_gem.c static int lima_gem_lock_bos(struct lima_bo **bos, u32 nr_bos,
bos 165 drivers/gpu/drm/lima/lima_gem.c ret = ww_mutex_lock_interruptible(&bos[i]->gem.resv->lock, ctx);
bos 177 drivers/gpu/drm/lima/lima_gem.c ww_mutex_unlock(&bos[i]->gem.resv->lock);
bos 180 drivers/gpu/drm/lima/lima_gem.c ww_mutex_unlock(&bos[slow_locked]->gem.resv->lock);
bos 185 drivers/gpu/drm/lima/lima_gem.c &bos[contended]->gem.resv->lock, ctx);
bos 196 drivers/gpu/drm/lima/lima_gem.c static void lima_gem_unlock_bos(struct lima_bo **bos, u32 nr_bos,
bos 202 drivers/gpu/drm/lima/lima_gem.c ww_mutex_unlock(&bos[i]->gem.resv->lock);
bos 239 drivers/gpu/drm/lima/lima_gem.c struct lima_bo **bos = submit->lbos;
bos 251 drivers/gpu/drm/lima/lima_gem.c obj = drm_gem_object_lookup(file, submit->bos[i].handle);
bos 268 drivers/gpu/drm/lima/lima_gem.c bos[i] = bo;
bos 271 drivers/gpu/drm/lima/lima_gem.c err = lima_gem_lock_bos(bos, submit->nr_bos, &ctx);
bos 277 drivers/gpu/drm/lima/lima_gem.c bos, submit->nr_bos, vm);
bos 287 drivers/gpu/drm/lima/lima_gem.c submit->task, bos[i],
bos 288 drivers/gpu/drm/lima/lima_gem.c submit->bos[i].flags & LIMA_SUBMIT_BO_WRITE,
bos 298 drivers/gpu/drm/lima/lima_gem.c if (submit->bos[i].flags & LIMA_SUBMIT_BO_WRITE)
bos 299 drivers/gpu/drm/lima/lima_gem.c dma_resv_add_excl_fence(bos[i]->gem.resv, fence);
bos 301 drivers/gpu/drm/lima/lima_gem.c dma_resv_add_shared_fence(bos[i]->gem.resv, fence);
bos 304 drivers/gpu/drm/lima/lima_gem.c lima_gem_unlock_bos(bos, submit->nr_bos, &ctx);
bos 307 drivers/gpu/drm/lima/lima_gem.c drm_gem_object_put_unlocked(&bos[i]->gem);
bos 321 drivers/gpu/drm/lima/lima_gem.c lima_gem_unlock_bos(bos, submit->nr_bos, &ctx);
bos 324 drivers/gpu/drm/lima/lima_gem.c if (!bos[i])
bos 326 drivers/gpu/drm/lima/lima_gem.c lima_vm_bo_del(vm, bos[i]);
bos 327 drivers/gpu/drm/lima/lima_gem.c drm_gem_object_put_unlocked(&bos[i]->gem);
bos 110 drivers/gpu/drm/lima/lima_sched.c struct lima_bo **bos, int num_bos,
bos 115 drivers/gpu/drm/lima/lima_sched.c task->bos = kmemdup(bos, sizeof(*bos) * num_bos, GFP_KERNEL);
bos 116 drivers/gpu/drm/lima/lima_sched.c if (!task->bos)
bos 120 drivers/gpu/drm/lima/lima_sched.c drm_gem_object_get(&bos[i]->gem);
bos 124 drivers/gpu/drm/lima/lima_sched.c kfree(task->bos);
bos 149 drivers/gpu/drm/lima/lima_sched.c if (task->bos) {
bos 151 drivers/gpu/drm/lima/lima_sched.c drm_gem_object_put_unlocked(&task->bos[i]->gem);
bos 152 drivers/gpu/drm/lima/lima_sched.c kfree(task->bos);
bos 302 drivers/gpu/drm/lima/lima_sched.c struct lima_bo **bos = task->bos;
bos 308 drivers/gpu/drm/lima/lima_sched.c lima_vm_bo_del(vm, bos[i]);
bos 20 drivers/gpu/drm/lima/lima_sched.h struct lima_bo **bos;
bos 77 drivers/gpu/drm/lima/lima_sched.h struct lima_bo **bos, int num_bos,
bos 65 drivers/gpu/drm/msm/adreno/a5xx_gpu.c obj = submit->bos[submit->cmd[i].idx].obj;
bos 595 drivers/gpu/drm/msm/adreno/adreno_gpu.c for (i = 0; state->bos && i < state->nr_bos; i++)
bos 596 drivers/gpu/drm/msm/adreno/adreno_gpu.c kvfree(state->bos[i].data);
bos 598 drivers/gpu/drm/msm/adreno/adreno_gpu.c kfree(state->bos);
bos 726 drivers/gpu/drm/msm/adreno/adreno_gpu.c if (state->bos) {
bos 731 drivers/gpu/drm/msm/adreno/adreno_gpu.c state->bos[i].iova);
bos 732 drivers/gpu/drm/msm/adreno/adreno_gpu.c drm_printf(p, " size: %zd\n", state->bos[i].size);
bos 734 drivers/gpu/drm/msm/adreno/adreno_gpu.c adreno_show_object(p, &state->bos[i].data,
bos 735 drivers/gpu/drm/msm/adreno/adreno_gpu.c state->bos[i].size, &state->bos[i].encoded);
bos 923 drivers/gpu/drm/msm/disp/dpu1/dpu_formats.c struct drm_gem_object **bos)
bos 931 drivers/gpu/drm/msm/disp/dpu1/dpu_formats.c if (!msm_fmt || !cmd || !bos) {
bos 947 drivers/gpu/drm/msm/disp/dpu1/dpu_formats.c if (!bos[i]) {
bos 951 drivers/gpu/drm/msm/disp/dpu1/dpu_formats.c if ((i == 0) || (bos[i] != bos[0]))
bos 952 drivers/gpu/drm/msm/disp/dpu1/dpu_formats.c bos_total_size += bos[i]->size;
bos 49 drivers/gpu/drm/msm/disp/dpu1/dpu_formats.h struct drm_gem_object **bos);
bos 25 drivers/gpu/drm/msm/msm_fb.c const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
bos 104 drivers/gpu/drm/msm/msm_fb.c struct drm_gem_object *bos[4] = {0};
bos 109 drivers/gpu/drm/msm/msm_fb.c bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]);
bos 110 drivers/gpu/drm/msm/msm_fb.c if (!bos[i]) {
bos 116 drivers/gpu/drm/msm/msm_fb.c fb = msm_framebuffer_init(dev, mode_cmd, bos);
bos 126 drivers/gpu/drm/msm/msm_fb.c drm_gem_object_put_unlocked(bos[i]);
bos 131 drivers/gpu/drm/msm/msm_fb.c const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos)
bos 180 drivers/gpu/drm/msm/msm_fb.c if (bos[i]->size < min_size) {
bos 185 drivers/gpu/drm/msm/msm_fb.c msm_fb->base.obj[i] = bos[i];
bos 160 drivers/gpu/drm/msm/msm_gem.h } bos[0];
bos 33 drivers/gpu/drm/msm/msm_gem_submit.c uint64_t sz = struct_size(submit, bos, nr_bos) +
bos 47 drivers/gpu/drm/msm/msm_gem_submit.c submit->cmd = (void *)&submit->bos[nr_bos];
bos 81 drivers/gpu/drm/msm/msm_gem_submit.c u64_to_user_ptr(args->bos + (i * sizeof(submit_bo)));
bos 86 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].flags = 0;
bos 105 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].handle = submit_bo.handle;
bos 106 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].flags = submit_bo.flags;
bos 108 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].iova = submit_bo.presumed;
bos 120 drivers/gpu/drm/msm/msm_gem_submit.c obj = idr_find(&file->object_idr, submit->bos[i].handle);
bos 122 drivers/gpu/drm/msm/msm_gem_submit.c DRM_ERROR("invalid handle %u at index %u\n", submit->bos[i].handle, i);
bos 131 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].handle, i);
bos 138 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].obj = msm_obj;
bos 155 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj;
bos 157 drivers/gpu/drm/msm/msm_gem_submit.c if (submit->bos[i].flags & BO_PINNED)
bos 160 drivers/gpu/drm/msm/msm_gem_submit.c if (submit->bos[i].flags & BO_LOCKED)
bos 163 drivers/gpu/drm/msm/msm_gem_submit.c if (backoff && !(submit->bos[i].flags & BO_VALID))
bos 164 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].iova = 0;
bos 166 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].flags &= ~(BO_LOCKED | BO_PINNED);
bos 176 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj;
bos 183 drivers/gpu/drm/msm/msm_gem_submit.c if (!(submit->bos[i].flags & BO_LOCKED)) {
bos 188 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].flags |= BO_LOCKED;
bos 204 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[contended].obj;
bos 209 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[contended].flags |= BO_LOCKED;
bos 223 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj;
bos 224 drivers/gpu/drm/msm/msm_gem_submit.c bool write = submit->bos[i].flags & MSM_SUBMIT_BO_WRITE;
bos 257 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj;
bos 267 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].flags |= BO_PINNED;
bos 269 drivers/gpu/drm/msm/msm_gem_submit.c if (iova == submit->bos[i].iova) {
bos 270 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].flags |= BO_VALID;
bos 272 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].iova = iova;
bos 274 drivers/gpu/drm/msm/msm_gem_submit.c submit->bos[i].flags &= ~BO_VALID;
bos 292 drivers/gpu/drm/msm/msm_gem_submit.c *obj = submit->bos[idx].obj;
bos 294 drivers/gpu/drm/msm/msm_gem_submit.c *iova = submit->bos[idx].iova;
bos 296 drivers/gpu/drm/msm/msm_gem_submit.c *valid = !!(submit->bos[idx].flags & BO_VALID);
bos 388 drivers/gpu/drm/msm/msm_gem_submit.c struct msm_gem_object *msm_obj = submit->bos[i].obj;
bos 308 drivers/gpu/drm/msm/msm_gpu.c struct msm_gpu_state_bo *state_bo = &state->bos[state->nr_bos];
bos 360 drivers/gpu/drm/msm/msm_gpu.c state->bos = kcalloc(submit->nr_cmds,
bos 363 drivers/gpu/drm/msm/msm_gpu.c for (i = 0; state->bos && i < submit->nr_cmds; i++) {
bos 366 drivers/gpu/drm/msm/msm_gpu.c msm_gpu_crashstate_get_bo(state, submit->bos[idx].obj,
bos 367 drivers/gpu/drm/msm/msm_gpu.c submit->bos[idx].iova, submit->bos[idx].flags);
bos 675 drivers/gpu/drm/msm/msm_gpu.c struct msm_gem_object *msm_obj = submit->bos[i].obj;
bos 752 drivers/gpu/drm/msm/msm_gpu.c struct msm_gem_object *msm_obj = submit->bos[i].obj;
bos 764 drivers/gpu/drm/msm/msm_gpu.c if (submit->bos[i].flags & MSM_SUBMIT_BO_WRITE)
bos 766 drivers/gpu/drm/msm/msm_gpu.c else if (submit->bos[i].flags & MSM_SUBMIT_BO_READ)
bos 211 drivers/gpu/drm/msm/msm_gpu.h struct msm_gpu_state_bo *bos;
bos 111 drivers/gpu/drm/msm/msm_kms.h struct drm_gem_object **bos);
bos 303 drivers/gpu/drm/msm/msm_rd.c struct msm_gem_object *obj = submit->bos[idx].obj;
bos 308 drivers/gpu/drm/msm/msm_rd.c offset = iova - submit->bos[idx].iova;
bos 310 drivers/gpu/drm/msm/msm_rd.c iova = submit->bos[idx].iova;
bos 322 drivers/gpu/drm/msm/msm_rd.c if (!(submit->bos[idx].flags & MSM_SUBMIT_BO_READ))
bos 339 drivers/gpu/drm/msm/msm_rd.c return rd_full || (submit->bos[idx].flags & MSM_SUBMIT_BO_DUMP);
bos 312 drivers/gpu/drm/omapdrm/omap_fb.c struct drm_gem_object *bos[4];
bos 317 drivers/gpu/drm/omapdrm/omap_fb.c bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]);
bos 318 drivers/gpu/drm/omapdrm/omap_fb.c if (!bos[i]) {
bos 324 drivers/gpu/drm/omapdrm/omap_fb.c fb = omap_framebuffer_init(dev, mode_cmd, bos);
bos 332 drivers/gpu/drm/omapdrm/omap_fb.c drm_gem_object_put_unlocked(bos[i]);
bos 338 drivers/gpu/drm/omapdrm/omap_fb.c const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos)
bos 400 drivers/gpu/drm/omapdrm/omap_fb.c if (size > omap_gem_mmap_size(bos[i]) - mode_cmd->offsets[i]) {
bos 403 drivers/gpu/drm/omapdrm/omap_fb.c bos[i]->size - mode_cmd->offsets[i], size);
bos 408 drivers/gpu/drm/omapdrm/omap_fb.c fb->obj[i] = bos[i];
bos 25 drivers/gpu/drm/omapdrm/omap_fb.h const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
bos 149 drivers/gpu/drm/panfrost/panfrost_drv.c job->bo_count, &job->bos);
bos 162 drivers/gpu/drm/panfrost/panfrost_drv.c bo = to_panfrost_bo(job->bos[i]);
bos 190 drivers/gpu/drm/panfrost/panfrost_job.c static void panfrost_acquire_object_fences(struct drm_gem_object **bos,
bos 197 drivers/gpu/drm/panfrost/panfrost_job.c implicit_fences[i] = dma_resv_get_excl_rcu(bos[i]->resv);
bos 200 drivers/gpu/drm/panfrost/panfrost_job.c static void panfrost_attach_object_fences(struct drm_gem_object **bos,
bos 207 drivers/gpu/drm/panfrost/panfrost_job.c dma_resv_add_excl_fence(bos[i]->resv, fence);
bos 220 drivers/gpu/drm/panfrost/panfrost_job.c ret = drm_gem_lock_reservations(job->bos, job->bo_count,
bos 237 drivers/gpu/drm/panfrost/panfrost_job.c panfrost_acquire_object_fences(job->bos, job->bo_count,
bos 244 drivers/gpu/drm/panfrost/panfrost_job.c panfrost_attach_object_fences(job->bos, job->bo_count,
bos 248 drivers/gpu/drm/panfrost/panfrost_job.c drm_gem_unlock_reservations(job->bos, job->bo_count, &acquire_ctx);
bos 283 drivers/gpu/drm/panfrost/panfrost_job.c if (job->bos) {
bos 287 drivers/gpu/drm/panfrost/panfrost_job.c bo = to_panfrost_bo(job->bos[i]);
bos 288 drivers/gpu/drm/panfrost/panfrost_job.c drm_gem_object_put_unlocked(job->bos[i]);
bos 291 drivers/gpu/drm/panfrost/panfrost_job.c kvfree(job->bos);
bos 36 drivers/gpu/drm/panfrost/panfrost_job.h struct drm_gem_object **bos;
bos 157 drivers/gpu/drm/qxl/qxl_drv.h struct list_head bos;
bos 140 drivers/gpu/drm/qxl/qxl_release.c INIT_LIST_HEAD(&release->bos);
bos 162 drivers/gpu/drm/qxl/qxl_release.c while (!list_empty(&release->bos)) {
bos 166 drivers/gpu/drm/qxl/qxl_release.c entry = container_of(release->bos.next,
bos 190 drivers/gpu/drm/qxl/qxl_release.c WARN_ON(list_empty(&release->bos));
bos 213 drivers/gpu/drm/qxl/qxl_release.c list_for_each_entry(entry, &release->bos, tv.head) {
bos 225 drivers/gpu/drm/qxl/qxl_release.c list_add_tail(&entry->tv.head, &release->bos);
bos 259 drivers/gpu/drm/qxl/qxl_release.c if (list_is_singular(&release->bos))
bos 262 drivers/gpu/drm/qxl/qxl_release.c ret = ttm_eu_reserve_buffers(&release->ticket, &release->bos,
bos 267 drivers/gpu/drm/qxl/qxl_release.c list_for_each_entry(entry, &release->bos, tv.head) {
bos 272 drivers/gpu/drm/qxl/qxl_release.c ttm_eu_backoff_reservation(&release->ticket, &release->bos);
bos 283 drivers/gpu/drm/qxl/qxl_release.c if (list_is_singular(&release->bos))
bos 286 drivers/gpu/drm/qxl/qxl_release.c ttm_eu_backoff_reservation(&release->ticket, &release->bos);
bos 439 drivers/gpu/drm/qxl/qxl_release.c if (list_is_singular(&release->bos) || list_empty(&release->bos))
bos 442 drivers/gpu/drm/qxl/qxl_release.c bo = list_first_entry(&release->bos, struct ttm_validate_buffer, head)->bo;
bos 458 drivers/gpu/drm/qxl/qxl_release.c list_for_each_entry(entry, &release->bos, head) {
bos 49 drivers/gpu/drm/radeon/radeon_mn.c struct list_head bos;
bos 97 drivers/gpu/drm/radeon/radeon_mn.c list_for_each_entry(bo, &node->bos, mn_list) {
bos 181 drivers/gpu/drm/radeon/radeon_mn.c struct list_head bos;
bos 189 drivers/gpu/drm/radeon/radeon_mn.c INIT_LIST_HEAD(&bos);
bos 199 drivers/gpu/drm/radeon/radeon_mn.c list_splice(&node->bos, &bos);
bos 214 drivers/gpu/drm/radeon/radeon_mn.c INIT_LIST_HEAD(&node->bos);
bos 215 drivers/gpu/drm/radeon/radeon_mn.c list_splice(&bos, &node->bos);
bos 216 drivers/gpu/drm/radeon/radeon_mn.c list_add(&bo->mn_list, &node->bos);
bos 248 drivers/gpu/drm/radeon/radeon_mn.c node = container_of(head, struct radeon_mn_node, bos);
bos 42 drivers/net/ethernet/netronome/nfp/flower/action.c if (act->mpls_push.bos != ACT_MPLS_BOS_NOT_SET) {
bos 43 drivers/net/ethernet/netronome/nfp/flower/action.c mpls_lse |= act->mpls_push.bos << MPLS_LS_S_SHIFT;
bos 91 drivers/net/ethernet/netronome/nfp/flower/action.c if (act->mpls_mangle.bos != ACT_MPLS_BOS_NOT_SET) {
bos 92 drivers/net/ethernet/netronome/nfp/flower/action.c mpls_lse |= act->mpls_mangle.bos << MPLS_LS_S_SHIFT;
bos 690 drivers/staging/wusbcore/devconnect.c struct usb_bos_descriptor *bos, size_t desc_size)
bos 697 drivers/staging/wusbcore/devconnect.c itr = (void *)bos + sizeof(*bos);
bos 698 drivers/staging/wusbcore/devconnect.c top = itr + desc_size - sizeof(*bos);
bos 706 drivers/staging/wusbcore/devconnect.c (int)(itr - (void *)bos), top - itr);
bos 718 drivers/staging/wusbcore/devconnect.c (int)(itr - (void *)bos),
bos 736 drivers/staging/wusbcore/devconnect.c cap_size, (int)(itr - (void *)bos));
bos 764 drivers/staging/wusbcore/devconnect.c struct usb_bos_descriptor *bos;
bos 767 drivers/staging/wusbcore/devconnect.c bos = kmalloc(alloc_size, GFP_KERNEL);
bos 768 drivers/staging/wusbcore/devconnect.c if (bos == NULL)
bos 770 drivers/staging/wusbcore/devconnect.c result = usb_get_descriptor(usb_dev, USB_DT_BOS, 0, bos, desc_size);
bos 776 drivers/staging/wusbcore/devconnect.c desc_size = le16_to_cpu(bos->wTotalLength);
bos 778 drivers/staging/wusbcore/devconnect.c kfree(bos);
bos 780 drivers/staging/wusbcore/devconnect.c bos = kmalloc(alloc_size, GFP_KERNEL);
bos 781 drivers/staging/wusbcore/devconnect.c if (bos == NULL)
bos 784 drivers/staging/wusbcore/devconnect.c result = usb_get_descriptor(usb_dev, USB_DT_BOS, 0, bos, desc_size);
bos 790 drivers/staging/wusbcore/devconnect.c if (result < sizeof(*bos)
bos 791 drivers/staging/wusbcore/devconnect.c || le16_to_cpu(bos->wTotalLength) != desc_size) {
bos 797 drivers/staging/wusbcore/devconnect.c result = wusb_dev_bos_grok(usb_dev, wusb_dev, bos, result);
bos 800 drivers/staging/wusbcore/devconnect.c wusb_dev->bos = bos;
bos 805 drivers/staging/wusbcore/devconnect.c kfree(bos);
bos 812 drivers/staging/wusbcore/devconnect.c kfree(wusb_dev->bos);
bos 93 drivers/staging/wusbcore/wusbhc.h struct usb_bos_descriptor *bos;
bos 967 drivers/usb/core/config.c if (dev->bos) {
bos 968 drivers/usb/core/config.c kfree(dev->bos->desc);
bos 969 drivers/usb/core/config.c kfree(dev->bos);
bos 970 drivers/usb/core/config.c dev->bos = NULL;
bos 987 drivers/usb/core/config.c struct usb_bos_descriptor *bos;
bos 995 drivers/usb/core/config.c bos = kzalloc(sizeof(struct usb_bos_descriptor), GFP_KERNEL);
bos 996 drivers/usb/core/config.c if (!bos)
bos 1000 drivers/usb/core/config.c ret = usb_get_descriptor(dev, USB_DT_BOS, 0, bos, USB_DT_BOS_SIZE);
bos 1001 drivers/usb/core/config.c if (ret < USB_DT_BOS_SIZE || bos->bLength < USB_DT_BOS_SIZE) {
bos 1005 drivers/usb/core/config.c kfree(bos);
bos 1009 drivers/usb/core/config.c length = bos->bLength;
bos 1010 drivers/usb/core/config.c total_len = le16_to_cpu(bos->wTotalLength);
bos 1011 drivers/usb/core/config.c num = bos->bNumDeviceCaps;
bos 1012 drivers/usb/core/config.c kfree(bos);
bos 1016 drivers/usb/core/config.c dev->bos = kzalloc(sizeof(struct usb_host_bos), GFP_KERNEL);
bos 1017 drivers/usb/core/config.c if (!dev->bos)
bos 1026 drivers/usb/core/config.c dev->bos->desc = (struct usb_bos_descriptor *)buffer;
bos 1044 drivers/usb/core/config.c dev->bos->desc->bNumDeviceCaps = i;
bos 1050 drivers/usb/core/config.c dev->bos->desc->bNumDeviceCaps = i;
bos 1064 drivers/usb/core/config.c dev->bos->ext_cap =
bos 1068 drivers/usb/core/config.c dev->bos->ss_cap =
bos 1076 drivers/usb/core/config.c dev->bos->ssp_cap = ssp_cap;
bos 1079 drivers/usb/core/config.c dev->bos->ss_id =
bos 1083 drivers/usb/core/config.c dev->bos->ptm_cap =
bos 1092 drivers/usb/core/config.c dev->bos->desc->wTotalLength = cpu_to_le16(buffer - buffer0);
bos 149 drivers/usb/core/hub.c if (udev->bos->ext_cap &&
bos 151 drivers/usb/core/hub.c le32_to_cpu(udev->bos->ext_cap->bmAttributes)))
bos 161 drivers/usb/core/hub.c if (!udev->bos->ss_cap) {
bos 166 drivers/usb/core/hub.c if (udev->bos->ss_cap->bU1devExitLat == 0 &&
bos 167 drivers/usb/core/hub.c udev->bos->ss_cap->bU2DevExitLat == 0) {
bos 325 drivers/usb/core/hub.c udev_u1_del = udev->bos->ss_cap->bU1devExitLat;
bos 326 drivers/usb/core/hub.c udev_u2_del = le16_to_cpu(udev->bos->ss_cap->bU2DevExitLat);
bos 327 drivers/usb/core/hub.c hub_u1_del = udev->parent->bos->ss_cap->bU1devExitLat;
bos 328 drivers/usb/core/hub.c hub_u2_del = le16_to_cpu(udev->parent->bos->ss_cap->bU2DevExitLat);
bos 2680 drivers/usb/core/hub.c struct usb_ssp_cap_descriptor *ssp_cap = hdev->bos->ssp_cap;
bos 4042 drivers/usb/core/hub.c __u8 u1_mel = udev->bos->ss_cap->bU1devExitLat;
bos 4043 drivers/usb/core/hub.c __le16 u2_mel = udev->bos->ss_cap->bU2DevExitLat;
bos 4497 drivers/usb/core/hub.c if (!udev->usb2_hw_lpm_capable || !udev->bos)
bos 4503 drivers/usb/core/hub.c if ((udev->bos->ext_cap->bmAttributes & cpu_to_le32(USB_BESL_SUPPORT)) ||
bos 5600 drivers/usb/core/hub.c if ((old_bos && !udev->bos) || (!old_bos && udev->bos))
bos 5602 drivers/usb/core/hub.c if (udev->bos) {
bos 5603 drivers/usb/core/hub.c len = le16_to_cpu(udev->bos->desc->wTotalLength);
bos 5606 drivers/usb/core/hub.c if (memcmp(udev->bos->desc, old_bos->desc, len))
bos 5708 drivers/usb/core/hub.c struct usb_host_bos *bos;
bos 5739 drivers/usb/core/hub.c bos = udev->bos;
bos 5740 drivers/usb/core/hub.c udev->bos = NULL;
bos 5756 drivers/usb/core/hub.c if (descriptors_changed(udev, &descriptor, bos)) {
bos 5834 drivers/usb/core/hub.c udev->bos = bos;
bos 5839 drivers/usb/core/hub.c udev->bos = bos;
bos 144 drivers/usb/core/hub.h hdev->bos->ssp_cap);
bos 616 drivers/usb/gadget/composite.c struct usb_bos_descriptor *bos = cdev->req->buf;
bos 619 drivers/usb/gadget/composite.c bos->bLength = USB_DT_BOS_SIZE;
bos 620 drivers/usb/gadget/composite.c bos->bDescriptorType = USB_DT_BOS;
bos 622 drivers/usb/gadget/composite.c bos->wTotalLength = cpu_to_le16(USB_DT_BOS_SIZE);
bos 623 drivers/usb/gadget/composite.c bos->bNumDeviceCaps = 0;
bos 652 drivers/usb/gadget/composite.c usb_ext = cdev->req->buf + le16_to_cpu(bos->wTotalLength);
bos 653 drivers/usb/gadget/composite.c bos->bNumDeviceCaps++;
bos 654 drivers/usb/gadget/composite.c le16_add_cpu(&bos->wTotalLength, USB_DT_USB_EXT_CAP_SIZE);
bos 668 drivers/usb/gadget/composite.c ss_cap = cdev->req->buf + le16_to_cpu(bos->wTotalLength);
bos 669 drivers/usb/gadget/composite.c bos->bNumDeviceCaps++;
bos 670 drivers/usb/gadget/composite.c le16_add_cpu(&bos->wTotalLength, USB_DT_USB_SS_CAP_SIZE);
bos 688 drivers/usb/gadget/composite.c ssp_cap = cdev->req->buf + le16_to_cpu(bos->wTotalLength);
bos 689 drivers/usb/gadget/composite.c bos->bNumDeviceCaps++;
bos 695 drivers/usb/gadget/composite.c le16_add_cpu(&bos->wTotalLength, USB_DT_USB_SSP_CAP_SIZE(1));
bos 730 drivers/usb/gadget/composite.c return le16_to_cpu(bos->wTotalLength);
bos 2025 drivers/usb/gadget/udc/dummy_hcd.c struct usb_bos_descriptor bos;
bos 2029 drivers/usb/gadget/udc/dummy_hcd.c .bos = {
bos 4333 drivers/usb/host/xhci.c field = le32_to_cpu(udev->bos->ext_cap->bmAttributes);
bos 4367 drivers/usb/host/xhci.c field = le32_to_cpu(udev->bos->ext_cap->bmAttributes);
bos 4422 drivers/usb/host/xhci.c field = le32_to_cpu(udev->bos->ext_cap->bmAttributes);
bos 4685 drivers/usb/host/xhci.c u2_del_ns = le16_to_cpu(udev->bos->ss_cap->bU2DevExitLat) * 1000ULL;
bos 899 drivers/usb/misc/usbtest.c struct usb_bos_descriptor *bos = NULL;
bos 905 drivers/usb/misc/usbtest.c sizeof(*udev->bos->desc));
bos 906 drivers/usb/misc/usbtest.c if (retval != sizeof(*udev->bos->desc)) {
bos 911 drivers/usb/misc/usbtest.c bos = (struct usb_bos_descriptor *)dev->buf;
bos 912 drivers/usb/misc/usbtest.c total = le16_to_cpu(bos->wTotalLength);
bos 913 drivers/usb/misc/usbtest.c num = bos->bNumDeviceCaps;
bos 930 drivers/usb/misc/usbtest.c length = sizeof(*udev->bos->desc);
bos 1356 drivers/usb/misc/usbtest.c if (udev->bos)
bos 1357 drivers/usb/misc/usbtest.c len = le16_to_cpu(udev->bos->desc->wTotalLength);
bos 266 drivers/usb/usbip/vhci_hcd.c struct usb_bos_descriptor bos;
bos 270 drivers/usb/usbip/vhci_hcd.c .bos = {
bos 652 include/linux/usb.h struct usb_host_bos *bos;
bos 813 include/linux/usb.h if (udev->speed < USB_SPEED_SUPER || !udev->bos || !udev->bos->ss_cap)
bos 815 include/linux/usb.h return udev->bos->ss_cap->bmAttributes & USB_LTM_SUPPORT;
bos 204 include/net/flow_offload.h u8 bos;
bos 213 include/net/flow_offload.h u8 bos;
bos 201 include/uapi/drm/etnaviv_drm.h __u64 bos; /* in, ptr to array of submit_bo's */
bos 114 include/uapi/drm/lima_drm.h __u64 bos; /* in, array of drm_lima_gem_submit_bo */
bos 236 include/uapi/drm/msm_drm.h __u64 bos; /* in, ptr to array of submit_bo's */
bos 193 net/mpls/af_mpls.c if (!dec.bos)
bos 432 net/mpls/af_mpls.c if (unlikely(!new_header_size && dec.bos)) {
bos 437 net/mpls/af_mpls.c bool bos;
bos 443 net/mpls/af_mpls.c bos = dec.bos;
bos 446 net/mpls/af_mpls.c dec.ttl, 0, bos);
bos 447 net/mpls/af_mpls.c bos = false;
bos 1683 net/mpls/af_mpls.c bool bos;
bos 1690 net/mpls/af_mpls.c bos = true;
bos 1692 net/mpls/af_mpls.c nla_label[i] = mpls_entry_encode(label[i], 0, 0, bos);
bos 1693 net/mpls/af_mpls.c bos = false;
bos 1706 net/mpls/af_mpls.c bool bos;
bos 1730 net/mpls/af_mpls.c bos = true;
bos 1731 net/mpls/af_mpls.c for (i = nla_labels - 1; i >= 0; i--, bos = false) {
bos 1750 net/mpls/af_mpls.c if (dec.bos != bos) {
bos 1752 net/mpls/af_mpls.c if (bos) {
bos 2419 net/mpls/af_mpls.c bool bos;
bos 2433 net/mpls/af_mpls.c bos = true;
bos 2436 net/mpls/af_mpls.c 1, 0, bos);
bos 2437 net/mpls/af_mpls.c bos = false;
bos 15 net/mpls/internal.h u8 bos;
bos 175 net/mpls/internal.h static inline struct mpls_shim_hdr mpls_entry_encode(u32 label, unsigned ttl, unsigned tc, bool bos)
bos 181 net/mpls/internal.h (bos ? (1 << MPLS_LS_S_SHIFT) : 0) |
bos 194 net/mpls/internal.h result.bos = (entry & MPLS_LS_S_MASK) >> MPLS_LS_S_SHIFT;
bos 50 net/mpls/mpls_iptunnel.c bool bos;
bos 126 net/mpls/mpls_iptunnel.c bos = true;
bos 129 net/mpls/mpls_iptunnel.c ttl, 0, bos);
bos 130 net/mpls/mpls_iptunnel.c bos = false;
bos 3548 net/sched/cls_api.c entry->mpls_push.bos = tcf_mpls_bos(act);
bos 3559 net/sched/cls_api.c entry->mpls_mangle.bos = tcf_mpls_bos(act);
bos 758 net/sched/cls_flower.c u8 bos = nla_get_u8(tb[TCA_FLOWER_KEY_MPLS_BOS]);
bos 760 net/sched/cls_flower.c if (bos & ~MPLS_BOS_MASK)
bos 762 net/sched/cls_flower.c key_val->mpls_bos = bos;