fctx 2637 drivers/block/rbd.c struct rbd_img_fill_ctx *fctx)
fctx 2642 drivers/block/rbd.c img_req->data_type = fctx->pos_type;
fctx 2648 drivers/block/rbd.c fctx->iter = *fctx->pos;
fctx 2655 drivers/block/rbd.c fctx->set_pos_fn, &fctx->iter);
fctx 2679 drivers/block/rbd.c struct rbd_img_fill_ctx *fctx)
fctx 2686 drivers/block/rbd.c if (fctx->pos_type == OBJ_REQUEST_NODATA ||
fctx 2689 drivers/block/rbd.c num_img_extents, fctx);
fctx 2700 drivers/block/rbd.c fctx->iter = *fctx->pos;
fctx 2707 drivers/block/rbd.c fctx->count_fn, &fctx->iter);
fctx 2724 drivers/block/rbd.c fctx->iter = *fctx->pos;
fctx 2730 drivers/block/rbd.c fctx->copy_fn, &fctx->iter);
fctx 2743 drivers/block/rbd.c struct rbd_img_fill_ctx fctx = {
fctx 2748 drivers/block/rbd.c return rbd_img_fill_request(img_req, &ex, 1, &fctx);
fctx 2793 drivers/block/rbd.c struct rbd_img_fill_ctx fctx = {
fctx 2802 drivers/block/rbd.c &fctx);
fctx 2853 drivers/block/rbd.c struct rbd_img_fill_ctx fctx = {
fctx 2862 drivers/block/rbd.c &fctx);
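The rbd.c hits above outline a callback-driven fill context: struct rbd_img_fill_ctx bundles a data cursor (pos/iter) with set_pos_fn/count_fn/copy_fn hooks that rbd_img_fill_request() applies per extent, so one generic walker can service several data-source types. A minimal userspace sketch of that pattern, assuming invented names throughout (none of the types or functions below are the kernel's definitions):

/* Illustrative stand-in for the fill-context idiom seen in rbd.c. */
#include <stdio.h>
#include <string.h>

struct extent { size_t off, len; };

struct fill_ctx {
	const char *src;   /* backing data (stand-in for a bio/bvec iterator) */
	size_t iter;       /* cursor, advanced by the callback */
	void (*copy_fn)(struct fill_ctx *ctx, char *dst, const struct extent *ex);
};

static void copy_linear(struct fill_ctx *ctx, char *dst, const struct extent *ex)
{
	memcpy(dst + ex->off, ctx->src + ctx->iter, ex->len);
	ctx->iter += ex->len;               /* cursor persists across extents */
}

static void fill_request(char *dst, const struct extent *exts, int n,
			 struct fill_ctx *ctx)
{
	for (int i = 0; i < n; i++)
		ctx->copy_fn(ctx, dst, &exts[i]);  /* generic, source-agnostic walker */
}

int main(void)
{
	char out[16] = {0};
	struct extent exts[] = { {0, 5}, {8, 5} };
	struct fill_ctx ctx = { .src = "helloworld", .copy_fn = copy_linear };

	fill_request(out, exts, 2, &ctx);
	printf("%.5s %.5s\n", out, out + 8);   /* prints: hello world */
	return 0;
}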
fctx 101 drivers/crypto/cavium/cpt/cptvf_algs.c struct fc_context *fctx = &rctx->fctx;
fctx 121 drivers/crypto/cavium/cpt/cptvf_algs.c fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
fctx 122 drivers/crypto/cavium/cpt/cptvf_algs.c fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
fctx 123 drivers/crypto/cavium/cpt/cptvf_algs.c fctx->enc.enc_ctrl.e.iv_source = FROM_DPTR;
fctx 126 drivers/crypto/cavium/cpt/cptvf_algs.c memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
fctx 128 drivers/crypto/cavium/cpt/cptvf_algs.c memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);
fctx 129 drivers/crypto/cavium/cpt/cptvf_algs.c ctrl_flags = (u64 *)&fctx->enc.enc_ctrl.flags;
fctx 141 drivers/crypto/cavium/cpt/cptvf_algs.c req_info->in[*argcnt].vptr = (u8 *)fctx;
fctx 197 drivers/crypto/cavium/cpt/cptvf_algs.c struct fc_context *fctx = &rctx->fctx;
fctx 203 drivers/crypto/cavium/cpt/cptvf_algs.c memset(fctx, 0, sizeof(struct fc_context));
fctx 113 drivers/crypto/cavium/cpt/cptvf_algs.h struct fc_context fctx;
fctx 39 drivers/crypto/cavium/nitrox/nitrox_aead.c struct flexi_crypto_context *fctx;
fctx 49 drivers/crypto/cavium/nitrox/nitrox_aead.c fctx = nctx->u.fctx;
fctx 50 drivers/crypto/cavium/nitrox/nitrox_aead.c flags.f = be64_to_cpu(fctx->flags.f);
fctx 52 drivers/crypto/cavium/nitrox/nitrox_aead.c fctx->flags.f = cpu_to_be64(flags.f);
fctx 55 drivers/crypto/cavium/nitrox/nitrox_aead.c memset(&fctx->crypto, 0, sizeof(fctx->crypto));
fctx 56 drivers/crypto/cavium/nitrox/nitrox_aead.c memcpy(fctx->crypto.u.key, key, keylen);
fctx 65 drivers/crypto/cavium/nitrox/nitrox_aead.c struct flexi_crypto_context *fctx = nctx->u.fctx;
fctx 68 drivers/crypto/cavium/nitrox/nitrox_aead.c flags.f = be64_to_cpu(fctx->flags.f);
fctx 70 drivers/crypto/cavium/nitrox/nitrox_aead.c fctx->flags.f = cpu_to_be64(flags.f);
fctx 195 drivers/crypto/cavium/nitrox/nitrox_aead.c struct flexi_crypto_context *fctx = nctx->u.fctx;
fctx 198 drivers/crypto/cavium/nitrox/nitrox_aead.c memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);
fctx 226 drivers/crypto/cavium/nitrox/nitrox_aead.c struct flexi_crypto_context *fctx = nctx->u.fctx;
fctx 229 drivers/crypto/cavium/nitrox/nitrox_aead.c memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);
fctx 270 drivers/crypto/cavium/nitrox/nitrox_aead.c nctx->u.fctx->flags.f = 0;
fctx 285 drivers/crypto/cavium/nitrox/nitrox_aead.c flags = &nctx->u.fctx->flags;
fctx 317 drivers/crypto/cavium/nitrox/nitrox_aead.c struct flexi_crypto_context *fctx = nctx->u.fctx;
fctx 319 drivers/crypto/cavium/nitrox/nitrox_aead.c memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
fctx 320 drivers/crypto/cavium/nitrox/nitrox_aead.c memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
fctx 333 drivers/crypto/cavium/nitrox/nitrox_aead.c struct flexi_crypto_context *fctx = nctx->u.fctx;
fctx 344 drivers/crypto/cavium/nitrox/nitrox_aead.c memcpy(fctx->crypto.iv, key + keylen, GCM_AES_SALT_SIZE);
fctx 203 drivers/crypto/cavium/nitrox/nitrox_req.h struct flexi_crypto_context *fctx;
fctx 80 drivers/crypto/cavium/nitrox/nitrox_skcipher.c struct flexi_crypto_context *fctx = nctx->u.fctx;
fctx 82 drivers/crypto/cavium/nitrox/nitrox_skcipher.c memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
fctx 83 drivers/crypto/cavium/nitrox/nitrox_skcipher.c memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
fctx 98 drivers/crypto/cavium/nitrox/nitrox_skcipher.c struct flexi_crypto_context *fctx;
fctx 111 drivers/crypto/cavium/nitrox/nitrox_skcipher.c fctx = nctx->u.fctx;
fctx 112 drivers/crypto/cavium/nitrox/nitrox_skcipher.c flags = &fctx->flags;
fctx 119 drivers/crypto/cavium/nitrox/nitrox_skcipher.c memcpy(fctx->crypto.u.key, key, keylen);
fctx 279 drivers/crypto/cavium/nitrox/nitrox_skcipher.c struct flexi_crypto_context *fctx;
fctx 294 drivers/crypto/cavium/nitrox/nitrox_skcipher.c fctx = nctx->u.fctx;
fctx 296 drivers/crypto/cavium/nitrox/nitrox_skcipher.c memcpy(fctx->auth.u.key2, (key + keylen), keylen);
fctx 306 drivers/crypto/cavium/nitrox/nitrox_skcipher.c struct flexi_crypto_context *fctx;
fctx 312 drivers/crypto/cavium/nitrox/nitrox_skcipher.c fctx = nctx->u.fctx;
fctx 314 drivers/crypto/cavium/nitrox/nitrox_skcipher.c memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
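A recurring pattern in the nitrox hits is the read-modify-write of a 64-bit flag word the device consumes in big-endian order: be64_to_cpu, edit, cpu_to_be64. A minimal userspace sketch of the same round-trip follows, using glibc's be64toh/htobe64 in place of the kernel helpers; the field layout and names are invented for illustration.

#include <endian.h>
#include <stdint.h>
#include <stdio.h>

#define CIPHER_AES_GCM  0x2ULL     /* hypothetical field value */
#define CIPHER_SHIFT    4          /* hypothetical field position */

struct flexi_ctx {
	uint64_t flags_be;         /* stored big-endian, as the device reads it */
};

static void set_cipher(struct flexi_ctx *fctx, uint64_t cipher)
{
	uint64_t f = be64toh(fctx->flags_be);   /* device order -> CPU order */
	f &= ~(0xfULL << CIPHER_SHIFT);         /* clear the field */
	f |= cipher << CIPHER_SHIFT;            /* insert the new value */
	fctx->flags_be = htobe64(f);            /* CPU order -> device order */
}

int main(void)
{
	struct flexi_ctx fctx = { .flags_be = 0 };
	set_cipher(&fctx, CIPHER_AES_GCM);
	printf("flags (BE storage): %016llx\n",
	       (unsigned long long)fctx.flags_be);
	return 0;
}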
fctx 891 drivers/gpu/drm/msm/msm_drv.c ret = msm_wait_fence(gpu->rb[queue->prio]->fctx, args->fence, &timeout,
fctx 301 drivers/gpu/drm/msm/msm_drv.h struct msm_fence_context *fctx, bool exclusive);
fctx 16 drivers/gpu/drm/msm/msm_fence.c struct msm_fence_context *fctx;
fctx 18 drivers/gpu/drm/msm/msm_fence.c fctx = kzalloc(sizeof(*fctx), GFP_KERNEL);
fctx 19 drivers/gpu/drm/msm/msm_fence.c if (!fctx)
fctx 22 drivers/gpu/drm/msm/msm_fence.c fctx->dev = dev;
fctx 23 drivers/gpu/drm/msm/msm_fence.c strncpy(fctx->name, name, sizeof(fctx->name));
fctx 24 drivers/gpu/drm/msm/msm_fence.c fctx->context = dma_fence_context_alloc(1);
fctx 25 drivers/gpu/drm/msm/msm_fence.c init_waitqueue_head(&fctx->event);
fctx 26 drivers/gpu/drm/msm/msm_fence.c spin_lock_init(&fctx->spinlock);
fctx 28 drivers/gpu/drm/msm/msm_fence.c return fctx;
fctx 31 drivers/gpu/drm/msm/msm_fence.c void msm_fence_context_free(struct msm_fence_context *fctx)
fctx 33 drivers/gpu/drm/msm/msm_fence.c kfree(fctx);
fctx 36 drivers/gpu/drm/msm/msm_fence.c static inline bool fence_completed(struct msm_fence_context *fctx, uint32_t fence)
fctx 38 drivers/gpu/drm/msm/msm_fence.c return (int32_t)(fctx->completed_fence - fence) >= 0;
fctx 42 drivers/gpu/drm/msm/msm_fence.c int msm_wait_fence(struct msm_fence_context *fctx, uint32_t fence,
fctx 47 drivers/gpu/drm/msm/msm_fence.c if (fence > fctx->last_fence) {
fctx 49 drivers/gpu/drm/msm/msm_fence.c fctx->name, fence, fctx->last_fence);
fctx 55 drivers/gpu/drm/msm/msm_fence.c ret = fence_completed(fctx, fence) ? 0 : -EBUSY;
fctx 60 drivers/gpu/drm/msm/msm_fence.c ret = wait_event_interruptible_timeout(fctx->event,
fctx 61 drivers/gpu/drm/msm/msm_fence.c fence_completed(fctx, fence),
fctx 64 drivers/gpu/drm/msm/msm_fence.c ret = wait_event_timeout(fctx->event,
fctx 65 drivers/gpu/drm/msm/msm_fence.c fence_completed(fctx, fence),
fctx 70 drivers/gpu/drm/msm/msm_fence.c fence, fctx->completed_fence);
fctx 81 drivers/gpu/drm/msm/msm_fence.c void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence)
fctx 83 drivers/gpu/drm/msm/msm_fence.c spin_lock(&fctx->spinlock);
fctx 84 drivers/gpu/drm/msm/msm_fence.c fctx->completed_fence = max(fence, fctx->completed_fence);
fctx 85 drivers/gpu/drm/msm/msm_fence.c spin_unlock(&fctx->spinlock);
fctx 87 drivers/gpu/drm/msm/msm_fence.c wake_up_all(&fctx->event);
fctx 92 drivers/gpu/drm/msm/msm_fence.c struct msm_fence_context *fctx;
fctx 108 drivers/gpu/drm/msm/msm_fence.c return f->fctx->name;
fctx 114 drivers/gpu/drm/msm/msm_fence.c return fence_completed(f->fctx, f->base.seqno);
fctx 124 drivers/gpu/drm/msm/msm_fence.c msm_fence_alloc(struct msm_fence_context *fctx)
fctx 132 drivers/gpu/drm/msm/msm_fence.c f->fctx = fctx;
fctx 134 drivers/gpu/drm/msm/msm_fence.c dma_fence_init(&f->base, &msm_fence_ops, &fctx->spinlock,
fctx 135 drivers/gpu/drm/msm/msm_fence.c fctx->context, ++fctx->last_fence);
fctx 25 drivers/gpu/drm/msm/msm_fence.h void msm_fence_context_free(struct msm_fence_context *fctx);
fctx 27 drivers/gpu/drm/msm/msm_fence.h int msm_wait_fence(struct msm_fence_context *fctx, uint32_t fence,
fctx 29 drivers/gpu/drm/msm/msm_fence.h void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence);
fctx 31 drivers/gpu/drm/msm/msm_fence.h struct dma_fence * msm_fence_alloc(struct msm_fence_context *fctx);
fctx 703 drivers/gpu/drm/msm/msm_gem.c struct msm_fence_context *fctx, bool exclusive)
fctx 713 drivers/gpu/drm/msm/msm_gem.c if (fence && (fence->context != fctx->context)) {
fctx 726 drivers/gpu/drm/msm/msm_gem.c if (fence->context != fctx->context) {
fctx 241 drivers/gpu/drm/msm/msm_gem_submit.c ret = msm_gem_sync_object(&msm_obj->base, submit->ring->fctx,
fctx 455 drivers/gpu/drm/msm/msm_gem_submit.c if (!dma_fence_match_context(in_fence, ring->fctx->context))
fctx 565 drivers/gpu/drm/msm/msm_gem_submit.c submit->fence = msm_fence_alloc(ring->fctx);
fctx 398 drivers/gpu/drm/msm/msm_gpu.c msm_update_fence(submit->ring->fctx,
fctx 52 drivers/gpu/drm/msm/msm_ringbuffer.c ring->fctx = msm_fence_context_alloc(gpu->dev, name);
fctx 66 drivers/gpu/drm/msm/msm_ringbuffer.c msm_fence_context_free(ring->fctx);
fctx 47 drivers/gpu/drm/msm/msm_ringbuffer.h struct msm_fence_context *fctx;
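Note the comparison in the msm_fence.c hit at its line 38: fence_completed() computes (int32_t)(completed_fence - fence) >= 0 rather than a plain completed >= fence, so the check stays correct across 32-bit sequence-number wraparound as long as the two values are within 2^31 of each other. A standalone demonstration of why the signed-difference form matters:

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

static bool fence_completed(uint32_t completed, uint32_t fence)
{
	return (int32_t)(completed - fence) >= 0;
}

int main(void)
{
	/* ordinary cases */
	printf("%d\n", fence_completed(10, 5));          /* 1: seqno 5 is done */
	printf("%d\n", fence_completed(10, 20));         /* 0: seqno 20 pending */

	/* wraparound: the counter passed 2^32; fence was issued just before */
	printf("%d\n", fence_completed(3, 0xfffffffe));  /* 1: still correct */
	/* a naive 'completed >= fence' would wrongly report 0 here */
	return 0;
}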
fctx 1045 drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nouveau_fence_chan *fctx = chan->fence;
fctx 1053 drivers/gpu/drm/nouveau/dispnv04/crtc.c if (list_empty(&fctx->flip)) {
fctx 1059 drivers/gpu/drm/nouveau/dispnv04/crtc.c s = list_first_entry(&fctx->flip, struct nv04_page_flip_state, head);
fctx 1101 drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nouveau_fence_chan *fctx = chan->fence;
fctx 1109 drivers/gpu/drm/nouveau/dispnv04/crtc.c list_add_tail(&s->head, &fctx->flip);
fctx 65 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
fctx 67 drivers/gpu/drm/nouveau/nouveau_fence.c if (!--fctx->notify_ref)
fctx 90 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_context_del(struct nouveau_fence_chan *fctx)
fctx 94 drivers/gpu/drm/nouveau/nouveau_fence.c spin_lock_irq(&fctx->lock);
fctx 95 drivers/gpu/drm/nouveau/nouveau_fence.c while (!list_empty(&fctx->pending)) {
fctx 96 drivers/gpu/drm/nouveau/nouveau_fence.c fence = list_entry(fctx->pending.next, typeof(*fence), head);
fctx 99 drivers/gpu/drm/nouveau/nouveau_fence.c nvif_notify_put(&fctx->notify);
fctx 101 drivers/gpu/drm/nouveau/nouveau_fence.c spin_unlock_irq(&fctx->lock);
fctx 103 drivers/gpu/drm/nouveau/nouveau_fence.c nvif_notify_fini(&fctx->notify);
fctx 104 drivers/gpu/drm/nouveau/nouveau_fence.c fctx->dead = 1;
fctx 120 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_context_free(struct nouveau_fence_chan *fctx)
fctx 122 drivers/gpu/drm/nouveau/nouveau_fence.c kref_put(&fctx->fence_ref, nouveau_fence_context_put);
fctx 126 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_update(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx)
fctx 130 drivers/gpu/drm/nouveau/nouveau_fence.c u32 seq = fctx->read(chan);
fctx 132 drivers/gpu/drm/nouveau/nouveau_fence.c while (!list_empty(&fctx->pending)) {
fctx 133 drivers/gpu/drm/nouveau/nouveau_fence.c fence = list_entry(fctx->pending.next, typeof(*fence), head);
fctx 147 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx =
fctx 148 drivers/gpu/drm/nouveau/nouveau_fence.c container_of(notify, typeof(*fctx), notify);
fctx 152 drivers/gpu/drm/nouveau/nouveau_fence.c spin_lock_irqsave(&fctx->lock, flags);
fctx 153 drivers/gpu/drm/nouveau/nouveau_fence.c if (!list_empty(&fctx->pending)) {
fctx 157 drivers/gpu/drm/nouveau/nouveau_fence.c fence = list_entry(fctx->pending.next, typeof(*fence), head);
fctx 158 drivers/gpu/drm/nouveau/nouveau_fence.c chan = rcu_dereference_protected(fence->channel, lockdep_is_held(&fctx->lock));
fctx 159 drivers/gpu/drm/nouveau/nouveau_fence.c if (nouveau_fence_update(chan, fctx))
fctx 162 drivers/gpu/drm/nouveau/nouveau_fence.c spin_unlock_irqrestore(&fctx->lock, flags);
fctx 168 drivers/gpu/drm/nouveau/nouveau_fence.c nouveau_fence_context_new(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx)
fctx 174 drivers/gpu/drm/nouveau/nouveau_fence.c INIT_LIST_HEAD(&fctx->flip);
fctx 175 drivers/gpu/drm/nouveau/nouveau_fence.c INIT_LIST_HEAD(&fctx->pending);
fctx 176 drivers/gpu/drm/nouveau/nouveau_fence.c spin_lock_init(&fctx->lock);
fctx 177 drivers/gpu/drm/nouveau/nouveau_fence.c fctx->context = chan->drm->chan.context_base + chan->chid;
fctx 180 drivers/gpu/drm/nouveau/nouveau_fence.c strcpy(fctx->name, "copy engine channel");
fctx 182 drivers/gpu/drm/nouveau/nouveau_fence.c strcpy(fctx->name, "generic kernel channel");
fctx 184 drivers/gpu/drm/nouveau/nouveau_fence.c strcpy(fctx->name, nvxx_client(&cli->base)->name);
fctx 186 drivers/gpu/drm/nouveau/nouveau_fence.c kref_init(&fctx->fence_ref);
fctx 195 drivers/gpu/drm/nouveau/nouveau_fence.c &fctx->notify);
fctx 203 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = chan->fence;
fctx 212 drivers/gpu/drm/nouveau/nouveau_fence.c &fctx->lock, fctx->context, ++fctx->sequence);
fctx 215 drivers/gpu/drm/nouveau/nouveau_fence.c &fctx->lock, fctx->context, ++fctx->sequence);
fctx 216 drivers/gpu/drm/nouveau/nouveau_fence.c kref_get(&fctx->fence_ref);
fctx 219 drivers/gpu/drm/nouveau/nouveau_fence.c ret = fctx->emit(fence);
fctx 222 drivers/gpu/drm/nouveau/nouveau_fence.c spin_lock_irq(&fctx->lock);
fctx 224 drivers/gpu/drm/nouveau/nouveau_fence.c if (nouveau_fence_update(chan, fctx))
fctx 225 drivers/gpu/drm/nouveau/nouveau_fence.c nvif_notify_put(&fctx->notify);
fctx 227 drivers/gpu/drm/nouveau/nouveau_fence.c list_add_tail(&fence->head, &fctx->pending);
fctx 228 drivers/gpu/drm/nouveau/nouveau_fence.c spin_unlock_irq(&fctx->lock);
fctx 239 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
fctx 246 drivers/gpu/drm/nouveau/nouveau_fence.c spin_lock_irqsave(&fctx->lock, flags);
fctx 247 drivers/gpu/drm/nouveau/nouveau_fence.c chan = rcu_dereference_protected(fence->channel, lockdep_is_held(&fctx->lock));
fctx 248 drivers/gpu/drm/nouveau/nouveau_fence.c if (chan && nouveau_fence_update(chan, fctx))
fctx 249 drivers/gpu/drm/nouveau/nouveau_fence.c nvif_notify_put(&fctx->notify);
fctx 250 drivers/gpu/drm/nouveau/nouveau_fence.c spin_unlock_irqrestore(&fctx->lock, flags);
fctx 335 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = chan->fence;
fctx 360 drivers/gpu/drm/nouveau/nouveau_fence.c if (prev && (prev == chan || fctx->sync(f, prev, chan) == 0))
fctx 385 drivers/gpu/drm/nouveau/nouveau_fence.c if (prev && (prev == chan || fctx->sync(f, prev, chan) == 0))
fctx 435 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
fctx 437 drivers/gpu/drm/nouveau/nouveau_fence.c return !fctx->dead ? fctx->name : "dead channel";
fctx 449 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
fctx 456 drivers/gpu/drm/nouveau/nouveau_fence.c ret = (int)(fctx->read(chan) - fence->base.seqno) >= 0;
fctx 490 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
fctx 492 drivers/gpu/drm/nouveau/nouveau_fence.c kref_put(&fctx->fence_ref, nouveau_fence_context_put);
fctx 508 drivers/gpu/drm/nouveau/nouveau_fence.c struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
fctx 511 drivers/gpu/drm/nouveau/nouveau_fence.c if (!fctx->notify_ref++)
fctx 512 drivers/gpu/drm/nouveau/nouveau_fence.c nvif_notify_get(&fctx->notify);
fctx 517 drivers/gpu/drm/nouveau/nouveau_fence.c else if (!--fctx->notify_ref)
fctx 518 drivers/gpu/drm/nouveau/nouveau_fence.c nvif_notify_put(&fctx->notify);
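The nouveau_fence.c hits around nouveau_fence_update() show the retire loop shared by all backends: read the channel's completed sequence number through the fctx->read hook, then signal pending fences from the head of the submission-ordered list until one is still outstanding. A simplified userspace rendering of that loop, with an array standing in for the kernel's locked list_head and invented helper names:

#include <stdio.h>
#include <stdint.h>

struct fence { uint32_t seqno; int signaled; };

struct fence_chan {
	struct fence *pending[8];
	int head, tail;             /* FIFO, in submission order */
	uint32_t (*read)(void);     /* backend hook: last seqno hw completed */
};

static void fence_update(struct fence_chan *fctx)
{
	uint32_t seq = fctx->read();

	while (fctx->head != fctx->tail) {
		struct fence *f = fctx->pending[fctx->head];
		if ((int32_t)(seq - f->seqno) < 0)
			break;              /* submission order: the rest are newer */
		f->signaled = 1;
		fctx->head++;
	}
}

static uint32_t fake_read(void) { return 2; }  /* pretend hw reached seqno 2 */

int main(void)
{
	struct fence a = {1, 0}, b = {2, 0}, c = {3, 0};
	struct fence_chan fctx = { .pending = {&a, &b, &c}, .head = 0,
				   .tail = 3, .read = fake_read };

	fence_update(&fctx);
	printf("%d %d %d\n", a.signaled, b.signaled, c.signaled);  /* 1 1 0 */
	return 0;
}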
fctx 71 drivers/gpu/drm/nouveau/nv04_fence.c struct nv04_fence_chan *fctx = chan->fence;
fctx 72 drivers/gpu/drm/nouveau/nv04_fence.c nouveau_fence_context_del(&fctx->base);
fctx 74 drivers/gpu/drm/nouveau/nv04_fence.c nouveau_fence_context_free(&fctx->base);
fctx 80 drivers/gpu/drm/nouveau/nv04_fence.c struct nv04_fence_chan *fctx = kzalloc(sizeof(*fctx), GFP_KERNEL);
fctx 81 drivers/gpu/drm/nouveau/nv04_fence.c if (fctx) {
fctx 82 drivers/gpu/drm/nouveau/nv04_fence.c nouveau_fence_context_new(chan, &fctx->base);
fctx 83 drivers/gpu/drm/nouveau/nv04_fence.c fctx->base.emit = nv04_fence_emit;
fctx 84 drivers/gpu/drm/nouveau/nv04_fence.c fctx->base.sync = nv04_fence_sync;
fctx 85 drivers/gpu/drm/nouveau/nv04_fence.c fctx->base.read = nv04_fence_read;
fctx 86 drivers/gpu/drm/nouveau/nv04_fence.c chan->fence = fctx;
fctx 59 drivers/gpu/drm/nouveau/nv10_fence.c struct nv10_fence_chan *fctx = chan->fence;
fctx 60 drivers/gpu/drm/nouveau/nv10_fence.c nouveau_fence_context_del(&fctx->base);
fctx 61 drivers/gpu/drm/nouveau/nv10_fence.c nvif_object_fini(&fctx->sema);
fctx 63 drivers/gpu/drm/nouveau/nv10_fence.c nouveau_fence_context_free(&fctx->base);
fctx 69 drivers/gpu/drm/nouveau/nv10_fence.c struct nv10_fence_chan *fctx;
fctx 71 drivers/gpu/drm/nouveau/nv10_fence.c fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
fctx 72 drivers/gpu/drm/nouveau/nv10_fence.c if (!fctx)
fctx 75 drivers/gpu/drm/nouveau/nv10_fence.c nouveau_fence_context_new(chan, &fctx->base);
fctx 76 drivers/gpu/drm/nouveau/nv10_fence.c fctx->base.emit = nv10_fence_emit;
fctx 77 drivers/gpu/drm/nouveau/nv10_fence.c fctx->base.read = nv10_fence_read;
fctx 78 drivers/gpu/drm/nouveau/nv10_fence.c fctx->base.sync = nv10_fence_sync;
fctx 39 drivers/gpu/drm/nouveau/nv17_fence.c struct nv10_fence_chan *fctx = chan->fence;
fctx 54 drivers/gpu/drm/nouveau/nv17_fence.c OUT_RING (prev, fctx->sema.handle);
fctx 63 drivers/gpu/drm/nouveau/nv17_fence.c OUT_RING (chan, fctx->sema.handle);
fctx 78 drivers/gpu/drm/nouveau/nv17_fence.c struct nv10_fence_chan *fctx;
fctx 84 drivers/gpu/drm/nouveau/nv17_fence.c fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
fctx 85 drivers/gpu/drm/nouveau/nv17_fence.c if (!fctx)
fctx 88 drivers/gpu/drm/nouveau/nv17_fence.c nouveau_fence_context_new(chan, &fctx->base);
fctx 89 drivers/gpu/drm/nouveau/nv17_fence.c fctx->base.emit = nv10_fence_emit;
fctx 90 drivers/gpu/drm/nouveau/nv17_fence.c fctx->base.read = nv10_fence_read;
fctx 91 drivers/gpu/drm/nouveau/nv17_fence.c fctx->base.sync = nv17_fence_sync;
fctx 100 drivers/gpu/drm/nouveau/nv17_fence.c &fctx->sema);
fctx 39 drivers/gpu/drm/nouveau/nv50_fence.c struct nv10_fence_chan *fctx;
fctx 45 drivers/gpu/drm/nouveau/nv50_fence.c fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
fctx 46 drivers/gpu/drm/nouveau/nv50_fence.c if (!fctx)
fctx 49 drivers/gpu/drm/nouveau/nv50_fence.c nouveau_fence_context_new(chan, &fctx->base);
fctx 50 drivers/gpu/drm/nouveau/nv50_fence.c fctx->base.emit = nv10_fence_emit;
fctx 51 drivers/gpu/drm/nouveau/nv50_fence.c fctx->base.read = nv10_fence_read;
fctx 52 drivers/gpu/drm/nouveau/nv50_fence.c fctx->base.sync = nv17_fence_sync;
fctx 61 drivers/gpu/drm/nouveau/nv50_fence.c &fctx->sema);
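The nv04/nv10/nv17/nv50 constructors above all follow one idiom: embed the common nouveau_fence_chan as .base, call nouveau_fence_context_new(), then fill the emit/read/sync hooks with generation-specific functions, reusing older implementations where the hardware is unchanged (nv50 keeps nv10_fence_emit and nv17_fence_sync). A compressed sketch of that ops-table pattern, with invented function bodies:

#include <stdio.h>
#include <stdlib.h>

struct fence_chan_base {
	int (*emit)(void);
	int (*sync)(void);
};

struct nv10_fence_chan {
	struct fence_chan_base base;   /* common base embedded first */
	int sema;                      /* generation-private state */
};

static int nv10_emit(void) { puts("nv10 emit"); return 0; }
static int nv17_sync(void) { puts("nv17 semaphore sync"); return 0; }

static struct nv10_fence_chan *nv50_fence_create(void)
{
	struct nv10_fence_chan *fctx = calloc(1, sizeof(*fctx));

	if (!fctx)
		return NULL;
	fctx->base.emit = nv10_emit;   /* unchanged since nv10 */
	fctx->base.sync = nv17_sync;   /* nv17 added semaphores; nv50 reuses them */
	return fctx;
}

int main(void)
{
	struct nv10_fence_chan *fctx = nv50_fence_create();

	if (!fctx)
		return 1;
	fctx->base.emit();
	fctx->base.sync();
	free(fctx);
	return 0;
}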
fctx 71 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_chan *fctx = chan->fence;
fctx 72 drivers/gpu/drm/nouveau/nv84_fence.c u64 addr = fctx->vma->addr + chan->chid * 16;
fctx 74 drivers/gpu/drm/nouveau/nv84_fence.c return fctx->base.emit32(chan, addr, fence->base.seqno);
fctx 81 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_chan *fctx = chan->fence;
fctx 82 drivers/gpu/drm/nouveau/nv84_fence.c u64 addr = fctx->vma->addr + prev->chid * 16;
fctx 84 drivers/gpu/drm/nouveau/nv84_fence.c return fctx->base.sync32(chan, addr, fence->base.seqno);
fctx 98 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_chan *fctx = chan->fence;
fctx 100 drivers/gpu/drm/nouveau/nv84_fence.c nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence);
fctx 102 drivers/gpu/drm/nouveau/nv84_fence.c nouveau_vma_del(&fctx->vma);
fctx 104 drivers/gpu/drm/nouveau/nv84_fence.c nouveau_fence_context_del(&fctx->base);
fctx 106 drivers/gpu/drm/nouveau/nv84_fence.c nouveau_fence_context_free(&fctx->base);
fctx 113 drivers/gpu/drm/nouveau/nv84_fence.c struct nv84_fence_chan *fctx;
fctx 116 drivers/gpu/drm/nouveau/nv84_fence.c fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);
fctx 117 drivers/gpu/drm/nouveau/nv84_fence.c if (!fctx)
fctx 120 drivers/gpu/drm/nouveau/nv84_fence.c nouveau_fence_context_new(chan, &fctx->base);
fctx 121 drivers/gpu/drm/nouveau/nv84_fence.c fctx->base.emit = nv84_fence_emit;
fctx 122 drivers/gpu/drm/nouveau/nv84_fence.c fctx->base.sync = nv84_fence_sync;
fctx 123 drivers/gpu/drm/nouveau/nv84_fence.c fctx->base.read = nv84_fence_read;
fctx 124 drivers/gpu/drm/nouveau/nv84_fence.c fctx->base.emit32 = nv84_fence_emit32;
fctx 125 drivers/gpu/drm/nouveau/nv84_fence.c fctx->base.sync32 = nv84_fence_sync32;
fctx 126 drivers/gpu/drm/nouveau/nv84_fence.c fctx->base.sequence = nv84_fence_read(chan);
fctx 129 drivers/gpu/drm/nouveau/nv84_fence.c ret = nouveau_vma_new(priv->bo, chan->vmm, &fctx->vma);
fctx 68 drivers/gpu/drm/nouveau/nvc0_fence.c struct nv84_fence_chan *fctx = chan->fence;
fctx 69 drivers/gpu/drm/nouveau/nvc0_fence.c fctx->base.emit32 = nvc0_fence_emit32;
fctx 70 drivers/gpu/drm/nouveau/nvc0_fence.c fctx->base.sync32 = nvc0_fence_sync32;
fctx 79 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c struct nvkm_memory *fctx = device->imem->ramfc;
fctx 98 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c nvkm_kmap(fctx);
fctx 103 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c u32 cv = (nvkm_ro32(fctx, c->ctxp + data) & ~cm);
fctx 104 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs));
fctx 106 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c nvkm_done(fctx);
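The dmanv04.c hits patch a RAMFC register image with a classic masked read-modify-write: read the 32-bit word, clear the field under its mask, and or the new value in at the field's shift (nvkm_ro32/nvkm_wo32 do the actual context-memory access in the kernel). The same operation in freestanding C, with a hypothetical field layout:

#include <stdint.h>
#include <stdio.h>

static uint32_t insert_field(uint32_t word, uint32_t mask, unsigned int shift,
			     uint32_t val)
{
	uint32_t cv = word & ~mask;       /* clear the old field contents */
	return cv | (val << shift);       /* write the new value into place */
}

int main(void)
{
	uint32_t ramfc_word = 0xdeadbeef;

	/* hypothetical 8-bit field occupying bits 8..15 */
	ramfc_word = insert_field(ramfc_word, 0x0000ff00, 8, 0x42);
	printf("%08x\n", ramfc_word);     /* prints: dead42ef */
	return 0;
}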
fctx 2350 drivers/net/ethernet/broadcom/cnic.c struct fcoe_context *fctx;
fctx 2384 drivers/net/ethernet/broadcom/cnic.c fctx = cnic_get_bnx2x_ctx(dev, cid, 1, &ctx_addr);
fctx 2385 drivers/net/ethernet/broadcom/cnic.c if (fctx) {
fctx 2391 drivers/net/ethernet/broadcom/cnic.c fctx->xstorm_ag_context.cdu_reserved = val;
fctx 2394 drivers/net/ethernet/broadcom/cnic.c fctx->ustorm_ag_context.cdu_usage = val;
fctx 321 fs/fuse/control.c static int fuse_ctl_fill_super(struct super_block *sb, struct fs_context *fctx)
fctx 674 fs/userfaultfd.c struct userfaultfd_fork_ctx *fctx;
fctx 683 fs/userfaultfd.c list_for_each_entry(fctx, fcs, list)
fctx 684 fs/userfaultfd.c if (fctx->orig == octx) {
fctx 685 fs/userfaultfd.c ctx = fctx->new;
fctx 690 fs/userfaultfd.c fctx = kmalloc(sizeof(*fctx), GFP_KERNEL);
fctx 691 fs/userfaultfd.c if (!fctx)
fctx 696 fs/userfaultfd.c kfree(fctx);
fctx 711 fs/userfaultfd.c fctx->orig = octx;
fctx 712 fs/userfaultfd.c fctx->new = ctx;
fctx 713 fs/userfaultfd.c list_add_tail(&fctx->list, fcs);
fctx 720 fs/userfaultfd.c static void dup_fctx(struct userfaultfd_fork_ctx *fctx)
fctx 722 fs/userfaultfd.c struct userfaultfd_ctx *ctx = fctx->orig;
fctx 728 fs/userfaultfd.c ewq.msg.arg.reserved.reserved1 = (unsigned long)fctx->new;
fctx 735 fs/userfaultfd.c struct userfaultfd_fork_ctx *fctx, *n;
fctx 737 fs/userfaultfd.c list_for_each_entry_safe(fctx, n, fcs, list) {
fctx 738 fs/userfaultfd.c dup_fctx(fctx);
fctx 739 fs/userfaultfd.c list_del(&fctx->list);
fctx 740 fs/userfaultfd.c kfree(fctx);
fctx 487 security/apparmor/file.c static void update_file_ctx(struct aa_file_ctx *fctx, struct aa_label *label,
fctx 493 security/apparmor/file.c spin_lock(&fctx->lock);
fctx 494 security/apparmor/file.c old = rcu_dereference_protected(fctx->label,
fctx 495 security/apparmor/file.c lockdep_is_held(&fctx->lock));
fctx 499 security/apparmor/file.c rcu_assign_pointer(fctx->label, l);
fctx 503 security/apparmor/file.c fctx->allow |= request;
fctx 505 security/apparmor/file.c spin_unlock(&fctx->lock);
fctx 599 security/apparmor/file.c struct aa_file_ctx *fctx;
fctx 607 security/apparmor/file.c fctx = file_ctx(file);
fctx 610 security/apparmor/file.c flabel = rcu_dereference(fctx->label);
fctx 620 security/apparmor/file.c denied = request & ~fctx->allow;
fctx 394 security/apparmor/lsm.c struct aa_file_ctx *fctx = file_ctx(file);
fctx 407 security/apparmor/lsm.c fctx->allow = MAY_EXEC | MAY_READ | AA_EXEC_MMAP;
fctx 419 security/apparmor/lsm.c fctx->allow = aa_map_file_to_perms(file);
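Finally, the fs/userfaultfd.c hits implement a find-or-create over the per-fork list: the fork path scans the list for an entry whose ->orig matches the parent context and allocates a new fork-ctx pair only when no match exists. A simplified userspace rendering of that lookup-or-create idiom, using plain pointers instead of the kernel's list_head and list_for_each_entry:

#include <stdio.h>
#include <stdlib.h>

struct fork_ctx {
	const void *orig;       /* parent context (the lookup key) */
	void *new;              /* child copy (the value) */
	struct fork_ctx *next;
};

static struct fork_ctx *find_or_create(struct fork_ctx **head, const void *orig)
{
	struct fork_ctx *fctx;

	for (fctx = *head; fctx; fctx = fctx->next)
		if (fctx->orig == orig)
			return fctx;       /* already duplicated for this fork */

	fctx = malloc(sizeof(*fctx));
	if (!fctx)
		return NULL;
	fctx->orig = orig;
	fctx->new = NULL;                  /* caller fills in the child copy */
	fctx->next = *head;
	*head = fctx;
	return fctx;
}

int main(void)
{
	struct fork_ctx *list = NULL;
	int parent_ctx;

	struct fork_ctx *a = find_or_create(&list, &parent_ctx);
	struct fork_ctx *b = find_or_create(&list, &parent_ctx);

	printf("same entry: %d\n", a == b);  /* 1: second lookup reuses the first */
	free(a);
	return 0;
}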