ef 24 arch/s390/include/asm/eadm.h u8 ef;
ef 60 arch/x86/include/asm/jump_label.h .if \def
ef 77 arch/x86/include/asm/jump_label.h .if \def
ef 70 drivers/crypto/cavium/zip/zip_deflate.c zip_cmd->s.ef = 1;
ef 77 drivers/crypto/cavium/zip/zip_inflate.c zip_cmd->s.ef = 1;
ef 177 drivers/crypto/cavium/zip/zip_inflate.c result_ptr->s.ef = 1;
ef 194 drivers/crypto/cavium/zip/zip_inflate.c zip_ops->end_file = result_ptr->s.ef;
ef 188 drivers/crypto/cavium/zip/zip_regs.h u64 ef : 1;
ef 202 drivers/crypto/cavium/zip/zip_regs.h u64 ef : 1;
ef 353 drivers/crypto/cavium/zip/zip_regs.h u64 ef : 1;
ef 359 drivers/crypto/cavium/zip/zip_regs.h u64 ef : 1;
ef 1332 drivers/dma/ppc4xx/adma.c int ef = 1;
ef 1354 drivers/dma/ppc4xx/adma.c ef = 0; /* sum_product case, process on DMA0/1 */
ef 1356 drivers/dma/ppc4xx/adma.c ef = 3; /* override (DMA0/1 + idle) */
ef 1358 drivers/dma/ppc4xx/adma.c ef = 0; /* can't process on DMA2 if !rxor */
ef 1362 drivers/dma/ppc4xx/adma.c if (likely(ef) &&
ef 1364 drivers/dma/ppc4xx/adma.c ef++;
ef 1366 drivers/dma/ppc4xx/adma.c return ef;
ef 200 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct dma_fence **ef);
ef 204 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct dma_fence **ef);
ef 225 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct dma_fence **ef);
ef 219 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct amdgpu_amdkfd_fence *ef)
ef 225 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (!ef)
ef 246 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (f->context == ef->base.context)
ef 841 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct dma_fence **ef)
ef 872 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c *ef = dma_fence_get(&info->eviction_fence->base);
ef 915 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c dma_fence_put(*ef);
ef 916 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c *ef = NULL;
ef 928 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct dma_fence **ef)
ef 946 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = init_kfd_vm(new_vm, process_info, ef);
ef 964 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct dma_fence **ef)
ef 982 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = init_kfd_vm(avm, process_info, ef);
ef 1960 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c int amdgpu_amdkfd_gpuvm_restore_process_bos(void *info, struct dma_fence **ef)
ef 2074 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c *ef = dma_fence_get(&new_fence->base);
ef 720 drivers/gpu/drm/amd/amdkfd/kfd_priv.h struct dma_fence *ef;
ef 467 drivers/gpu/drm/amd/amdkfd/kfd_process.c dma_fence_put(p->ef);
ef 789 drivers/gpu/drm/amd/amdkfd/kfd_process.c &pdd->vm, &p->kgd_process_info, &p->ef);
ef 792 drivers/gpu/drm/amd/amdkfd/kfd_process.c &pdd->vm, &p->kgd_process_info, &p->ef);
ef 1012 drivers/gpu/drm/amd/amdkfd/kfd_process.c WARN_ONCE(p->last_eviction_seqno != p->ef->seqno,
ef 1026 drivers/gpu/drm/amd/amdkfd/kfd_process.c dma_fence_signal(p->ef);
ef 1027 drivers/gpu/drm/amd/amdkfd/kfd_process.c dma_fence_put(p->ef);
ef 1028 drivers/gpu/drm/amd/amdkfd/kfd_process.c p->ef = NULL;
ef 1063 drivers/gpu/drm/amd/amdkfd/kfd_process.c &p->ef);
ef 1092 drivers/gpu/drm/amd/amdkfd/kfd_process.c dma_fence_signal(p->ef);
ef 1093 drivers/gpu/drm/amd/amdkfd/kfd_process.c dma_fence_put(p->ef);
ef 1094 drivers/gpu/drm/amd/amdkfd/kfd_process.c p->ef = NULL;
ef 274 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (ior->dp.ef)
ef 406 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->dp.ef = dp->dpcd[DPCD_RC02] & DPCD_RC02_ENHANCED_FRAME_CAP;
ef 40 drivers/gpu/drm/nouveau/nvkm/engine/disp/ior.h bool ef;
ef 341 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c h = h - (3 * ior->dp.ef) - (12 / ior->dp.nr);
ef 42 drivers/gpu/drm/nouveau/nvkm/engine/disp/piornv50.c pior->dp.ef);
ef 112 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorg94.c if (sor->dp.ef)
ef 111 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c if (sor->dp.ef)
ef 50 drivers/gpu/drm/nouveau/nvkm/engine/disp/sortu102.c if (sor->dp.ef)
ef 144 drivers/gpu/drm/nouveau/nvkm/subdev/i2c/aux.c nvkm_i2c_aux_lnk_ctl(struct nvkm_i2c_aux *aux, int nr, int bw, bool ef)
ef 147 drivers/gpu/drm/nouveau/nvkm/subdev/i2c/aux.c return aux->func->lnk_ctl(aux, nr, bw, ef);
ef 1210 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c static int set_flash(struct net_device *netdev, struct ethtool_flash *ef)
ef 1231 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c ef->data[sizeof(ef->data) - 1] = '\0';
ef 1232 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c ret = request_firmware(&fw, ef->data, adap->pdev_dev);
ef 1248 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c "loaded firmware %s, reload cxgb4 driver\n", ef->data);
ef 168 fs/gfs2/xattr.c struct ea_find *ef = private;
ef 173 fs/gfs2/xattr.c if (ea->ea_type == ef->type) {
ef 174 fs/gfs2/xattr.c if (ea->ea_name_len == ef->namel &&
ef 175 fs/gfs2/xattr.c !memcmp(GFS2_EA2NAME(ea), ef->name, ea->ea_name_len)) {
ef 176 fs/gfs2/xattr.c struct gfs2_ea_location *el = ef->ef_el;
ef 191 fs/gfs2/xattr.c struct ea_find ef;
ef 194 fs/gfs2/xattr.c ef.type = type;
ef 195 fs/gfs2/xattr.c ef.name = name;
ef 196 fs/gfs2/xattr.c ef.namel = strlen(name);
ef 197 fs/gfs2/xattr.c ef.ef_el = el;
ef 201 fs/gfs2/xattr.c error = ea_foreach(ip, ea_find_i, &ef);
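The fs/gfs2/xattr.c entries at the end of this listing trace a callback-style lookup: the caller (lines 191-201) packs the search key into a struct ea_find and passes it to ea_foreach() as the private pointer, and the ea_find_i() callback (lines 168-176) compares each xattr entry against it. The sketch below is a minimal standalone illustration of that pattern only; the entry layout, the foreach_entry() iterator, and the find_cb() helper are invented for this example and are not the actual GFS2 code.

/*
 * Illustrative sketch of the ea_find/ea_foreach pattern (not GFS2 code).
 * Build: cc -std=c99 -Wall sketch.c
 */
#include <stdio.h>
#include <string.h>

struct entry {                    /* stand-in for an on-disk xattr entry */
	int type;
	const char *name;
	const char *value;
};

struct find_ctx {                 /* plays the role of struct ea_find */
	int type;
	const char *name;
	size_t namel;
	const struct entry **result;  /* plays the role of ef->ef_el */
};

/* Callback: compare one entry against the search key carried in 'private'. */
static int find_cb(const struct entry *ea, void *private)
{
	struct find_ctx *ef = private;

	if (ea->type == ef->type &&
	    strlen(ea->name) == ef->namel &&
	    !memcmp(ea->name, ef->name, ef->namel)) {
		*ef->result = ea;     /* record the match and stop iterating */
		return 1;
	}
	return 0;
}

/* Iterator: invoke 'cb' on each entry until it reports a match. */
static int foreach_entry(const struct entry *tbl, size_t n,
			 int (*cb)(const struct entry *, void *), void *private)
{
	for (size_t i = 0; i < n; i++)
		if (cb(&tbl[i], private))
			return 1;
	return 0;
}

int main(void)
{
	const struct entry table[] = {
		{ 1, "security.selinux", "ctx" },
		{ 2, "user.comment",     "hi"  },
	};
	const struct entry *el = NULL;
	struct find_ctx ef = {
		.type   = 2,
		.name   = "user.comment",
		.namel  = strlen("user.comment"),
		.result = &el,
	};

	/* Mirrors: error = ea_foreach(ip, ea_find_i, &ef); */
	if (foreach_entry(table, 2, find_cb, &ef))
		printf("found %s = %s\n", el->name, el->value);
	return 0;
}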