/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/gr/

ctxnv50.c:

	static void nv50_gr_construct_mmio(struct nvkm_grctx *ctx);
	static void nv50_gr_construct_xfer1(struct nvkm_grctx *ctx);
	static void nv50_gr_construct_xfer2(struct nvkm_grctx *ctx);
	...
	nv50_grctx_generate(struct nvkm_grctx *ctx)
	{
		cp_set (ctx, STATE, RUNNING);
		cp_set (ctx, XFER_SWITCH, ENABLE);
		...
		cp_bra (ctx, AUTO_SAVE, PENDING, cp_setup_save);
		cp_bra (ctx, USER_SAVE, PENDING, cp_setup_save);
		...
		cp_name(ctx, cp_check_load);
		cp_bra (ctx, AUTO_LOAD, PENDING, cp_setup_auto_load);
	[all …]

ctxnv40.c:

	nv40_gr_construct_general(struct nvkm_grctx *ctx)
	{
		struct nvkm_device *device = ctx->device;
		...
		cp_ctx(ctx, 0x4000a4, 1);
		gr_def(ctx, 0x4000a4, 0x00000008);
		cp_ctx(ctx, 0x400144, 58);
		gr_def(ctx, 0x400144, 0x00000001);
		cp_ctx(ctx, 0x400314, 1);
		gr_def(ctx, 0x400314, 0x00000000);
		cp_ctx(ctx, 0x400400, 10);
		cp_ctx(ctx, 0x400480, 10);
	[all …]

ctxnv40.h:

	cp_out(struct nvkm_grctx *ctx, u32 inst)
	{
		u32 *ctxprog = ctx->ucode;

		if (ctx->mode != NVKM_GRCTX_PROG)
			return;

		BUG_ON(ctx->ctxprog_len == ctx->ctxprog_max);
		ctxprog[ctx->ctxprog_len++] = inst;
	}

	cp_lsr(struct nvkm_grctx *ctx, u32 val)
	{
		cp_out(ctx, CP_LOAD_SR | val);
	}

	cp_ctx(struct nvkm_grctx *ctx, u32 reg, u32 length)
	{
		ctx->ctxprog_reg = (reg - 0x00400000) >> 2;
		...
		ctx->ctxvals_base = ctx->ctxvals_pos;
	[all …]
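The cp_* helpers above append 32-bit microcode words to a fixed-size ctxprog buffer: cp_out() bounds-checks then stores, and cp_lsr() shows the opcode-OR-operand encoding. A minimal standalone sketch of that bounded-emitter pattern (prog_buf, emit32, and OP_LOAD_SR are illustrative names, not the kernel's):

	#include <assert.h>
	#include <stddef.h>
	#include <stdint.h>

	#define OP_LOAD_SR 0x00200000u	/* illustrative opcode, not the real encoding */

	struct prog_buf {		/* stand-in for struct nvkm_grctx */
		uint32_t *code;
		size_t len, max;
	};

	/* Mirrors cp_out(): refuse to emit past the end of the buffer. */
	static void emit32(struct prog_buf *p, uint32_t inst)
	{
		assert(p->len < p->max);	/* the kernel uses BUG_ON() here */
		p->code[p->len++] = inst;
	}

	/* Mirrors cp_lsr(): the operand is OR-ed into the opcode word. */
	static void emit_load_sr(struct prog_buf *p, uint32_t val)
	{
		emit32(p, OP_LOAD_SR | val);
	}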
/linux-4.4.14/arch/powerpc/platforms/cell/spufs/

context.c:

	/* alloc_spu_context() */
	struct spu_context *ctx;

	ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
	if (!ctx)
		...
	if (spu_init_csa(&ctx->csa))
		...
	spin_lock_init(&ctx->mmio_lock);
	mutex_init(&ctx->mapping_lock);
	kref_init(&ctx->kref);
	mutex_init(&ctx->state_mutex);
	mutex_init(&ctx->run_mutex);
	init_waitqueue_head(&ctx->ibox_wq);
	[all …]

run.c:

	/* spufs_stop_callback() */
	struct spu_context *ctx = spu->ctx;
	...
	if (ctx) {
		...
		ctx->csa.class_0_pending = spu->class_0_pending;
		ctx->csa.class_0_dar = spu->class_0_dar;
		...
		ctx->csa.class_1_dsisr = spu->class_1_dsisr;
		ctx->csa.class_1_dar = spu->class_1_dar;
		...
		wake_up_all(&ctx->stop_wq);
	}

	int spu_stopped(struct spu_context *ctx, u32 *stat)
	{
		...
		*stat = ctx->ops->status_read(ctx);
		...
		if (test_bit(SPU_SCHED_NOTIFY_ACTIVE, &ctx->sched_flags))
	[all …]

backing_ops.c:

	static void gen_spu_event(struct spu_context *ctx, u32 event)
	{
		...
		ch0_cnt = ctx->csa.spu_chnlcnt_RW[0];
		ch0_data = ctx->csa.spu_chnldata_RW[0];
		ch1_data = ctx->csa.spu_chnldata_RW[1];
		ctx->csa.spu_chnldata_RW[0] |= event;
		...
		ctx->csa.spu_chnlcnt_RW[0] = 1;
	}

	static int spu_backing_mbox_read(struct spu_context *ctx, u32 * data)
	{
		...
		spin_lock(&ctx->csa.register_lock);
		mbox_stat = ctx->csa.prob.mb_stat_R;
		...
		*data = ctx->csa.prob.pu_mb_R;
	[all …]

sched.c:

	void spu_set_timeslice(struct spu_context *ctx)
	{
		if (ctx->prio < NORMAL_PRIO)
			ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE * 4, ctx->prio);
		else
			ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE, ctx->prio);
	}

	void __spu_update_sched_info(struct spu_context *ctx)
	{
		...
		BUG_ON(!list_empty(&ctx->rq));
		...
		ctx->tid = current->pid;
		...
			ctx->prio = current->prio;
		else
			ctx->prio = current->static_prio;
		ctx->policy = current->policy;
	[all …]

file.c:

	/* spufs_mem_open() */
	struct spu_context *ctx = i->i_ctx;

	mutex_lock(&ctx->mapping_lock);
	file->private_data = ctx;
	...
	ctx->local_store = inode->i_mapping;
	mutex_unlock(&ctx->mapping_lock);

	/* spufs_mem_release() */
	struct spu_context *ctx = i->i_ctx;

	mutex_lock(&ctx->mapping_lock);
	...
	ctx->local_store = NULL;
	mutex_unlock(&ctx->mapping_lock);

	__spufs_mem_read(struct spu_context *ctx, char __user *buffer,
			 ...)
	[all …]

fault.c:

	static void spufs_handle_event(struct spu_context *ctx,
				       ...)
	{
		...
		if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) {
			ctx->event_return |= type;
			wake_up_all(&ctx->stop_wq);
		}
		...
		ctx->ops->restart_dma(ctx);
		...
		ctx->ops->npc_read(ctx) - 4;
	}

	int spufs_handle_class0(struct spu_context *ctx)
	{
		unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK;
		...
		spufs_handle_event(ctx, ctx->csa.class_0_dar, ...);
		...
		spufs_handle_event(ctx, ctx->csa.class_0_dar, ...);
	[all …]

spufs.h:

	/* struct spu_context_ops (excerpt) */
	int (*mbox_read) (struct spu_context * ctx, u32 * data);
	u32 (*mbox_stat_read) (struct spu_context * ctx);
	unsigned int (*mbox_stat_poll)(struct spu_context *ctx,
				       ...);
	int (*ibox_read) (struct spu_context * ctx, u32 * data);
	int (*wbox_write) (struct spu_context * ctx, u32 data);
	u32 (*signal1_read) (struct spu_context * ctx);
	void (*signal1_write) (struct spu_context * ctx, u32 data);
	u32 (*signal2_read) (struct spu_context * ctx);
	void (*signal2_write) (struct spu_context * ctx, u32 data);
	void (*signal1_type_set) (struct spu_context * ctx, u64 val);
	[all …]
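spufs fills this ops table twice: hw_ops (next file) touches live SPU problem-state registers, while backing_ops works on the saved CSA image when the context is scheduled out; callers always go through ctx->ops and never care which backend is active. A minimal standalone sketch of the same one-vtable/two-backends pattern (all names here are hypothetical):

	#include <stdint.h>
	#include <stdio.h>

	/* Hypothetical mirror of struct spu_context_ops. */
	struct mbox_ops {
		int (*mbox_read)(void *ctx, uint32_t *data);
	};

	static int hw_mbox_read(void *ctx, uint32_t *data)      { *data = 1; return 1; }
	static int backing_mbox_read(void *ctx, uint32_t *data) { *data = 2; return 1; }

	static const struct mbox_ops hw_ops      = { .mbox_read = hw_mbox_read };
	static const struct mbox_ops backing_ops = { .mbox_read = backing_mbox_read };

	int main(void)
	{
		/* The scheduler would swap this pointer on save/restore. */
		const struct mbox_ops *ops = &backing_ops;
		uint32_t v;

		if (ops->mbox_read(NULL, &v))
			printf("mbox: %u\n", v);
		return 0;
	}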
hw_ops.c:

	static int spu_hw_mbox_read(struct spu_context *ctx, u32 * data)
	{
		struct spu *spu = ctx->spu;
		...
	}

	static u32 spu_hw_mbox_stat_read(struct spu_context *ctx)
	{
		return in_be32(&ctx->spu->problem->mb_stat_R);
	}

	static unsigned int spu_hw_mbox_stat_poll(struct spu_context *ctx,
						  ...)
	{
		struct spu *spu = ctx->spu;
		...
	}

	static int spu_hw_ibox_read(struct spu_context *ctx, u32 * data)
	{
		struct spu *spu = ctx->spu;
		...
	}

	static int spu_hw_wbox_write(struct spu_context *ctx, u32 data)
	{
		struct spu *spu = ctx->spu;
	[all …]

coredump.c:

	static ssize_t do_coredump_read(int num, struct spu_context *ctx, void *buffer,
					...)
	{
		...
		return spufs_coredump_read[num].read(ctx, buffer, size, off);
		...
		data = spufs_coredump_read[num].get(ctx);
		...
	}

	static int spufs_ctx_note_size(struct spu_context *ctx, int dfd)
	...

	/* match_context() */
	struct spu_context *ctx;
	...
	ctx = SPUFS_I(file_inode(file))->i_ctx;
	if (ctx->flags & SPU_CREATE_NOSCHED)
		...

	/* spufs_coredump_extra_notes_size() */
	struct spu_context *ctx;
	...
	while ((ctx = coredump_next_context(&fd)) != NULL) {
		rc = spu_acquire_saved(ctx);
	[all …]
/linux-4.4.14/arch/mips/net/

bpf_jit.c:

	static inline void emit_jit_reg_move(ptr dst, ptr src, struct jit_ctx *ctx);

	#define emit_instr(ctx, func, ...)			\
	do {							\
		if ((ctx)->target != NULL) {			\
			u32 *p = &(ctx)->target[ctx->idx];	\
			...					\
		}						\
		(ctx)->idx++;					\
	} while (0)

	#define emit_long_instr(ctx, func, ...)			\
	do {							\
		if ((ctx)->target != NULL) {			\
			u32 *p = &(ctx)->target[ctx->idx];	\
			...					\
		}						\
		(ctx)->idx++;					\
	} while (0)

	static inline void emit_addu(...,
				     unsigned int src2, struct jit_ctx *ctx)
	[all …]
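emit_instr() shows the two-pass JIT convention used throughout these files: on the first pass ctx->target is NULL, so nothing is written and ctx->idx just counts instructions; the image is then allocated at the right size and a second pass with ctx->target set emits for real. A standalone sketch of that pattern (the opcodes are illustrative only):

	#include <stdint.h>
	#include <stdio.h>
	#include <stdlib.h>

	struct jit_ctx {
		uint32_t *target;	/* NULL on the sizing pass */
		unsigned int idx;	/* instruction count / write cursor */
	};

	/* Emit one word, or just count it when no buffer is attached yet. */
	static void emit(struct jit_ctx *ctx, uint32_t insn)
	{
		if (ctx->target != NULL)
			ctx->target[ctx->idx] = insn;
		ctx->idx++;
	}

	static void build_prog(struct jit_ctx *ctx)
	{
		emit(ctx, 0x01094020);	/* illustrative encodings only */
		emit(ctx, 0x03e00008);
	}

	int main(void)
	{
		struct jit_ctx ctx = { NULL, 0 };

		build_prog(&ctx);			/* pass 1: size */
		ctx.target = calloc(ctx.idx, sizeof(uint32_t));
		ctx.idx = 0;
		build_prog(&ctx);			/* pass 2: emit */
		printf("emitted %u words\n", ctx.idx);
		free(ctx.target);
		return 0;
	}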
/linux-4.4.14/sound/soc/intel/skylake/

skl-sst-cldma.c:

	static void skl_cldma_int_enable(struct sst_dsp *ctx)
	{
		sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPIC,
						  ...);
	}

	void skl_cldma_int_disable(struct sst_dsp *ctx)
	{
		sst_dsp_shim_update_bits_unlocked(ctx,
						  ...);
	}

	static void skl_cldma_setup_bdle(struct sst_dsp *ctx,
					 ...)
	{
		...
		ctx->cl_dev.frags = 0;
		...
			(ctx->cl_dev.frags * ctx->cl_dev.bufsize));
		...
		bdl[2] = cpu_to_le32(ctx->cl_dev.bufsize);
		...
		size -= ctx->cl_dev.bufsize;
		...
		ctx->cl_dev.frags++;
	[all …]

skl-sst-dsp.c:

	void skl_dsp_set_state_locked(struct sst_dsp *ctx, int state)
	{
		mutex_lock(&ctx->mutex);
		ctx->sst_state = state;
		mutex_unlock(&ctx->mutex);
	}

	static int skl_dsp_core_set_reset_state(struct sst_dsp *ctx)
	{
		...
		sst_dsp_shim_update_bits_unlocked(ctx,
						  ...);
		...
		ret = sst_dsp_register_poll(ctx,
					    ...);
		...
		if ((sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPCS) &
		     ...
			dev_err(ctx->dev, "Set reset state failed\n");
		...
	}

	static int skl_dsp_core_unset_reset_state(struct sst_dsp *ctx)
	[all …]

skl-sst.c:

	static bool skl_check_fw_status(struct sst_dsp *ctx, u32 status)
	{
		...
		cur_sts = sst_dsp_shim_read(ctx, SKL_ADSP_FW_STATUS) & SKL_FW_STS_MASK;
		...
	}

	static int skl_transfer_firmware(struct sst_dsp *ctx,
					 ...)
	{
		...
		ret = ctx->cl_dev.ops.cl_copy_to_dmabuf(ctx, basefw, base_fw_size);
		...
		ret = sst_dsp_register_poll(ctx,
					    ...);
		...
		ctx->cl_dev.ops.cl_stop_dma(ctx);
		...
	}

	static int skl_load_base_firmware(struct sst_dsp *ctx)
	{
		...
		struct skl_sst *skl = ctx->thread_context;
		...
		if (ctx->fw == NULL) {
			ret = request_firmware(&ctx->fw, "dsp_fw_release.bin", ctx->dev);
	[all …]

skl-messages.c:

	static void skl_dsp_enable_notification(struct skl_sst *ctx, bool enable)
	{
		...
		skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)&mask);
	}

	/* skl_free_dsp() */
	struct skl_sst *ctx = skl->skl_sst;
	...
	skl_sst_dsp_cleanup(bus->dev, ctx);
	if (ctx->dsp->addr.lpe)
		iounmap(ctx->dsp->addr.lpe);

	/* skl_suspend_dsp() */
	struct skl_sst *ctx = skl->skl_sst;
	...
	ret = skl_dsp_sleep(ctx->dsp);

	/* skl_resume_dsp() */
	struct skl_sst *ctx = skl->skl_sst;
	...
	ret = skl_dsp_wake(ctx->dsp);
	[all …]

skl-sst-dsp.h:

	int (*load_fw)(struct sst_dsp *ctx);
	...
	int (*parse_fw)(struct sst_dsp *ctx);
	int (*set_state_D0)(struct sst_dsp *ctx);
	int (*set_state_D3)(struct sst_dsp *ctx);
	unsigned int (*get_fw_errcode)(struct sst_dsp *ctx);
	...
	void skl_cldma_process_intr(struct sst_dsp *ctx);
	void skl_cldma_int_disable(struct sst_dsp *ctx);
	int skl_cldma_prepare(struct sst_dsp *ctx);
	...
	void skl_dsp_set_state_locked(struct sst_dsp *ctx, int state);
	...
	int skl_dsp_disable_core(struct sst_dsp *ctx);
	[all …]
/linux-4.4.14/drivers/staging/skein/

skein_base.c:

	int skein_256_init(struct skein_256_ctx *ctx, size_t hash_bit_len)
	{
		...
		ctx->h.hash_bit_len = hash_bit_len; /* output hash bit count */
		...
		switch (hash_bit_len) {
		case 256:
			memcpy(ctx->x, SKEIN_256_IV_256, sizeof(ctx->x));
			break;
		case 224:
			memcpy(ctx->x, SKEIN_256_IV_224, sizeof(ctx->x));
			break;
		case 160:
			memcpy(ctx->x, SKEIN_256_IV_160, sizeof(ctx->x));
			break;
		case 128:
			memcpy(ctx->x, SKEIN_256_IV_128, sizeof(ctx->x));
			break;
		...
		}
		...
		skein_start_new_type(ctx, CFG_FINAL);
		...
		memset(ctx->x, 0, sizeof(ctx->x));
		skein_256_process_block(ctx, cfg.b, 1, SKEIN_CFG_STR_LEN);
		...
		skein_start_new_type(ctx, MSG); /* T0=0, T1= MSG type */
	[all …]

skein_api.c:

	int skein_ctx_prepare(struct skein_ctx *ctx, enum skein_size size)
	{
		skein_assert_ret(ctx && size, SKEIN_FAIL);

		memset(ctx, 0, sizeof(struct skein_ctx));
		ctx->skein_size = size;
		...
	}

	int skein_init(struct skein_ctx *ctx, size_t hash_bit_len)
	{
		...
		skein_assert_ret(ctx, SKEIN_FAIL);
		...
		x = ctx->m.s256.x;
		x_len = ctx->skein_size / 8;
		...
		switch (ctx->skein_size) {
		...
			ret = skein_256_init_ext(&ctx->m.s256, hash_bit_len,
						 ...);
	[all …]
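skein_ctx_prepare() only records the state size; skein_init() then picks the matching init routine from the chosen variant. A sketch of the resulting prepare/init/update/final call sequence, assuming the skein_update() and skein_final() entry points declared alongside these in skein_api.h:

	#include <linux/types.h>
	#include "skein_api.h"	/* staging-tree header; sketch assumes an in-tree build */

	/* Hash a buffer with Skein-256/256 through the wrappers above. */
	static int hash_with_skein256(const u8 *msg, size_t len, u8 hash[32])
	{
		struct skein_ctx ctx;
		int ret;

		ret = skein_ctx_prepare(&ctx, SKEIN_256);
		if (ret != SKEIN_SUCCESS)
			return ret;
		ret = skein_init(&ctx, 256);	/* 256-bit digest */
		if (ret != SKEIN_SUCCESS)
			return ret;
		skein_update(&ctx, msg, len);
		return skein_final(&ctx, hash);
	}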
skein_block.c:

	#ifdef ...
	#define debug_save_tweak(ctx)		\
	{					\
		ctx->h.tweak[0] = ts[0];	\
		ctx->h.tweak[1] = ts[1];	\
	}
	#else
	#define debug_save_tweak(ctx)
	#endif
	...
	void skein_256_process_block(struct skein_256_ctx *ctx, const u8 *blk_ptr,
				     ...)
	{
		...
		ts[0] = ctx->h.tweak[0];
		ts[1] = ctx->h.tweak[1];
		...
		ks[0] = ctx->x[0];
		ks[1] = ctx->x[1];
		ks[2] = ctx->x[2];
	[all …]
/linux-4.4.14/drivers/misc/cxl/

context.c:

	int cxl_context_init(struct cxl_context *ctx, struct cxl_afu *afu, bool master,
			     ...)
	{
		...
		spin_lock_init(&ctx->sste_lock);
		ctx->afu = afu;
		ctx->master = master;
		ctx->pid = NULL; /* Set in start work ioctl */
		mutex_init(&ctx->mapping_lock);
		ctx->mapping = mapping;
		...
		i = cxl_alloc_sst(ctx);
		...
		INIT_WORK(&ctx->fault_work, cxl_handle_fault);

		init_waitqueue_head(&ctx->wq);
	[all …]

fault.c:

	static struct cxl_sste* find_free_sste(struct cxl_context *ctx,
					       ...)
	{
		...
		unsigned int mask = (ctx->sst_size >> 7) - 1; /* SSTP0[SegTableSize] */
		...
		primary = ctx->sstp + (hash << 3);
		...
		ret = primary + ctx->sst_lru;
		ctx->sst_lru = (ctx->sst_lru + 1) & 0x7;
		...
	}

	static void cxl_load_segment(struct cxl_context *ctx, struct copro_slb *slb)
	{
		...
		spin_lock_irqsave(&ctx->sste_lock, flags);
		sste = find_free_sste(ctx, slb);
		...
			 sste - ctx->sstp, slb->vsid, slb->esid);
		trace_cxl_ste_write(ctx, sste - ctx->sstp, slb->esid, slb->vsid);
	[all …]

api.c:

	/* cxl_dev_context_init() */
	struct cxl_context *ctx;
	...
	ctx = cxl_context_alloc();
	if (IS_ERR(ctx)) {
		rc = PTR_ERR(ctx);
		...
	}

	ctx->kernelapi = true;
	...
	rc = cxl_context_init(ctx, afu, false, mapping);
	...
	cxl_assign_psn_space(ctx);

	return ctx;
	...
	kfree(ctx);
	...
	int cxl_release_context(struct cxl_context *ctx)
	[all …]

native.c:

	static void slb_invalid(struct cxl_context *ctx)
	{
		struct cxl *adapter = ctx->afu->adapter;
		...
		WARN_ON(!mutex_is_locked(&ctx->afu->spa_mutex));
		...
			   ((u64)be32_to_cpu(ctx->elem->common.pid) << 32) |
			   be32_to_cpu(ctx->elem->lpid));
		...
	}

	static int do_process_element_cmd(struct cxl_context *ctx,
					  ...)
	{
		...
		trace_cxl_llcmd(ctx, cmd);
		...
		WARN_ON(!ctx->afu->enabled);
		...
		ctx->elem->software_state = cpu_to_be32(pe_state);
		...
		*(ctx->afu->sw_command_status) = cpu_to_be64(cmd | 0 | ctx->pe);
	[all …]

file.c:

	/* __afu_open() */
	struct cxl_context *ctx;
	...
	if (!(ctx = cxl_context_alloc())) {
		...
	}

	if ((rc = cxl_context_init(ctx, afu, master, inode->i_mapping)))
		...
	pr_devel("afu_open pe: %i\n", ctx->pe);
	file->private_data = ctx;

	/* afu_release() */
	struct cxl_context *ctx = file->private_data;
	...
		 __func__, ctx->pe);
	cxl_context_detach(ctx);
	...
	if (!ctx->kernelapi) {
		mutex_lock(&ctx->mapping_lock);
	[all …]

irq.c:

	static irqreturn_t handle_psl_slice_error(struct cxl_context *ctx, u64 dsisr, u64 errstat)
	{
		...
		fir1 = cxl_p1_read(ctx->afu->adapter, CXL_PSL_FIR1);
		fir2 = cxl_p1_read(ctx->afu->adapter, CXL_PSL_FIR2);
		fir_slice = cxl_p1n_read(ctx->afu, CXL_PSL_FIR_SLICE_An);
		serr = cxl_p1n_read(ctx->afu, CXL_PSL_SERR_An);
		afu_debug = cxl_p1n_read(ctx->afu, CXL_AFU_DEBUG_An);

		dev_crit(&ctx->afu->dev, "PSL ERROR STATUS: 0x%016llx\n", errstat);
		dev_crit(&ctx->afu->dev, "PSL_FIR1: 0x%016llx\n", fir1);
		dev_crit(&ctx->afu->dev, "PSL_FIR2: 0x%016llx\n", fir2);
		dev_crit(&ctx->afu->dev, "PSL_SERR_An: 0x%016llx\n", serr);
	[all …]

trace.h:

	TP_PROTO(struct cxl_context *ctx),
	TP_ARGS(ctx),
	...
	__entry->card = ctx->afu->adapter->adapter_num;
	__entry->afu = ctx->afu->slice;
	__entry->pe = ctx->pe;
	...
	TP_PROTO(struct cxl_context *ctx, u64 wed, s16 num_interrupts, u64 amr),
	TP_ARGS(ctx, wed, num_interrupts, amr),
	...
	__entry->card = ctx->afu->adapter->adapter_num;
	__entry->afu = ctx->afu->slice;
	__entry->pe = ctx->pe;
	[all …]

main.c:

	static inline void _cxl_slbia(struct cxl_context *ctx, struct mm_struct *mm)
	{
		...
		if (!(task = get_pid_task(ctx->pid, PIDTYPE_PID))) {
			...
				 __func__, pid_nr(ctx->pid));
			...
		}
		...
			ctx->afu->adapter->adapter_num, ctx->afu->slice, ctx->pe);

		spin_lock_irqsave(&ctx->sste_lock, flags);
		trace_cxl_slbia(ctx);
		memset(ctx->sstp, 0, ctx->sst_size);
		spin_unlock_irqrestore(&ctx->sste_lock, flags);
		...
		cxl_afu_slbia(ctx->afu);
	}

	/* cxl_slbia_core() */
	struct cxl_context *ctx;
	[all …]
/linux-4.4.14/arch/arm/net/

bpf_jit_32.c:

	static inline void _emit(int cond, u32 inst, struct jit_ctx *ctx)
	{
		...
		if (ctx->target != NULL)
			ctx->target[ctx->idx] = inst;

		ctx->idx++;
	}

	static inline void emit(u32 inst, struct jit_ctx *ctx)
	{
		_emit(ARM_COND_AL, inst, ctx);
	}

	static u16 saved_regs(struct jit_ctx *ctx)
	{
		...
		if ((ctx->skf->len > 1) ||
		    (ctx->skf->insns[0].code == (BPF_RET | BPF_A)))
			...
		if (ctx->seen & SEEN_CALL)
	[all …]
/linux-4.4.14/drivers/gpu/drm/panel/

panel-samsung-s6e8aa0.c:

	static int s6e8aa0_clear_error(struct s6e8aa0 *ctx)
	{
		int ret = ctx->error;

		ctx->error = 0;
		return ret;
	}

	static void s6e8aa0_dcs_write(struct s6e8aa0 *ctx, const void *data, size_t len)
	{
		struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
		...
		if (ctx->error < 0)
			return;
		...
		dev_err(ctx->dev, "error %zd writing dcs seq: %*ph\n", ret,
			...);
		ctx->error = ret;
	}

	static int s6e8aa0_dcs_read(struct s6e8aa0 *ctx, u8 cmd, void *data, size_t len)
	{
		struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
	[all …]

panel-samsung-ld9040.c:

	static int ld9040_clear_error(struct ld9040 *ctx)
	{
		int ret = ctx->error;

		ctx->error = 0;
		return ret;
	}

	static int ld9040_spi_write_word(struct ld9040 *ctx, u16 data)
	{
		struct spi_device *spi = to_spi_device(ctx->dev);
		...
	}

	static void ld9040_dcs_write(struct ld9040 *ctx, const u8 *data, size_t len)
	{
		...
		if (ctx->error < 0 || len == 0)
			return;

		dev_dbg(ctx->dev, "writing dcs seq: %*ph\n", (int)len, data);
		ret = ld9040_spi_write_word(ctx, *data);
		...
		ret = ld9040_spi_write_word(ctx, *data | 0x100);
	[all …]

panel-lg-lg4573.c:

	static int lg4573_spi_write_u16(struct lg4573 *ctx, u16 data)
	{
		...
		dev_dbg(ctx->panel.dev, "writing data: %x\n", data);
		...
		return spi_sync(ctx->spi, &msg);
	}

	static int lg4573_spi_write_u16_array(struct lg4573 *ctx, const u16 *buffer,
					      ...)
	{
		...
		ret = lg4573_spi_write_u16(ctx, buffer[i]);
		...
	}

	static int lg4573_spi_write_dcs(struct lg4573 *ctx, u8 dcs)
	{
		return lg4573_spi_write_u16(ctx, (0x70 << 8 | dcs));
	}

	static int lg4573_display_on(struct lg4573 *ctx)
	{
		...
		ret = lg4573_spi_write_dcs(ctx, MIPI_DCS_EXIT_SLEEP_MODE);
		...
		return lg4573_spi_write_dcs(ctx, MIPI_DCS_SET_DISPLAY_ON);
	[all …]
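lg4573_spi_write_u16() elides the spi_message setup before the spi_sync() call. The usual shape of that helper, sketched from the standard <linux/spi/spi.h> API (the buffer handling here is an assumption, not the verbatim driver code):

	#include <linux/spi/spi.h>
	#include <linux/types.h>

	/* Sketch: send one big-endian 16-bit word over SPI synchronously. */
	static int spi_write_u16_sketch(struct spi_device *spi, u16 data)
	{
		__be16 buf = cpu_to_be16(data);
		struct spi_transfer xfer = {
			.tx_buf = &buf,
			.len    = 2,
		};
		struct spi_message msg;

		spi_message_init(&msg);
		spi_message_add_tail(&xfer, &msg);
		return spi_sync(spi, &msg);	/* blocks until the transfer completes */
	}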
/linux-4.4.14/fs/

timerfd.c:

	static inline bool isalarm(struct timerfd_ctx *ctx)
	{
		return ctx->clockid == CLOCK_REALTIME_ALARM ||
		       ctx->clockid == CLOCK_BOOTTIME_ALARM;
	}

	static void timerfd_triggered(struct timerfd_ctx *ctx)
	{
		...
		spin_lock_irqsave(&ctx->wqh.lock, flags);
		ctx->expired = 1;
		ctx->ticks++;
		wake_up_locked(&ctx->wqh);
		spin_unlock_irqrestore(&ctx->wqh.lock, flags);
	}

	/* timerfd_tmrproc() */
	struct timerfd_ctx *ctx = container_of(htmr, struct timerfd_ctx,
					       ...);
	[all …]
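timerfd_triggered() accumulates expirations in ctx->ticks and wakes the waitqueue; userspace drains that count as a single 8-byte value per read(2). A minimal consumer using only the documented timerfd syscalls:

	#include <stdint.h>
	#include <stdio.h>
	#include <sys/timerfd.h>
	#include <unistd.h>

	int main(void)
	{
		struct itimerspec its = {
			.it_value    = { .tv_sec = 1 },	/* first expiry in 1s */
			.it_interval = { .tv_sec = 1 },	/* then every 1s */
		};
		uint64_t expirations;
		int fd = timerfd_create(CLOCK_MONOTONIC, 0);

		if (fd < 0 || timerfd_settime(fd, 0, &its, NULL) < 0)
			return 1;
		/* Each read returns and clears the tick counter kept by
		 * timerfd_triggered() on the kernel side. */
		if (read(fd, &expirations, sizeof(expirations)) == sizeof(expirations))
			printf("timer fired %llu time(s)\n",
			       (unsigned long long)expirations);
		close(fd);
		return 0;
	}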
eventfd.c:

	__u64 eventfd_signal(struct eventfd_ctx *ctx, __u64 n)
	{
		...
		spin_lock_irqsave(&ctx->wqh.lock, flags);
		if (ULLONG_MAX - ctx->count < n)
			n = ULLONG_MAX - ctx->count;
		ctx->count += n;
		if (waitqueue_active(&ctx->wqh))
			wake_up_locked_poll(&ctx->wqh, POLLIN);
		spin_unlock_irqrestore(&ctx->wqh.lock, flags);
		...
	}

	static void eventfd_free_ctx(struct eventfd_ctx *ctx)
	{
		kfree(ctx);
	}
	[all …]
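eventfd_signal() saturates the 64-bit counter instead of overflowing, then wakes POLLIN waiters. The same counter semantics are visible from userspace: write(2) adds to the count, read(2) returns and clears it (in the default, non-semaphore mode):

	#include <stdint.h>
	#include <stdio.h>
	#include <sys/eventfd.h>
	#include <unistd.h>

	int main(void)
	{
		int efd = eventfd(0, 0);	/* initial count 0, no flags */
		uint64_t val = 3, out;

		if (efd < 0)
			return 1;
		write(efd, &val, sizeof(val));	/* count += 3 (eventfd_signal-style add) */
		val = 4;
		write(efd, &val, sizeof(val));	/* count += 4 */
		read(efd, &out, sizeof(out));	/* returns 7, resets count to 0 */
		printf("counter: %llu\n", (unsigned long long)out);
		close(efd);
		return 0;
	}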
aio.c:

	static struct file *aio_private_file(struct kioctx *ctx, loff_t nr_pages)
	{
		...
		inode->i_mapping->private_data = ctx;
		...
	}

	static void put_aio_ring_file(struct kioctx *ctx)
	{
		struct file *aio_ring_file = ctx->aio_ring_file;
		...
		ctx->aio_ring_file = NULL;
		...
	}

	static void aio_free_ring(struct kioctx *ctx)
	{
		...
		put_aio_ring_file(ctx);

		for (i = 0; i < ctx->nr_pages; i++) {
			...
				 page_count(ctx->ring_pages[i]));
			page = ctx->ring_pages[i];
	[all …]

userfaultfd.c:

	struct userfaultfd_ctx *ctx;	/* struct member */
	...
	static void userfaultfd_ctx_get(struct userfaultfd_ctx *ctx)
	{
		if (!atomic_inc_not_zero(&ctx->refcount))
			...
	}

	static void userfaultfd_ctx_put(struct userfaultfd_ctx *ctx)
	{
		if (atomic_dec_and_test(&ctx->refcount)) {
			VM_BUG_ON(spin_is_locked(&ctx->fault_pending_wqh.lock));
			VM_BUG_ON(waitqueue_active(&ctx->fault_pending_wqh));
			VM_BUG_ON(spin_is_locked(&ctx->fault_wqh.lock));
			VM_BUG_ON(waitqueue_active(&ctx->fault_wqh));
			VM_BUG_ON(spin_is_locked(&ctx->fd_wqh.lock));
	[all …]
/linux-4.4.14/sound/soc/intel/atom/sst/

sst.c:

	/* sst_process_pending_msg() */
	struct intel_sst_drv *ctx = container_of(work,
						 ...);
	...
	ctx->ops->post_message(ctx, NULL, false);

	static int sst_workqueue_init(struct intel_sst_drv *ctx)
	{
		INIT_LIST_HEAD(&ctx->memcpy_list);
		INIT_LIST_HEAD(&ctx->rx_list);
		INIT_LIST_HEAD(&ctx->ipc_dispatch_list);
		INIT_LIST_HEAD(&ctx->block_list);
		INIT_WORK(&ctx->ipc_post_msg_wq, sst_process_pending_msg);
		init_waitqueue_head(&ctx->wait_queue);

		ctx->post_msg_wq =
			...;
	[all …]

sst_acpi.c:

	static int sst_platform_get_resources(struct intel_sst_drv *ctx)
	{
		...
		struct platform_device *pdev = to_platform_device(ctx->dev);
		...
			ctx->pdata->res_info->acpi_lpe_res_index);
		...
		dev_err(ctx->dev, "Invalid SHIM base from IFWI");
		...
		dev_info(ctx->dev, "LPE base: %#x size:%#x", (unsigned int) rsrc->start,
			 ...);

		ctx->iram_base = rsrc->start + ctx->pdata->res_info->iram_offset;
		ctx->iram_end = ctx->iram_base + ctx->pdata->res_info->iram_size - 1;
		dev_info(ctx->dev, "IRAM base: %#x", ctx->iram_base);
		ctx->iram = devm_ioremap_nocache(ctx->dev, ctx->iram_base,
						 ctx->pdata->res_info->iram_size);
	[all …]

sst_drv_interface.c:

	int free_stream_context(struct intel_sst_drv *ctx, unsigned int str_id)
	{
		...
		stream = get_stream_info(ctx, str_id);
		...
		ret = sst_free_stream(ctx, str_id);
		...
		sst_clean_stream(&ctx->streams[str_id]);
		...
		dev_err(ctx->dev, "we tried to free stream context %d which was freed!!!\n", str_id);
		...
	}

	int sst_get_stream_allocated(struct intel_sst_drv *ctx,
				     ...)
	{
		...
		retval = ctx->ops->alloc_stream(ctx, str_param);
		...
		dev_dbg(ctx->dev, "Stream allocated %d\n", retval);
		...
	}

	int sst_get_stream(struct intel_sst_drv *ctx,
			   ...)
	{
		...
		retval = ctx->ops->alloc_stream(ctx, str_param);
	[all …]

sst_pci.c:

	static int sst_platform_get_resources(struct intel_sst_drv *ctx)
	{
		...
		struct pci_dev *pci = ctx->pci;
		...
		if (ctx->dev_id == SST_MRFLD_PCI_ID) {
			ctx->ddr_base = pci_resource_start(pci, 0);
			...
			ddr_base = relocate_imr_addr_mrfld(ctx->ddr_base);
			if (!ctx->pdata->lib_info) {
				dev_err(ctx->dev, "lib_info pointer NULL\n");
				...
			}
			if (ddr_base != ctx->pdata->lib_info->mod_base) {
				dev_err(ctx->dev,
					...);
				...
			}
			ctx->ddr_end = pci_resource_end(pci, 0);
	[all …]
/linux-4.4.14/drivers/gpu/drm/exynos/

exynos5433_drm_decon.c:

	static inline void decon_set_bits(struct decon_context *ctx, u32 reg, u32 mask,
					  ...)
	{
		val = (val & mask) | (readl(ctx->addr + reg) & ~mask);
		writel(val, ctx->addr + reg);
	}

	/* decon_enable_vblank() */
	struct decon_context *ctx = crtc->ctx;
	...
	if (test_bit(BIT_SUSPENDED, &ctx->flags))
		...
	if (test_and_set_bit(BIT_IRQS_ENABLED, &ctx->flags)) {
		...
		if (ctx->out_type == IFTYPE_I80)
			...
		writel(val, ctx->addr + DECON_VIDINTCON0);
	}

	/* decon_disable_vblank() */
	struct decon_context *ctx = crtc->ctx;

	if (test_bit(BIT_SUSPENDED, &ctx->flags))
	[all …]

exynos7_drm_decon.c:

	/* decon_wait_for_vblank() */
	struct decon_context *ctx = crtc->ctx;

	if (ctx->suspended)
		return;

	atomic_set(&ctx->wait_vsync_event, 1);
	...
	if (!wait_event_timeout(ctx->wait_vsync_queue,
				!atomic_read(&ctx->wait_vsync_event),
				...))
		...

	/* decon_clear_channels() */
	struct decon_context *ctx = crtc->ctx;
	...
	u32 val = readl(ctx->regs + WINCON(win));
	...
	writel(val, ctx->regs + WINCON(win));
	...
	unsigned int state = ctx->suspended;

	ctx->suspended = 0;
	[all …]

exynos_drm_fimd.c:

	/* fimd_enable_vblank() */
	struct fimd_context *ctx = crtc->ctx;
	...
	if (ctx->suspended)
		...
	if (!test_and_set_bit(0, &ctx->irq_flags)) {
		val = readl(ctx->regs + VIDINTCON0);
		...
		if (ctx->i80_if) {
			...
		}
		writel(val, ctx->regs + VIDINTCON0);
	}

	/* fimd_disable_vblank() */
	struct fimd_context *ctx = crtc->ctx;
	...
	if (ctx->suspended)
		...
	if (test_and_clear_bit(0, &ctx->irq_flags)) {
		val = readl(ctx->regs + VIDINTCON0);
	[all …]

exynos_drm_vidi.c:

	/* vidi_enable_vblank() */
	struct vidi_context *ctx = crtc->ctx;

	if (ctx->suspended)
		...
	if (!test_and_set_bit(0, &ctx->irq_flags))
		ctx->vblank_on = true;

	ctx->direct_vblank = true;
	...
	schedule_work(&ctx->work);

	/* vidi_disable_vblank() */
	struct vidi_context *ctx = crtc->ctx;

	if (ctx->suspended)
		...
	if (test_and_clear_bit(0, &ctx->irq_flags))
		ctx->vblank_on = false;
	[all …]

exynos_drm_fimc.c:

	static u32 fimc_read(struct fimc_context *ctx, u32 reg)
	{
		return readl(ctx->regs + reg);
	}

	static void fimc_write(struct fimc_context *ctx, u32 val, u32 reg)
	{
		writel(val, ctx->regs + reg);
	}

	static void fimc_set_bits(struct fimc_context *ctx, u32 reg, u32 bits)
	{
		void __iomem *r = ctx->regs + reg;
		...
	}

	static void fimc_clear_bits(struct fimc_context *ctx, u32 reg, u32 bits)
	{
		void __iomem *r = ctx->regs + reg;
		...
	}

	static void fimc_sw_reset(struct fimc_context *ctx)
	{
		...
		cfg = fimc_read(ctx, EXYNOS_CISTATUS);
	[all …]

exynos_drm_dpi.c:

	/* exynos_dpi_detect() */
	struct exynos_dpi *ctx = connector_to_dpi(connector);

	if (ctx->panel && !ctx->panel->connector)
		drm_panel_attach(ctx->panel, &ctx->connector);

	/* exynos_dpi_get_modes() */
	struct exynos_dpi *ctx = connector_to_dpi(connector);
	...
	if (ctx->vm) {
		...
		drm_display_mode_from_videomode(ctx->vm, mode);
		...
	}

	if (ctx->panel)
		return ctx->panel->funcs->get_modes(ctx->panel);
	...

	/* exynos_dpi_best_encoder() */
	struct exynos_dpi *ctx = connector_to_dpi(connector);

	return &ctx->encoder;
	[all …]

exynos_mixer.c:

	static void mixer_regs_dump(struct mixer_context *ctx)
	{
		...
			(u32)readl(ctx->mixer_res.mixer_regs + reg_id)); \
		...
	}

	static void vp_regs_dump(struct mixer_context *ctx)
	{
		...
			(u32) readl(ctx->mixer_res.vp_regs + reg_id)); \
		...
	}

	static void mixer_vsync_set_update(struct mixer_context *ctx, bool enable)
	{
		struct mixer_resources *res = &ctx->mixer_res;
		...
		if (ctx->vp_enabled)
			...
	}

	static void mixer_cfg_scan(struct mixer_context *ctx, unsigned int height)
	{
		struct mixer_resources *res = &ctx->mixer_res;
		...
		val = (ctx->interlace ? MXR_CFG_SCAN_INTERLACE :
		       ...);
	[all …]

exynos_drm_gsc.c:

	#define gsc_read(offset)	readl(ctx->regs + (offset))
	#define gsc_write(cfg, offset)	writel(cfg, ctx->regs + (offset))
	...
	static int gsc_sw_reset(struct gsc_context *ctx)
	...

	static void gsc_set_gscblk_fimd_wb(struct gsc_context *ctx, bool enable)
	{
		...
		gscblk_cfg |= GSC_BLK_DISP1WB_DEST(ctx->id) |
			      GSC_BLK_GSCL_WB_IN_SRC_SEL(ctx->id) |
			      GSC_BLK_SW_RESET_WB_DEST(ctx->id);
		...
		gscblk_cfg |= GSC_BLK_PXLASYNC_LO_MASK_WB(ctx->id);
		...
	}

	static void gsc_handle_irq(struct gsc_context *ctx, bool enable,
				   ...)
	...

	/* gsc_src_set_fmt() */
	struct gsc_context *ctx = get_gsc_context(dev);
	[all …]
/linux-4.4.14/crypto/asymmetric_keys/

x509_cert_parser.c:

	/* x509_cert_parse() */
	struct x509_parse_context *ctx;
	...
	ctx = kzalloc(sizeof(struct x509_parse_context), GFP_KERNEL);
	if (!ctx)
		...
	ctx->cert = cert;
	ctx->data = (unsigned long)data;
	...
	ret = asn1_ber_decoder(&x509_decoder, ctx, data, datalen);
	...
	if (ctx->raw_akid) {
		...
			 ctx->raw_akid_size, ctx->raw_akid_size, ctx->raw_akid);
		ret = asn1_ber_decoder(&x509_akid_decoder, ctx,
				       ctx->raw_akid, ctx->raw_akid_size);
	[all …]
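x509_cert_parse() runs a generated BER decoder over the raw DER: the decoder table (x509_decoder) is compiled at build time from an .asn1 grammar, and the ctx pointer is threaded back into whatever action callbacks the grammar names. A hedged sketch of the call shape — the grammar, table, and callback names below are illustrative, not from the tree:

	#include <linux/asn1_decoder.h>
	#include <linux/types.h>

	/* my_decoder would be generated by scripts/asn1_compiler from my.asn1;
	 * my_note_version is an action callback that grammar would reference. */
	extern const struct asn1_decoder my_decoder;

	struct my_parse_ctx { int version; };

	int my_note_version(void *context, size_t hdrlen, unsigned char tag,
			    const void *value, size_t vlen)
	{
		struct my_parse_ctx *ctx = context;	/* same ctx passed below */

		ctx->version = *(const u8 *)value;
		return 0;
	}

	static int my_parse(const void *data, size_t datalen)
	{
		struct my_parse_ctx ctx = { 0 };

		/* Walks the BER/DER input, invoking the grammar's callbacks. */
		return asn1_ber_decoder(&my_decoder, &ctx, data, datalen);
	}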
pkcs7_parser.c:

	/* pkcs7_parse_message() */
	struct pkcs7_parse_context *ctx;
	...
	ctx = kzalloc(sizeof(struct pkcs7_parse_context), GFP_KERNEL);
	if (!ctx)
		...
	ctx->msg = kzalloc(sizeof(struct pkcs7_message), GFP_KERNEL);
	if (!ctx->msg)
		...
	ctx->sinfo = kzalloc(sizeof(struct pkcs7_signed_info), GFP_KERNEL);
	if (!ctx->sinfo)
		...
	ctx->data = (unsigned long)data;
	ctx->ppcerts = &ctx->certs;
	ctx->ppsinfo = &ctx->msg->signed_infos;
	[all …]

verify_pefile.c:

	/* pefile_parse_binary(..., struct pefile_context *ctx) */
	...
	ctx->image_checksum_offset =
		...;
	ctx->header_size = pe32->header_size;
	...
	ctx->n_data_dirents = pe32->data_dirs;
	...
	ctx->image_checksum_offset =
		...;
	ctx->header_size = pe64->header_size;
	...
	ctx->n_data_dirents = pe64->data_dirs;
	...
	pr_debug("checksum @ %x\n", ctx->image_checksum_offset);
	pr_debug("header size = %x\n", ctx->header_size);

	if (cursor >= ctx->header_size || ctx->header_size >= datalen)
	[all …]

mscode_parser.c:

	int mscode_parse(struct pefile_context *ctx)
	{
		...
		ret = pkcs7_get_content_data(ctx->pkcs7, &content_data, &data_len, 1);
		...
		return asn1_ber_decoder(&mscode_decoder, ctx, content_data, data_len);
	}

	/* mscode_note_digest_algo() */
	struct pefile_context *ctx = context;
	...
		ctx->digest_algo = HASH_ALGO_MD4;
		...
		ctx->digest_algo = HASH_ALGO_MD5;
		...
		ctx->digest_algo = HASH_ALGO_SHA1;
		...
		ctx->digest_algo = HASH_ALGO_SHA256;
		...
		ctx->digest_algo = HASH_ALGO_SHA384;
		...
		ctx->digest_algo = HASH_ALGO_SHA512;
	[all …]
/linux-4.4.14/drivers/media/platform/coda/

coda-bit.c:

	static void coda_free_bitstream_buffer(struct coda_ctx *ctx);
	...
	static void coda_command_async(struct coda_ctx *ctx, int cmd)
	{
		struct coda_dev *dev = ctx->dev;
		...
		coda_write(dev, ctx->bit_stream_param,
			   ...);
		coda_write(dev, ctx->frm_dis_flg,
			   CODA_REG_BIT_FRM_DIS_FLG(ctx->reg_idx));
		coda_write(dev, ctx->frame_mem_ctrl,
			   ...);
		coda_write(dev, ctx->workbuf.paddr, CODA_REG_BIT_WORK_BUF_ADDR);
		...
		coda_write(dev, ctx->idx, CODA_REG_BIT_RUN_INDEX);
		coda_write(dev, ctx->params.codec_mode, CODA_REG_BIT_RUN_COD_STD);
	[all …]

trace.h:

	TP_PROTO(struct coda_ctx *ctx, int cmd),
	TP_ARGS(ctx, cmd),
	...
	__field(int, ctx)
	...
	__entry->minor = ctx->fh.vdev->minor;
	__entry->ctx = ctx->idx;
	...
	  __entry->minor, __entry->ctx, __entry->cmd)
	...
	TP_PROTO(struct coda_ctx *ctx),
	TP_ARGS(ctx),
	...
	__field(int, ctx)
	...
	__entry->minor = ctx->fh.vdev->minor;
	[all …]

coda-common.c:

	void coda_write_base(struct coda_ctx *ctx, struct coda_q_data *q_data,
			     ...)
	{
		...
		coda_write(ctx->dev, base_y, reg_y);
		coda_write(ctx->dev, base_cb, reg_y + 4);
		coda_write(ctx->dev, base_cr, reg_y + 8);
	}

	/* coda_querycap() */
	struct coda_ctx *ctx = fh_to_ctx(priv);
	...
	strlcpy(cap->card, coda_product_name(ctx->dev->devtype->product),
		...);

	/* coda_g_fmt() */
	struct coda_ctx *ctx = fh_to_ctx(priv);

	q_data = get_q_data(ctx, f->type);
	...
	f->fmt.pix.colorspace = ctx->colorspace;
	...

	static int coda_try_pixelformat(struct coda_ctx *ctx, struct v4l2_format *f)
	[all …]

coda-jpeg.c:

	int coda_jpeg_write_tables(struct coda_ctx *ctx)
	{
		...
		{ 512, ctx->params.jpeg_qmat_tab[0], 64 },
		{ 576, ctx->params.jpeg_qmat_tab[1], 64 },
		{ 640, ctx->params.jpeg_qmat_tab[1], 64 },
		...
		coda_memcpy_parabuf(ctx->parabuf.vaddr, huff + i);
		...
		coda_memcpy_parabuf(ctx->parabuf.vaddr, qmat + i);
		...
	}

	bool coda_jpeg_check_buffer(struct coda_ctx *ctx, struct vb2_v4l2_buffer *vb)
	...

	void coda_set_jpeg_compression_quality(struct coda_ctx *ctx, int quality)
	{
		...
		ctx->params.jpeg_quality = quality;
		...
		if (ctx->params.jpeg_qmat_tab[0]) {
	[all …]
/linux-4.4.14/drivers/media/platform/s5p-mfc/

s5p_mfc_opr_v5.c:

	static int s5p_mfc_alloc_dec_temp_buffers_v5(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
		...
		ctx->dsc.size = buf_size->dsc;
		ret = s5p_mfc_alloc_priv_buf(dev->mem_dev_l, dev->bank1, &ctx->dsc);
		...
		BUG_ON(ctx->dsc.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
		memset(ctx->dsc.virt, 0, ctx->dsc.size);
		...
	}

	static void s5p_mfc_release_dec_desc_buffer_v5(struct s5p_mfc_ctx *ctx)
	{
		s5p_mfc_release_priv_buf(ctx->dev->mem_dev_l, &ctx->dsc);
	}

	static int s5p_mfc_alloc_codec_buffers_v5(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
	[all …]

s5p_mfc_dec.c:

	static int s5p_mfc_ctx_ready(struct s5p_mfc_ctx *ctx)
	{
		...
		if (ctx->src_queue_cnt >= 1 && ctx->state == MFCINST_GOT_INST)
			return 1;
		...
		if (ctx->src_queue_cnt >= 1 &&
		    ctx->state == MFCINST_RUNNING &&
		    ctx->dst_queue_cnt >= ctx->pb_count)
			return 1;
		...
		if (ctx->state == MFCINST_FINISHING &&
		    ctx->dst_queue_cnt >= ctx->pb_count)
			return 1;
		...
		if (ctx->src_queue_cnt >= 1 &&
		    ctx->state == MFCINST_HEAD_PARSED &&
		    ctx->capture_state == QUEUE_BUFS_MMAPED)
	[all …]

s5p_mfc_opr_v6.c:

	static int s5p_mfc_alloc_dec_temp_buffers_v6(struct s5p_mfc_ctx *ctx)
	{
		...
	}

	static void s5p_mfc_release_dec_desc_buffer_v6(struct s5p_mfc_ctx *ctx)
	{
		...
	}

	static int s5p_mfc_alloc_codec_buffers_v6(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
		...
		mb_width = MB_WIDTH(ctx->img_width);
		mb_height = MB_HEIGHT(ctx->img_height);

		if (ctx->type == MFCINST_DECODER) {
			...
				  ctx->luma_size, ctx->chroma_size, ctx->mv_size);
			mfc_debug(2, "Totals bufs: %d\n", ctx->total_dpb_count);
		} else if (ctx->type == MFCINST_ENCODER) {
	[all …]

s5p_mfc.c:

	void clear_work_bit(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
		...
		__clear_bit(ctx->num, &dev->ctx_work_bits);
		...
	}

	void set_work_bit(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
		...
		__set_bit(ctx->num, &dev->ctx_work_bits);
		...
	}

	void clear_work_bit_irqsave(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
		...
		__clear_bit(ctx->num, &dev->ctx_work_bits);
		...
	}

	void set_work_bit_irqsave(struct s5p_mfc_ctx *ctx)
	[all …]

s5p_mfc_enc.c:

	static int s5p_mfc_ctx_ready(struct s5p_mfc_ctx *ctx)
	{
		...
			  ctx->src_queue_cnt, ctx->dst_queue_cnt, ctx->state);
		...
		if (ctx->state == MFCINST_GOT_INST && ctx->dst_queue_cnt >= 1)
			return 1;
		...
		if ((ctx->state == MFCINST_RUNNING ||
		     ctx->state == MFCINST_HEAD_PRODUCED) &&
		    ctx->src_queue_cnt >= 1 && ctx->dst_queue_cnt >= 1)
			return 1;
		...
		if (ctx->state == MFCINST_FINISHING &&
		    ctx->dst_queue_cnt >= 1)
			return 1;
		...
	}

	static void cleanup_ref_queue(struct s5p_mfc_ctx *ctx)
	{
		...
		while (!list_empty(&ctx->ref_queue)) {
	[all …]

s5p_mfc_intr.c:

	int s5p_mfc_wait_for_done_ctx(struct s5p_mfc_ctx *ctx,
				      ...)
	{
		...
		ret = wait_event_interruptible_timeout(ctx->queue,
			(ctx->int_cond && (ctx->int_type == command
				|| ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)),
			...);
		...
		ret = wait_event_timeout(ctx->queue,
			(ctx->int_cond && (ctx->int_type == command
				|| ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)),
			...);
		...
			ctx->int_type, command);
		...
			ctx->int_type, command);
		if (ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)
	[all …]
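s5p_mfc_wait_for_done_ctx() is the standard interrupt-completion idiom: the IRQ handler records int_type/int_cond and wakes ctx->queue, while the caller sleeps in wait_event_*_timeout() on a condition that re-checks those fields. A generic kernel-style sketch of the pattern (the device struct is hypothetical; the waitqueue must have been set up with init_waitqueue_head()):

	#include <linux/errno.h>
	#include <linux/jiffies.h>
	#include <linux/wait.h>

	/* Hypothetical device: the IRQ handler fills int_type and sets done. */
	struct my_dev {
		wait_queue_head_t queue;
		int  int_type;
		bool done;
	};

	/* IRQ side: record what completed, then wake every waiter. */
	static void my_dev_irq_complete(struct my_dev *dev, int type)
	{
		dev->int_type = type;
		dev->done = true;
		wake_up(&dev->queue);
	}

	/* Caller side: sleep until the expected command completes or times out. */
	static int my_dev_wait(struct my_dev *dev, int command)
	{
		long ret = wait_event_timeout(dev->queue,
					      dev->done && dev->int_type == command,
					      msecs_to_jiffies(1000));
		return ret ? 0 : -ETIMEDOUT;
	}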
s5p_mfc_cmd_v5.c:

	static int s5p_mfc_open_inst_cmd_v5(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
		...
		mfc_debug(2, "Getting instance number (codec: %d)\n", ctx->codec_mode);
		dev->curr_ctx = ctx->num;
		...
		switch (ctx->codec_mode) {
		...
		}
		...
		h2r_args.arg[2] = ctx->ctx.ofs;
		h2r_args.arg[3] = ctx->ctx.size;
		...
		ctx->state = MFCINST_ERROR;
		...
	}

	static int s5p_mfc_close_inst_cmd_v5(struct s5p_mfc_ctx *ctx)
	{
		struct s5p_mfc_dev *dev = ctx->dev;
	[all …]
/linux-4.4.14/drivers/gpu/drm/

drm_modeset_lock.c:

	/* drm_modeset_lock_all() */
	struct drm_modeset_acquire_ctx *ctx;
	...
	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (WARN_ON(!ctx))
		...
	drm_modeset_acquire_init(ctx, 0);
	...
	ret = drm_modeset_lock(&config->connection_mutex, ctx);
	...
	ret = drm_modeset_lock_all_crtcs(dev, ctx);
	...
	config->acquire_ctx = ctx;
	...
	drm_modeset_backoff(ctx);
	...
	kfree(ctx);

	/* drm_modeset_unlock_all() */
	struct drm_modeset_acquire_ctx *ctx = config->acquire_ctx;
	[all …]
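drm_modeset_lock_all() hides the ww-mutex deadlock/backoff dance behind that drm_modeset_backoff() call. Code open-coding it follows the documented retry loop: on -EDEADLK, back off (which drops held locks and sleeps on the contended one), then retry. A sketch of that loop using the same API:

	#include <drm/drm_modeset_lock.h>

	/* Sketch of the acquire/backoff retry loop around one modeset lock
	 * (lock would be whichever drm_modeset_lock the driver needs). */
	static void lock_with_backoff(struct drm_modeset_lock *lock)
	{
		struct drm_modeset_acquire_ctx ctx;
		int ret;

		drm_modeset_acquire_init(&ctx, 0);
	retry:
		ret = drm_modeset_lock(lock, &ctx);
		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);	/* drop and wait, then retry */
			goto retry;
		}

		/* ... touch modeset state here ... */

		drm_modeset_drop_locks(&ctx);
		drm_modeset_acquire_fini(&ctx);
	}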
drm_context.c:

	/* drm_legacy_resctx() */
	struct drm_ctx ctx;
	...
	memset(&ctx, 0, sizeof(ctx));
	...
	ctx.handle = i;
	if (copy_to_user(&res->contexts[i], &ctx, sizeof(ctx)))
		...

	/* drm_legacy_addctx() */
	struct drm_ctx *ctx = data;
	...
	ctx->handle = drm_legacy_ctxbitmap_next(dev);
	if (ctx->handle == DRM_KERNEL_CONTEXT) {
		...
		ctx->handle = drm_legacy_ctxbitmap_next(dev);
	}
	DRM_DEBUG("%d\n", ctx->handle);
	if (ctx->handle == -1) {
	[all …]
/linux-4.4.14/drivers/hwmon/

pwm-fan.c:

	static int __set_pwm(struct pwm_fan_ctx *ctx, unsigned long pwm)
	{
		...
		mutex_lock(&ctx->lock);
		if (ctx->pwm_value == pwm)
			...
		duty = DIV_ROUND_UP(pwm * (ctx->pwm->period - 1), MAX_PWM);
		ret = pwm_config(ctx->pwm, duty, ctx->pwm->period);
		...
		pwm_disable(ctx->pwm);
		...
		if (ctx->pwm_value == 0) {
			ret = pwm_enable(ctx->pwm);
			...
		}
		ctx->pwm_value = pwm;
		...
		mutex_unlock(&ctx->lock);
	[all …]
/linux-4.4.14/arch/arm64/net/

bpf_jit_comp.c:

	static inline void emit(const u32 insn, struct jit_ctx *ctx)
	{
		if (ctx->image != NULL)
			ctx->image[ctx->idx] = cpu_to_le32(insn);

		ctx->idx++;
	}

	emit_a64_mov_i64(...,
			 struct jit_ctx *ctx)
	{
		...
		emit(A64_MOVZ(1, reg, tmp & 0xffff, shift), ctx);
		...
		emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx);
		...
	}

	emit_a64_mov_i(...,
		       const s32 val, struct jit_ctx *ctx)
	{
		...
		emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);
		...
		emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);
	[all …]
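emit_a64_mov_i64() materializes a 64-bit constant as one MOVZ plus up to three MOVK instructions, each carrying a 16-bit chunk at a shifted position. A host-side sketch of just the chunking logic (the real instruction encodings are elided; this only prints pseudo-assembly):

	#include <stdint.h>
	#include <stdio.h>

	/* Split a 64-bit immediate into the 16-bit chunks MOVZ/MOVK carry. */
	static void mov_imm64(int reg, uint64_t val)
	{
		uint64_t tmp = val;
		int shift = 0;

		printf("movz x%d, #0x%llx, lsl #%d\n",
		       reg, (unsigned long long)(tmp & 0xffff), shift);
		for (tmp >>= 16, shift = 16; tmp; tmp >>= 16, shift += 16) {
			if (tmp & 0xffff)	/* skip all-zero chunks */
				printf("movk x%d, #0x%llx, lsl #%d\n",
				       reg, (unsigned long long)(tmp & 0xffff),
				       shift);
		}
	}

	int main(void)
	{
		mov_imm64(0, 0x00400000deadbeefULL);
		return 0;
	}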
/linux-4.4.14/drivers/char/hw_random/

xgene-rng.c:

	/* xgene_rng_expired_timer() */
	struct xgene_rng_dev *ctx = (struct xgene_rng_dev *) arg;
	...
	disable_irq(ctx->irq);
	ctx->failure_cnt = 0;
	del_timer(&ctx->failure_timer);
	enable_irq(ctx->irq);

	static void xgene_rng_start_timer(struct xgene_rng_dev *ctx)
	{
		ctx->failure_timer.data = (unsigned long) ctx;
		ctx->failure_timer.function = xgene_rng_expired_timer;
		ctx->failure_timer.expires = jiffies + 120 * HZ;
		add_timer(&ctx->failure_timer);
	}
	[all …]
/linux-4.4.14/drivers/crypto/

atmel-sha.c:

	static size_t atmel_sha_append_sg(struct atmel_sha_reqctx *ctx)
	{
		...
		while ((ctx->bufcnt < ctx->buflen) && ctx->total) {
			count = min(ctx->sg->length - ctx->offset, ctx->total);
			count = min(count, ctx->buflen - ctx->bufcnt);
			...
			if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) {
				ctx->sg = sg_next(ctx->sg);
				...
			}
			...
			scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg,
						 ctx->offset, count, 0);

			ctx->bufcnt += count;
			ctx->offset += count;
	[all …]

bfin_crc.c:

	/* bfin_crypto_crc_init() */
	struct bfin_crypto_crc_reqctx *ctx = ahash_request_ctx(req);
	...
	dev_dbg(ctx->crc->dev, "crc_init\n");
	...
	dev_dbg(ctx->crc->dev, "init: requested sg list is too big > %d\n",
		...);
	...
	ctx->crc = crc;
	ctx->bufnext_len = 0;
	ctx->buflast_len = 0;
	ctx->sg_buflen = 0;
	ctx->total = 0;
	ctx->flag = 0;
	...
	dev_dbg(ctx->crc->dev, "init: digest size: %d\n",
		...);
	[all …]

img-hash.c:

	/* img_hash_start() */
	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
	...
	if (ctx->flags & DRIVER_FLAGS_MD5)
		...
	else if (ctx->flags & DRIVER_FLAGS_SHA1)
		...
	else if (ctx->flags & DRIVER_FLAGS_SHA224)
		...
	else if (ctx->flags & DRIVER_FLAGS_SHA256)
		...

	/* img_hash_dma_callback() */
	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);

	if (ctx->bufcnt) {
		img_hash_xmit_cpu(hdev, ctx->buffer, ctx->bufcnt, 0);
		ctx->bufcnt = 0;
	}
	if (ctx->sg)
	[all …]

omap-sham.c:

	/* omap_sham_copy_hash_omap2() */
	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
	struct omap_sham_dev *dd = ctx->dd;
	u32 *hash = (u32 *)ctx->digest;
	...

	/* omap_sham_copy_hash_omap4() */
	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
	struct omap_sham_dev *dd = ctx->dd;
	...
	if (ctx->flags & BIT(FLAGS_HMAC)) {
		...
	}

	/* omap_sham_copy_ready_hash() */
	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
	u32 *in = (u32 *)ctx->digest;
	...
	switch (ctx->flags & FLAGS_MODE_MASK) {
	...
		if (test_bit(FLAGS_BE32_SHA1, &ctx->dd->flags))
	[all …]

padlock-aes.c:

	static inline struct aes_ctx *aes_ctx_common(void *ctx)
	{
		unsigned long addr = (unsigned long)ctx;
		...
	}

	/* aes_set_key() */
	struct aes_ctx *ctx = aes_ctx(tfm);
	...
	ctx->D = ctx->E;

	ctx->E[0] = le32_to_cpu(key[0]);
	ctx->E[1] = le32_to_cpu(key[1]);
	ctx->E[2] = le32_to_cpu(key[2]);
	ctx->E[3] = le32_to_cpu(key[3]);
	...
	memset(&ctx->cword, 0, sizeof(ctx->cword));
	...
	ctx->cword.decrypt.encdec = 1;
	[all …]

mv_cesa.c:

	static void compute_aes_dec_key(struct mv_ctx *ctx)
	{
		...
		if (!ctx->need_calc_aes_dkey)
			return;

		crypto_aes_expand_key(&gen_aes_key, ctx->aes_enc_key, ctx->key_len);

		key_pos = ctx->key_len + 24;
		memcpy(ctx->aes_dec_key, &gen_aes_key.key_enc[key_pos], 4 * 4);
		switch (ctx->key_len) {
		...
			memcpy(&ctx->aes_dec_key[4], &gen_aes_key.key_enc[key_pos],
			       ...);
		...
		}
		ctx->need_calc_aes_dkey = 0;
	}

	/* mv_setkey_aes() */
	struct mv_ctx *ctx = crypto_tfm_ctx(tfm);
	...
	ctx->key_len = len;
	[all …]
/linux-4.4.14/drivers/net/wireless/iwlwifi/dvm/

rxon.c:

	iwl_connection_init_rx_config(...,
				      struct iwl_rxon_context *ctx)
	{
		memset(&ctx->staging, 0, sizeof(ctx->staging));

		if (!ctx->vif) {
			ctx->staging.dev_type = ctx->unused_devtype;
		} ...

		switch (ctx->vif->type) {
		...
			ctx->staging.dev_type = ctx->ap_devtype;
			...
			ctx->staging.dev_type = ctx->station_devtype;
			ctx->staging.filter_flags = RXON_FILTER_ACCEPT_GRP_MSK;
			...
			ctx->staging.dev_type = ctx->ibss_devtype;
			ctx->staging.flags = RXON_FLG_SHORT_PREAMBLE_MSK;
	[all …]
/linux-4.4.14/drivers/base/regmap/

regmap-mmio.c:

	/* regmap_mmio_gather_write() */
	struct regmap_mmio_context *ctx = context;
	...
	if (!IS_ERR(ctx->clk)) {
		ret = clk_enable(ctx->clk);
		...
	}
	...
	switch (ctx->val_bytes) {
	case 1:
		writeb(*(u8 *)val, ctx->regs + offset);
		break;
	case 2:
		writew(*(u16 *)val, ctx->regs + offset);
		break;
	case 4:
		writel(*(u32 *)val, ctx->regs + offset);
		break;
	case 8:
		writeq(*(u64 *)val, ctx->regs + offset);
		break;
	...
	}
	val_size -= ctx->val_bytes;
	val += ctx->val_bytes;
	[all …]
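This context is what regmap hands back to its bus callbacks; drivers normally never touch it directly — they create the map and go through regmap_read()/regmap_write(), which regmap-mmio routes into the readl()/writel() calls above. A minimal sketch of wiring a register block into regmap-mmio (the device and register layout are hypothetical):

	#include <linux/regmap.h>

	/* Hypothetical 32-bit register block already ioremap()ed at base. */
	static const struct regmap_config my_regmap_cfg = {
		.reg_bits   = 32,
		.val_bits   = 32,
		.reg_stride = 4,
	};

	static int my_probe_regs(struct device *dev, void __iomem *base)
	{
		struct regmap *map;
		unsigned int id;

		map = devm_regmap_init_mmio(dev, base, &my_regmap_cfg);
		if (IS_ERR(map))
			return PTR_ERR(map);

		/* Routed through the MMIO bus callbacks shown above. */
		regmap_read(map, 0x0, &id);		/* hypothetical ID register */
		return regmap_write(map, 0x4, 0x1);	/* hypothetical enable bit */
	}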
/linux-4.4.14/drivers/media/platform/exynos-gsc/

gsc-m2m.c:

	static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx)
	{
		...
		struct gsc_dev *gsc = ctx->gsc_dev;
		...
		if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx))
			...
		gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx);
		...
			!gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx),
			...
	}

	static void __gsc_m2m_job_abort(struct gsc_ctx *ctx)
	{
		...
		ret = gsc_m2m_ctx_stop_req(ctx);
		if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) {
			gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx);
			gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
	[all …]

gsc-core.h:

	struct gsc_ctx *ctx;	/* struct member */
	...
	void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state);
	...
	int gsc_try_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f);
	...
	int gsc_g_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f);
	...
	int gsc_g_crop(struct gsc_ctx *ctx, struct v4l2_crop *cr);
	int gsc_try_crop(struct gsc_ctx *ctx, struct v4l2_crop *cr);
	...
	int gsc_set_scaler_info(struct gsc_ctx *ctx);
	int gsc_ctrls_create(struct gsc_ctx *ctx);
	void gsc_ctrls_delete(struct gsc_ctx *ctx);
	int gsc_prepare_addr(struct gsc_ctx *ctx, struct vb2_buffer *vb,
			     ...);
	[all …]

gsc-regs.c:

	void gsc_hw_set_input_path(struct gsc_ctx *ctx)
	{
		struct gsc_dev *dev = ctx->gsc_dev;
		...
		if (ctx->in_path == GSC_DMA)
			...
	}

	void gsc_hw_set_in_size(struct gsc_ctx *ctx)
	{
		struct gsc_dev *dev = ctx->gsc_dev;
		struct gsc_frame *frame = &ctx->s_frame;
		...
	}

	void gsc_hw_set_in_image_rgb(struct gsc_ctx *ctx)
	{
		struct gsc_dev *dev = ctx->gsc_dev;
		struct gsc_frame *frame = &ctx->s_frame;
		...
	}

	void gsc_hw_set_in_image_format(struct gsc_ctx *ctx)
	[all …]
/linux-4.4.14/drivers/gpu/drm/radeon/ |
D | atom.c | 56 struct atom_context *ctx; member 66 static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params); 67 int atom_execute_table(struct atom_context *ctx, int index, uint32_t * params); 102 static uint32_t atom_iio_execute(struct atom_context *ctx, int base, in atom_iio_execute() argument 105 struct radeon_device *rdev = ctx->card->dev->dev_private; in atom_iio_execute() 114 temp = ctx->card->ioreg_read(ctx->card, CU16(base + 1)); in atom_iio_execute() 119 (void)ctx->card->ioreg_read(ctx->card, CU16(base + 1)); in atom_iio_execute() 120 ctx->card->ioreg_write(ctx->card, CU16(base + 1), temp); in atom_iio_execute() 160 ((ctx-> in atom_iio_execute() 177 static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr, in atom_get_src_int() argument [all …]
|
/linux-4.4.14/net/sunrpc/auth_gss/ |
D | gss_krb5_mech.c | 220 struct krb5_ctx *ctx, struct crypto_blkcipher **res) in get_key() argument 248 *res = crypto_alloc_blkcipher(ctx->gk5e->encrypt_name, 0, in get_key() 252 "crypto algorithm %s\n", ctx->gk5e->encrypt_name); in get_key() 258 "crypto algorithm %s\n", ctx->gk5e->encrypt_name); in get_key() 275 gss_import_v1_context(const void *p, const void *end, struct krb5_ctx *ctx) in gss_import_v1_context() argument 279 p = simple_get_bytes(p, end, &ctx->initiate, sizeof(ctx->initiate)); in gss_import_v1_context() 284 ctx->enctype = ENCTYPE_DES_CBC_RAW; in gss_import_v1_context() 286 ctx->gk5e = get_gss_krb5_enctype(ctx->enctype); in gss_import_v1_context() 287 if (ctx->gk5e == NULL) { in gss_import_v1_context() 315 p = simple_get_bytes(p, end, &ctx->endtime, sizeof(ctx->endtime)); in gss_import_v1_context() [all …]
|
D | gss_krb5_seal.c | 74 setup_token(struct krb5_ctx *ctx, struct xdr_netobj *token) in setup_token() argument 78 int body_size = GSS_KRB5_TOK_HDR_LEN + ctx->gk5e->cksumlength; in setup_token() 80 token->len = g_token_size(&ctx->mech_used, body_size); in setup_token() 83 g_make_token_header(&ctx->mech_used, body_size, (unsigned char **)&ptr); in setup_token() 92 *ptr++ = (__force u16)cpu_to_le16(ctx->gk5e->signalg); in setup_token() 100 setup_token_v2(struct krb5_ctx *ctx, struct xdr_netobj *token) in setup_token_v2() argument 106 if ((ctx->flags & KRB5_CTX_FLAG_INITIATOR) == 0) in setup_token_v2() 108 if (ctx->flags & KRB5_CTX_FLAG_ACCEPTOR_SUBKEY) in setup_token_v2() 123 token->len = GSS_KRB5_TOK_HDR_LEN + ctx->gk5e->cksumlength; in setup_token_v2() 128 gss_get_mic_v1(struct krb5_ctx *ctx, struct xdr_buf *text, in gss_get_mic_v1() argument [all …]
|
D | gss_krb5_unseal.c | 74 gss_verify_mic_v1(struct krb5_ctx *ctx, in gss_verify_mic_v1() argument 91 if (g_verify_token_header(&ctx->mech_used, &bodysize, &ptr, in gss_verify_mic_v1() 102 if (signalg != ctx->gk5e->signalg) in gss_verify_mic_v1() 112 if (ctx->gk5e->keyed_cksum) in gss_verify_mic_v1() 113 cksumkey = ctx->cksum; in gss_verify_mic_v1() 117 if (make_checksum(ctx, ptr, 8, message_buffer, 0, in gss_verify_mic_v1() 122 ctx->gk5e->cksumlength)) in gss_verify_mic_v1() 129 if (now > ctx->endtime) in gss_verify_mic_v1() 134 if (krb5_get_seq_num(ctx, ptr + GSS_KRB5_TOK_HDR_LEN, ptr + 8, in gss_verify_mic_v1() 138 if ((ctx->initiate && direction != 0xff) || in gss_verify_mic_v1() [all …]
|
/linux-4.4.14/arch/mips/pci/ |
D | pci-alchemy.c | 91 static void alchemy_pci_wired_entry(struct alchemy_pci_context *ctx) in alchemy_pci_wired_entry() argument 93 ctx->wired_entry = read_c0_wired(); in alchemy_pci_wired_entry() 94 add_wired_entry(0, 0, (unsigned long)ctx->pci_cfg_vm->addr, PM_4K); in alchemy_pci_wired_entry() 95 ctx->last_elo0 = ctx->last_elo1 = ~0; in alchemy_pci_wired_entry() 101 struct alchemy_pci_context *ctx = bus->sysdata; in config_access() local 113 r = __raw_readl(ctx->regs + PCI_REG_STATCMD) & 0x0000ffff; in config_access() 115 __raw_writel(r, ctx->regs + PCI_REG_STATCMD); in config_access() 121 if (ctx->board_pci_idsel(device, 1) == 0) { in config_access() 146 if ((entryLo0 != ctx->last_elo0) || (entryLo1 != ctx->last_elo1)) { in config_access() 147 mod_wired_entry(ctx->wired_entry, entryLo0, entryLo1, in config_access() [all …]
|
/linux-4.4.14/drivers/net/usb/ |
D | cdc_ncm.c | 65 static void cdc_ncm_tx_timeout_start(struct cdc_ncm_ctx *ctx); 107 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_get_ethtool_stats() local 112 p = (char *)ctx + cdc_ncm_gstrings_stats[i].stat_offset; in cdc_ncm_get_ethtool_stats() 149 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_check_rx_max() local 154 max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_RX, le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize)); in cdc_ncm_check_rx_max() 159 le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize), min); in cdc_ncm_check_rx_max() 172 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_check_tx_max() local 176 min = ctx->max_datagram_size + ctx->max_ndp_size + sizeof(struct usb_cdc_ncm_nth16); in cdc_ncm_check_tx_max() 177 max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_TX, le32_to_cpu(ctx->ncm_parm.dwNtbOutMaxSize)); in cdc_ncm_check_tx_max() 192 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_show_min_tx_pkt() local [all …]
|
/linux-4.4.14/crypto/ |
D | ansi_cprng.c | 86 static int _get_more_prng_bytes(struct prng_context *ctx, int cont_test) in _get_more_prng_bytes() argument 94 ctx); in _get_more_prng_bytes() 96 hexdump("Input DT: ", ctx->DT, DEFAULT_BLK_SZ); in _get_more_prng_bytes() 97 hexdump("Input I: ", ctx->I, DEFAULT_BLK_SZ); in _get_more_prng_bytes() 98 hexdump("Input V: ", ctx->V, DEFAULT_BLK_SZ); in _get_more_prng_bytes() 111 memcpy(tmp, ctx->DT, DEFAULT_BLK_SZ); in _get_more_prng_bytes() 112 output = ctx->I; in _get_more_prng_bytes() 122 xor_vectors(ctx->I, ctx->V, tmp, DEFAULT_BLK_SZ); in _get_more_prng_bytes() 124 output = ctx->rand_data; in _get_more_prng_bytes() 131 if (!memcmp(ctx->rand_data, ctx->last_rand_data, in _get_more_prng_bytes() [all …]
|
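
_get_more_prng_bytes() above implements the X9.31-style chain: encrypt DT to get I, XOR I with V before the next encryption, and reject any output block equal to the previous one. The two helper steps as a minimal sketch (illustrative names; the block cipher itself is omitted):

#include <stdint.h>
#include <string.h>

#define BLK_SZ 16       /* DEFAULT_BLK_SZ in the excerpt */

/* XOR two equal-length blocks, as the excerpt does when combining
 * I and V before the second encryption. */
static void xor_vectors(const uint8_t *in1, const uint8_t *in2,
                        uint8_t *out, unsigned int len)
{
        unsigned int i;

        for (i = 0; i < len; i++)
                out[i] = in1[i] ^ in2[i];
}

/* Continuity test: a block equal to the previous output means the
 * generator is stuck and the block must be rejected. */
static int continuity_check(const uint8_t *rand_data,
                            const uint8_t *last_rand_data)
{
        return memcmp(rand_data, last_rand_data, BLK_SZ) ? 0 : -1;
}
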
D | algif_aead.c | 60 struct aead_ctx *ctx = ask->private; in aead_sndbuf() local 63 ctx->used, 0); in aead_sndbuf() 71 static inline bool aead_sufficient_data(struct aead_ctx *ctx) in aead_sufficient_data() argument 73 unsigned as = crypto_aead_authsize(crypto_aead_reqtfm(&ctx->aead_req)); in aead_sufficient_data() 75 return ctx->used >= ctx->aead_assoclen + as; in aead_sufficient_data() 81 struct aead_ctx *ctx = ask->private; in aead_put_sgl() local 82 struct aead_sg_list *sgl = &ctx->tsgl; in aead_put_sgl() 95 ctx->used = 0; in aead_put_sgl() 96 ctx->more = 0; in aead_put_sgl() 97 ctx->merge = 0; in aead_put_sgl() [all …]
|
D | aes_generic.c | 1150 t ^= ctx->key_enc[4 * i]; \ 1151 ctx->key_enc[4 * i + 4] = t; \ 1152 t ^= ctx->key_enc[4 * i + 1]; \ 1153 ctx->key_enc[4 * i + 5] = t; \ 1154 t ^= ctx->key_enc[4 * i + 2]; \ 1155 ctx->key_enc[4 * i + 6] = t; \ 1156 t ^= ctx->key_enc[4 * i + 3]; \ 1157 ctx->key_enc[4 * i + 7] = t; \ 1163 t ^= ctx->key_enc[6 * i]; \ 1164 ctx->key_enc[6 * i + 6] = t; \ [all …]
|
D | algif_hash.c | 48 struct hash_ctx *ctx = ask->private; in hash_sendmsg() local 56 if (!ctx->more) { in hash_sendmsg() 57 err = af_alg_wait_for_completion(crypto_ahash_init(&ctx->req), in hash_sendmsg() 58 &ctx->completion); in hash_sendmsg() 63 ctx->more = 0; in hash_sendmsg() 71 len = af_alg_make_sg(&ctx->sgl, &msg->msg_iter, len); in hash_sendmsg() 77 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, len); in hash_sendmsg() 79 err = af_alg_wait_for_completion(crypto_ahash_update(&ctx->req), in hash_sendmsg() 80 &ctx->completion); in hash_sendmsg() 81 af_alg_free_sg(&ctx->sgl); in hash_sendmsg() [all …]
|
D | chainiv.c | 51 struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); in chainiv_givencrypt() local 64 spin_lock_bh(&ctx->lock); in chainiv_givencrypt() 68 memcpy(req->giv, ctx->iv, ivsize); in chainiv_givencrypt() 69 memcpy(subreq->info, ctx->iv, ivsize); in chainiv_givencrypt() 75 memcpy(ctx->iv, subreq->info, ivsize); in chainiv_givencrypt() 78 spin_unlock_bh(&ctx->lock); in chainiv_givencrypt() 102 struct chainiv_ctx *ctx = crypto_tfm_ctx(tfm); in chainiv_init() local 105 spin_lock_init(&ctx->lock); in chainiv_init() 110 iv = ctx->iv; in chainiv_init() 116 static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx) in async_chainiv_schedule_work() argument [all …]
|
D | xcbc.c | 42 u8 ctx[]; member 58 u8 ctx[]; member 65 struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent); in crypto_xcbc_digest_setkey() local 67 u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); in crypto_xcbc_digest_setkey() 71 if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen))) in crypto_xcbc_digest_setkey() 74 crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs); in crypto_xcbc_digest_setkey() 75 crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2); in crypto_xcbc_digest_setkey() 76 crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks); in crypto_xcbc_digest_setkey() 78 return crypto_cipher_setkey(ctx->child, key1, bs); in crypto_xcbc_digest_setkey() 85 struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); in crypto_xcbc_digest_init() local [all …]
|
D | cmac.c | 35 u8 ctx[]; member 51 u8 ctx[]; member 58 struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent); in crypto_cmac_digest_setkey() local 60 __be64 *consts = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); in crypto_cmac_digest_setkey() 65 err = crypto_cipher_setkey(ctx->child, inkey, keylen); in crypto_cmac_digest_setkey() 71 crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts); in crypto_cmac_digest_setkey() 111 struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc); in crypto_cmac_digest_init() local 113 u8 *prev = PTR_ALIGN((void *)ctx->ctx, alignmask + 1) + bs; in crypto_cmac_digest_init() 115 ctx->len = 0; in crypto_cmac_digest_init() 127 struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc); in crypto_cmac_digest_update() local [all …]
|
D | algif_skcipher.c | 109 struct skcipher_ctx *ctx = ask->private; in skcipher_sndbuf() local 112 ctx->used, 0); in skcipher_sndbuf() 123 struct skcipher_ctx *ctx = ask->private; in skcipher_alloc_sgl() local 127 sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); in skcipher_alloc_sgl() 128 if (!list_empty(&ctx->tsgl)) in skcipher_alloc_sgl() 144 list_add_tail(&sgl->list, &ctx->tsgl); in skcipher_alloc_sgl() 153 struct skcipher_ctx *ctx = ask->private; in skcipher_pull_sgl() local 158 while (!list_empty(&ctx->tsgl)) { in skcipher_pull_sgl() 159 sgl = list_first_entry(&ctx->tsgl, struct skcipher_sg_list, in skcipher_pull_sgl() 173 ctx->used -= plen; in skcipher_pull_sgl() [all …]
|
D | salsa20_generic.c | 107 static void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k, u32 kbytes) in salsa20_keysetup() argument 111 ctx->input[1] = U8TO32_LITTLE(k + 0); in salsa20_keysetup() 112 ctx->input[2] = U8TO32_LITTLE(k + 4); in salsa20_keysetup() 113 ctx->input[3] = U8TO32_LITTLE(k + 8); in salsa20_keysetup() 114 ctx->input[4] = U8TO32_LITTLE(k + 12); in salsa20_keysetup() 121 ctx->input[11] = U8TO32_LITTLE(k + 0); in salsa20_keysetup() 122 ctx->input[12] = U8TO32_LITTLE(k + 4); in salsa20_keysetup() 123 ctx->input[13] = U8TO32_LITTLE(k + 8); in salsa20_keysetup() 124 ctx->input[14] = U8TO32_LITTLE(k + 12); in salsa20_keysetup() 125 ctx->input[0] = U8TO32_LITTLE(constants + 0); in salsa20_keysetup() [all …]
|
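
salsa20_keysetup() above fills the 16-word state: key words at positions 1-4 and 11-14 (repeated when the key is 16 bytes), constants at 0, 5, 10 and 15. A self-contained sketch of that layout, assuming the standard "expand 16/32-byte k" constants:

#include <stdint.h>

static uint32_t u8to32_le(const uint8_t *p)
{
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

/* Salsa20 initial-state layout: key material in words 1-4 and
 * 11-14, the "expand XX-byte k" constant in words 0, 5, 10, 15. */
static void salsa20_key_layout(uint32_t input[16], const uint8_t *k,
                               unsigned int kbytes)
{
        const uint8_t *c = (const uint8_t *)
                (kbytes == 32 ? "expand 32-byte k" : "expand 16-byte k");
        unsigned int i;

        for (i = 0; i < 4; i++) {
                input[1 + i] = u8to32_le(k + 4 * i);
                input[11 + i] = u8to32_le(k + (kbytes == 32 ? 16 : 0) + 4 * i);
        }
        input[0] = u8to32_le(c);
        input[5] = u8to32_le(c + 4);
        input[10] = u8to32_le(c + 8);
        input[15] = u8to32_le(c + 12);
}
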
D | vmac.c | 321 static void vhash_abort(struct vmac_ctx *ctx) in vhash_abort() argument 323 ctx->polytmp[0] = ctx->polykey[0] ; in vhash_abort() 324 ctx->polytmp[1] = ctx->polykey[1] ; in vhash_abort() 325 ctx->first_block_processed = 0; in vhash_abort() 369 struct vmac_ctx *ctx) in vhash_update() argument 372 const u64 *kptr = (u64 *)ctx->nhkey; in vhash_update() 375 u64 pkh = ctx->polykey[0]; in vhash_update() 376 u64 pkl = ctx->polykey[1]; in vhash_update() 386 ch = ctx->polytmp[0]; in vhash_update() 387 cl = ctx->polytmp[1]; in vhash_update() [all …]
|
D | tea.c | 50 struct tea_ctx *ctx = crypto_tfm_ctx(tfm); in tea_setkey() local 53 ctx->KEY[0] = le32_to_cpu(key[0]); in tea_setkey() 54 ctx->KEY[1] = le32_to_cpu(key[1]); in tea_setkey() 55 ctx->KEY[2] = le32_to_cpu(key[2]); in tea_setkey() 56 ctx->KEY[3] = le32_to_cpu(key[3]); in tea_setkey() 66 struct tea_ctx *ctx = crypto_tfm_ctx(tfm); in tea_encrypt() local 73 k0 = ctx->KEY[0]; in tea_encrypt() 74 k1 = ctx->KEY[1]; in tea_encrypt() 75 k2 = ctx->KEY[2]; in tea_encrypt() 76 k3 = ctx->KEY[3]; in tea_encrypt() [all …]
|
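
tea_setkey() loads k0-k3 with le32_to_cpu() and tea_encrypt() runs the round function over them. For reference, the public TEA round structure as a freestanding sketch (byte-order handling omitted):

#include <stdint.h>

#define TEA_DELTA  0x9e3779b9U
#define TEA_ROUNDS 32

/* Encrypt one 64-bit block (two 32-bit words) under the four key
 * words the tea_setkey() excerpt loads. */
static void tea_encrypt_block(uint32_t v[2], const uint32_t k[4])
{
        uint32_t y = v[0], z = v[1], sum = 0;
        int i;

        for (i = 0; i < TEA_ROUNDS; i++) {
                sum += TEA_DELTA;
                y += ((z << 4) + k[0]) ^ (z + sum) ^ ((z >> 5) + k[1]);
                z += ((y << 4) + k[2]) ^ (y + sum) ^ ((y >> 5) + k[3]);
        }
        v[0] = y;
        v[1] = z;
}
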
D | ghash-generic.c | 47 struct ghash_ctx *ctx = crypto_shash_ctx(tfm); in ghash_setkey() local 54 if (ctx->gf128) in ghash_setkey() 55 gf128mul_free_4k(ctx->gf128); in ghash_setkey() 56 ctx->gf128 = gf128mul_init_4k_lle((be128 *)key); in ghash_setkey() 57 if (!ctx->gf128) in ghash_setkey() 67 struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); in ghash_update() local 70 if (!ctx->gf128) in ghash_update() 84 gf128mul_4k_lle((be128 *)dst, ctx->gf128); in ghash_update() 89 gf128mul_4k_lle((be128 *)dst, ctx->gf128); in ghash_update() 103 static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx) in ghash_flush() argument [all …]
|
/linux-4.4.14/drivers/crypto/amcc/ |
D | crypto4xx_alg.c | 75 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); in crypto4xx_encrypt() local 77 ctx->direction = DIR_OUTBOUND; in crypto4xx_encrypt() 78 ctx->hash_final = 0; in crypto4xx_encrypt() 79 ctx->is_hash = 0; in crypto4xx_encrypt() 80 ctx->pd_ctl = 0x1; in crypto4xx_encrypt() 82 return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, in crypto4xx_encrypt() 84 get_dynamic_sa_iv_size(ctx)); in crypto4xx_encrypt() 89 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); in crypto4xx_decrypt() local 91 ctx->direction = DIR_INBOUND; in crypto4xx_decrypt() 92 ctx->hash_final = 0; in crypto4xx_decrypt() [all …]
|
/linux-4.4.14/drivers/crypto/qat/qat_common/ |
D | qat_asym_algs.c | 113 struct qat_rsa_ctx *ctx; member 121 struct device *dev = &GET_DEV(req->ctx->inst->accel_dev); in qat_rsa_cb() 128 dma_free_coherent(dev, req->ctx->key_sz, req->src_align, in qat_rsa_cb() 131 dma_unmap_single(dev, req->in.enc.m, req->ctx->key_sz, in qat_rsa_cb() 134 areq->dst_len = req->ctx->key_sz; in qat_rsa_cb() 143 if (areq->dst_len != req->ctx->key_sz) in qat_rsa_cb() 149 dma_free_coherent(dev, req->ctx->key_sz, req->dst_align, in qat_rsa_cb() 162 dma_unmap_single(dev, req->out.enc.c, req->ctx->key_sz, in qat_rsa_cb() 243 struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm); in qat_rsa_enc() local 244 struct qat_crypto_instance *inst = ctx->inst; in qat_rsa_enc() [all …]
|
D | qat_algs.c | 145 struct qat_alg_aead_ctx *ctx, in qat_alg_do_precomputes() argument 149 SHASH_DESC_ON_STACK(shash, ctx->hash_tfm); in qat_alg_do_precomputes() 153 int block_size = crypto_shash_blocksize(ctx->hash_tfm); in qat_alg_do_precomputes() 154 int digest_size = crypto_shash_digestsize(ctx->hash_tfm); in qat_alg_do_precomputes() 163 shash->tfm = ctx->hash_tfm; in qat_alg_do_precomputes() 194 switch (ctx->qat_hash_alg) { in qat_alg_do_precomputes() 223 offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8); in qat_alg_do_precomputes() 227 switch (ctx->qat_hash_alg) { in qat_alg_do_precomputes() 277 struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(aead_tfm); in qat_alg_aead_init_enc_session() local 279 struct qat_enc *enc_ctx = &ctx->enc_cd->qat_enc_cd; in qat_alg_aead_init_enc_session() [all …]
|
D | qat_hal.c | 151 unsigned char ae, unsigned char ctx, in qat_hal_get_wakeup_event() argument 157 qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx); in qat_hal_get_wakeup_event() 328 unsigned int ctx, cur_ctx; in qat_hal_wr_indr_csr() local 332 for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) { in qat_hal_wr_indr_csr() 333 if (!(ctx_mask & (1 << ctx))) in qat_hal_wr_indr_csr() 335 qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx); in qat_hal_wr_indr_csr() 343 unsigned char ae, unsigned char ctx, in qat_hal_rd_indr_csr() argument 349 qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx); in qat_hal_rd_indr_csr() 358 unsigned int ctx, cur_ctx; in qat_hal_put_sig_event() local 361 for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) { in qat_hal_put_sig_event() [all …]
|
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/ |
D | atom.c | 56 struct atom_context *ctx; member 66 static int amdgpu_atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params); 67 int amdgpu_atom_execute_table(struct atom_context *ctx, int index, uint32_t * params); 102 static uint32_t atom_iio_execute(struct atom_context *ctx, int base, in atom_iio_execute() argument 113 temp = ctx->card->ioreg_read(ctx->card, CU16(base + 1)); in atom_iio_execute() 117 ctx->card->ioreg_write(ctx->card, CU16(base + 1), temp); in atom_iio_execute() 157 ((ctx-> in atom_iio_execute() 174 static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr, in atom_get_src_int() argument 178 struct atom_context *gctx = ctx->ctx; in atom_get_src_int() 222 val = get_unaligned_le32((u32 *)&ctx->ps[idx]); in atom_get_src_int() [all …]
|
D | amdgpu_ctx.c | 29 struct amdgpu_ctx *ctx) in amdgpu_ctx_init() argument 34 memset(ctx, 0, sizeof(*ctx)); in amdgpu_ctx_init() 35 ctx->adev = adev; in amdgpu_ctx_init() 36 kref_init(&ctx->refcount); in amdgpu_ctx_init() 37 spin_lock_init(&ctx->ring_lock); in amdgpu_ctx_init() 39 ctx->rings[i].sequence = 1; in amdgpu_ctx_init() 50 &ctx->rings[i].entity, in amdgpu_ctx_init() 59 &ctx->rings[j].entity); in amdgpu_ctx_init() 60 kfree(ctx); in amdgpu_ctx_init() 67 void amdgpu_ctx_fini(struct amdgpu_ctx *ctx) in amdgpu_ctx_fini() argument [all …]
|
D | amdgpu_uvd.c | 335 static int amdgpu_uvd_cs_pass1(struct amdgpu_uvd_cs_ctx *ctx) in amdgpu_uvd_cs_pass1() argument 343 lo = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->data0); in amdgpu_uvd_cs_pass1() 344 hi = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->data1); in amdgpu_uvd_cs_pass1() 347 mapping = amdgpu_cs_find_mapping(ctx->parser, addr, &bo); in amdgpu_uvd_cs_pass1() 353 if (!ctx->parser->adev->uvd.address_64_bit) { in amdgpu_uvd_cs_pass1() 355 cmd = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->idx) >> 1; in amdgpu_uvd_cs_pass1() 519 static int amdgpu_uvd_cs_msg(struct amdgpu_uvd_cs_ctx *ctx, in amdgpu_uvd_cs_msg() argument 522 struct amdgpu_device *adev = ctx->parser->adev; in amdgpu_uvd_cs_msg() 569 adev->uvd.filp[i] = ctx->parser->filp; in amdgpu_uvd_cs_msg() 579 r = amdgpu_uvd_cs_msg_decode(msg, ctx->buf_sizes); in amdgpu_uvd_cs_msg() [all …]
|
/linux-4.4.14/drivers/media/platform/ti-vpe/ |
D | vpe.c | 409 static struct vpe_q_data *get_q_data(struct vpe_ctx *ctx, in get_q_data() argument 415 return &ctx->q_data[Q_DATA_SRC]; in get_q_data() 418 return &ctx->q_data[Q_DATA_DST]; in get_q_data() 494 #define GET_OFFSET_TOP(ctx, obj, reg) \ argument 495 ((obj)->res->start - ctx->dev->res->start + reg) 497 #define VPE_SET_MMR_ADB_HDR(ctx, hdr, regs, offset_a) \ argument 498 VPDMA_SET_MMR_ADB_HDR(ctx->mmr_adb, vpe_mmr_adb, hdr, regs, offset_a) 502 static void init_adb_hdrs(struct vpe_ctx *ctx) in init_adb_hdrs() argument 504 VPE_SET_MMR_ADB_HDR(ctx, out_fmt_hdr, out_fmt_reg, VPE_CLK_FORMAT_SELECT); in init_adb_hdrs() 505 VPE_SET_MMR_ADB_HDR(ctx, us1_hdr, us1_regs, VPE_US1_R0); in init_adb_hdrs() [all …]
|
/linux-4.4.14/drivers/infiniband/core/ |
D | ucma.c | 104 struct ucma_context *ctx; member 114 struct ucma_context *ctx; member 129 struct ucma_context *ctx; in _ucma_find_context() local 131 ctx = idr_find(&ctx_idr, id); in _ucma_find_context() 132 if (!ctx) in _ucma_find_context() 133 ctx = ERR_PTR(-ENOENT); in _ucma_find_context() 134 else if (ctx->file != file) in _ucma_find_context() 135 ctx = ERR_PTR(-EINVAL); in _ucma_find_context() 136 return ctx; in _ucma_find_context() 141 struct ucma_context *ctx; in ucma_get_ctx() local [all …]
|
D | ucm.c | 92 struct ib_ucm_context *ctx; member 127 struct ib_ucm_context *ctx; in ib_ucm_ctx_get() local 130 ctx = idr_find(&ctx_id_table, id); in ib_ucm_ctx_get() 131 if (!ctx) in ib_ucm_ctx_get() 132 ctx = ERR_PTR(-ENOENT); in ib_ucm_ctx_get() 133 else if (ctx->file != file) in ib_ucm_ctx_get() 134 ctx = ERR_PTR(-EINVAL); in ib_ucm_ctx_get() 136 atomic_inc(&ctx->ref); in ib_ucm_ctx_get() 139 return ctx; in ib_ucm_ctx_get() 142 static void ib_ucm_ctx_put(struct ib_ucm_context *ctx) in ib_ucm_ctx_put() argument [all …]
|
/linux-4.4.14/arch/ia64/kernel/ |
D | perfmon.c | 99 #define PMC_OVFL_NOTIFY(ctx, i) ((ctx)->ctx_pmds[i].flags & PFM_REGFL_OVFL_NOTIFY) argument 126 #define CTX_USED_PMD(ctx, mask) (ctx)->ctx_used_pmds[0] |= (mask) argument 127 #define CTX_IS_USED_PMD(ctx, c) (((ctx)->ctx_used_pmds[0] & (1UL << (c))) != 0UL) argument 129 #define CTX_USED_MONITOR(ctx, mask) (ctx)->ctx_used_monitors[0] |= (mask) argument 131 #define CTX_USED_IBR(ctx,n) (ctx)->ctx_used_ibrs[(n)>>6] |= 1UL<< ((n) % 64) argument 132 #define CTX_USED_DBR(ctx,n) (ctx)->ctx_used_dbrs[(n)>>6] |= 1UL<< ((n) % 64) argument 133 #define CTX_USES_DBREGS(ctx) (((pfm_context_t *)(ctx))->ctx_fl_using_dbreg==1) argument 348 #define SET_LAST_CPU(ctx, v) (ctx)->ctx_last_cpu = (v) argument 349 #define GET_LAST_CPU(ctx) (ctx)->ctx_last_cpu argument 351 #define SET_LAST_CPU(ctx, v) do {} while(0) argument [all …]
|
/linux-4.4.14/drivers/mmc/core/ |
D | slot-gpio.c | 47 struct mmc_gpio *ctx = devm_kzalloc(host->parent, in mmc_gpio_alloc() local 48 sizeof(*ctx) + 2 * len, GFP_KERNEL); in mmc_gpio_alloc() 50 if (ctx) { in mmc_gpio_alloc() 51 ctx->ro_label = ctx->cd_label + len; in mmc_gpio_alloc() 52 snprintf(ctx->cd_label, len, "%s cd", dev_name(host->parent)); in mmc_gpio_alloc() 53 snprintf(ctx->ro_label, len, "%s ro", dev_name(host->parent)); in mmc_gpio_alloc() 54 host->slot.handler_priv = ctx; in mmc_gpio_alloc() 58 return ctx ? 0 : -ENOMEM; in mmc_gpio_alloc() 63 struct mmc_gpio *ctx = host->slot.handler_priv; in mmc_gpio_get_ro() local 65 if (!ctx || !ctx->ro_gpio) in mmc_gpio_get_ro() [all …]
|
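
mmc_gpio_alloc() above carves both label strings out of a single devm allocation, pointing ro_label just past cd_label. The same trick in plain C (illustrative names, malloc instead of devm):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Two label strings carved out of one allocation: ro_label starts
 * where cd_label's slot ends, as in mmc_gpio_alloc(). */
struct gpio_labels {
        char *cd_label;
        char *ro_label;
        char buf[];
};

static struct gpio_labels *labels_alloc(const char *devname)
{
        size_t len = strlen(devname) + 4;       /* " cd"/" ro" + NUL */
        struct gpio_labels *ctx = malloc(sizeof(*ctx) + 2 * len);

        if (!ctx)
                return NULL;
        ctx->cd_label = ctx->buf;
        ctx->ro_label = ctx->buf + len;
        snprintf(ctx->cd_label, len, "%s cd", devname);
        snprintf(ctx->ro_label, len, "%s ro", devname);
        return ctx;
}
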
/linux-4.4.14/include/linux/ |
D | ww_mutex.h | 47 struct ww_acquire_ctx *ctx; member 89 lock->ctx = NULL; in ww_mutex_init() 119 static inline void ww_acquire_init(struct ww_acquire_ctx *ctx, in ww_acquire_init() argument 122 ctx->task = current; in ww_acquire_init() 123 ctx->stamp = atomic_long_inc_return(&ww_class->stamp); in ww_acquire_init() 124 ctx->acquired = 0; in ww_acquire_init() 126 ctx->ww_class = ww_class; in ww_acquire_init() 127 ctx->done_acquire = 0; in ww_acquire_init() 128 ctx->contending_lock = NULL; in ww_acquire_init() 131 debug_check_no_locks_freed((void *)ctx, sizeof(*ctx)); in ww_acquire_init() [all …]
|
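
The ww_mutex.h excerpt shows the acquire-context fields that ww_acquire_init() sets up. A minimal sketch of how a caller is meant to use the API, following the documented back-off pattern (demo_* names are illustrative, not kernel symbols):

#include <linux/ww_mutex.h>
#include <linux/kernel.h>       /* swap() */

static DEFINE_WW_CLASS(demo_ww_class);

/* Take two locks of the same class; on -EDEADLK, deadlock
 * avoidance says back off: drop what we hold, sleep until the
 * contended lock is ours, then retake the other. */
static void demo_lock_pair(struct ww_mutex *a, struct ww_mutex *b)
{
        struct ww_acquire_ctx ctx;
        int ret;

        ww_acquire_init(&ctx, &demo_ww_class);

        ret = ww_mutex_lock(a, &ctx);   /* first lock cannot deadlock */
        ret = ww_mutex_lock(b, &ctx);
        while (ret == -EDEADLK) {
                ww_mutex_unlock(a);
                ww_mutex_lock_slow(b, &ctx);
                swap(a, b);             /* b is held now; retake the other */
                ret = ww_mutex_lock(b, &ctx);
        }
        ww_acquire_done(&ctx);

        /* ... both objects are now stably locked ... */

        ww_mutex_unlock(a);
        ww_mutex_unlock(b);
        ww_acquire_fini(&ctx);
}
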
/linux-4.4.14/sound/soc/au1x/ |
D | ac97c.c | 71 static inline unsigned long RD(struct au1xpsc_audio_data *ctx, int reg) in RD() argument 73 return __raw_readl(ctx->mmio + reg); in RD() 76 static inline void WR(struct au1xpsc_audio_data *ctx, int reg, unsigned long v) in WR() argument 78 __raw_writel(v, ctx->mmio + reg); in WR() 85 struct au1xpsc_audio_data *ctx = ac97_to_ctx(ac97); in au1xac97c_ac97_read() local 92 mutex_lock(&ctx->lock); in au1xac97c_ac97_read() 95 while ((RD(ctx, AC97_STATUS) & STAT_CP) && tmo--) in au1xac97c_ac97_read() 102 WR(ctx, AC97_CMDRESP, CMD_IDX(r) | CMD_READ); in au1xac97c_ac97_read() 108 while ((RD(ctx, AC97_STATUS) & STAT_CP) && tmo--) in au1xac97c_ac97_read() 110 data = RD(ctx, AC97_CMDRESP); in au1xac97c_ac97_read() [all …]
|
D | i2sc.c | 69 static inline unsigned long RD(struct au1xpsc_audio_data *ctx, int reg) in RD() argument 71 return __raw_readl(ctx->mmio + reg); in RD() 74 static inline void WR(struct au1xpsc_audio_data *ctx, int reg, unsigned long v) in WR() argument 76 __raw_writel(v, ctx->mmio + reg); in WR() 82 struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(cpu_dai); in au1xi2s_set_fmt() local 87 c = ctx->cfg; in au1xi2s_set_fmt() 130 ctx->cfg = c; in au1xi2s_set_fmt() 138 struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(dai); in au1xi2s_trigger() local 145 WR(ctx, I2S_ENABLE, EN_D | EN_CE); in au1xi2s_trigger() 146 WR(ctx, I2S_ENABLE, EN_CE); in au1xi2s_trigger() [all …]
|
/linux-4.4.14/drivers/i2c/busses/ |
D | i2c-xgene-slimpro.c | 117 struct slimpro_i2c_dev *ctx = to_slimpro_i2c_dev(cl); in slimpro_i2c_rx_cb() local 125 if (ctx->resp_msg) in slimpro_i2c_rx_cb() 126 *ctx->resp_msg = ((u32 *)mssg)[1]; in slimpro_i2c_rx_cb() 128 if (ctx->mbox_client.tx_block) in slimpro_i2c_rx_cb() 129 complete(&ctx->rd_complete); in slimpro_i2c_rx_cb() 132 static int start_i2c_msg_xfer(struct slimpro_i2c_dev *ctx) in start_i2c_msg_xfer() argument 134 if (ctx->mbox_client.tx_block) { in start_i2c_msg_xfer() 135 if (!wait_for_completion_timeout(&ctx->rd_complete, in start_i2c_msg_xfer() 141 if (*ctx->resp_msg == 0xffffffff) in start_i2c_msg_xfer() 147 static int slimpro_i2c_rd(struct slimpro_i2c_dev *ctx, u32 chip, in slimpro_i2c_rd() argument [all …]
|
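
start_i2c_msg_xfer() above parks the caller on a completion that the mailbox rx callback signals. The blocking half reduces to a few lines (timeout value is illustrative):

#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

#define MBOX_OP_TIMEOUT_MS 250          /* illustrative value */

/* Sleep until the rx callback fires complete() on the same struct,
 * turning expiry into -ETIMEDOUT. */
static int wait_for_reply(struct completion *rd_complete)
{
        if (!wait_for_completion_timeout(rd_complete,
                        msecs_to_jiffies(MBOX_OP_TIMEOUT_MS)))
                return -ETIMEDOUT;
        return 0;
}
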
/linux-4.4.14/drivers/media/platform/exynos4-is/ |
D | fimc-m2m.c | 43 void fimc_m2m_job_finish(struct fimc_ctx *ctx, int vb_state) in fimc_m2m_job_finish() argument 47 if (!ctx || !ctx->fh.m2m_ctx) in fimc_m2m_job_finish() 50 src_vb = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx); in fimc_m2m_job_finish() 51 dst_vb = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx); in fimc_m2m_job_finish() 56 v4l2_m2m_job_finish(ctx->fimc_dev->m2m.m2m_dev, in fimc_m2m_job_finish() 57 ctx->fh.m2m_ctx); in fimc_m2m_job_finish() 62 static int fimc_m2m_shutdown(struct fimc_ctx *ctx) in fimc_m2m_shutdown() argument 64 struct fimc_dev *fimc = ctx->fimc_dev; in fimc_m2m_shutdown() 70 fimc_ctx_state_set(FIMC_CTX_SHUT, ctx); in fimc_m2m_shutdown() 73 !fimc_ctx_state_is_set(FIMC_CTX_SHUT, ctx), in fimc_m2m_shutdown() [all …]
|
D | fimc-reg.c | 44 static u32 fimc_hw_get_in_flip(struct fimc_ctx *ctx) in fimc_hw_get_in_flip() argument 48 if (ctx->hflip) in fimc_hw_get_in_flip() 50 if (ctx->vflip) in fimc_hw_get_in_flip() 53 if (ctx->rotation <= 90) in fimc_hw_get_in_flip() 59 static u32 fimc_hw_get_target_flip(struct fimc_ctx *ctx) in fimc_hw_get_target_flip() argument 63 if (ctx->hflip) in fimc_hw_get_target_flip() 65 if (ctx->vflip) in fimc_hw_get_target_flip() 68 if (ctx->rotation <= 90) in fimc_hw_get_target_flip() 74 void fimc_hw_set_rotation(struct fimc_ctx *ctx) in fimc_hw_set_rotation() argument 77 struct fimc_dev *dev = ctx->fimc_dev; in fimc_hw_set_rotation() [all …]
|
D | fimc-capture.c | 38 struct fimc_ctx *ctx = fimc->vid_cap.ctx; in fimc_capture_hw_init() local 42 if (ctx == NULL || ctx->s_frame.fmt == NULL) in fimc_capture_hw_init() 52 fimc_prepare_dma_offset(ctx, &ctx->d_frame); in fimc_capture_hw_init() 53 fimc_set_yuv_order(ctx); in fimc_capture_hw_init() 58 fimc_hw_set_camera_offset(fimc, &ctx->s_frame); in fimc_capture_hw_init() 60 ret = fimc_set_scaler_info(ctx); in fimc_capture_hw_init() 62 fimc_hw_set_input_path(ctx); in fimc_capture_hw_init() 63 fimc_hw_set_prescaler(ctx); in fimc_capture_hw_init() 64 fimc_hw_set_mainscaler(ctx); in fimc_capture_hw_init() 65 fimc_hw_set_target_format(ctx); in fimc_capture_hw_init() [all …]
|
/linux-4.4.14/fs/cifs/ |
D | asn1.c | 109 asn1_open(struct asn1_ctx *ctx, unsigned char *buf, unsigned int len) in asn1_open() argument 111 ctx->begin = buf; in asn1_open() 112 ctx->end = buf + len; in asn1_open() 113 ctx->pointer = buf; in asn1_open() 114 ctx->error = ASN1_ERR_NOERROR; in asn1_open() 118 asn1_octet_decode(struct asn1_ctx *ctx, unsigned char *ch) in asn1_octet_decode() argument 120 if (ctx->pointer >= ctx->end) { in asn1_octet_decode() 121 ctx->error = ASN1_ERR_DEC_EMPTY; in asn1_octet_decode() 124 *ch = *(ctx->pointer)++; in asn1_octet_decode() 130 asn1_enum_decode(struct asn1_ctx *ctx, __le32 *val) [all …]
|
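
asn1_open()/asn1_octet_decode() above maintain a begin/end window, a read pointer and an error latch. The same cursor in a freestanding sketch:

#include <stdint.h>

#define ASN1_ERR_NOERROR   0
#define ASN1_ERR_DEC_EMPTY 2

/* Minimal decoding cursor: a bounded window plus a read pointer. */
struct asn1_cursor {
        const uint8_t *begin, *end, *pointer;
        int error;
};

static void cursor_open(struct asn1_cursor *c,
                        const uint8_t *buf, unsigned int len)
{
        c->begin = buf;
        c->end = buf + len;
        c->pointer = buf;
        c->error = ASN1_ERR_NOERROR;
}

/* Returns 1 on success, 0 when the buffer is exhausted. */
static int cursor_octet(struct asn1_cursor *c, uint8_t *ch)
{
        if (c->pointer >= c->end) {
                c->error = ASN1_ERR_DEC_EMPTY;
                return 0;
        }
        *ch = *c->pointer++;
        return 1;
}
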
/linux-4.4.14/drivers/media/platform/ |
D | m2m-deinterlace.c | 158 struct deinterlace_ctx *ctx = priv; in deinterlace_job_ready() local 159 struct deinterlace_dev *pcdev = ctx->dev; in deinterlace_job_ready() 161 if ((v4l2_m2m_num_src_bufs_ready(ctx->m2m_ctx) > 0) in deinterlace_job_ready() 162 && (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) > 0) in deinterlace_job_ready() 163 && (atomic_read(&ctx->dev->busy) == 0)) { in deinterlace_job_ready() 175 struct deinterlace_ctx *ctx = priv; in deinterlace_job_abort() local 176 struct deinterlace_dev *pcdev = ctx->dev; in deinterlace_job_abort() 178 ctx->aborting = 1; in deinterlace_job_abort() 182 v4l2_m2m_job_finish(pcdev->m2m_dev, ctx->m2m_ctx); in deinterlace_job_abort() 187 struct deinterlace_ctx *ctx = priv; in deinterlace_lock() local [all …]
|
D | vim2m.c | 184 static struct vim2m_q_data *get_q_data(struct vim2m_ctx *ctx, in get_q_data() argument 189 return &ctx->q_data[V4L2_M2M_SRC]; in get_q_data() 191 return &ctx->q_data[V4L2_M2M_DST]; in get_q_data() 199 static int device_process(struct vim2m_ctx *ctx, in device_process() argument 203 struct vim2m_dev *dev = ctx->dev; in device_process() 210 q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT); in device_process() 236 get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE)->sequence++; in device_process() 250 switch (ctx->mode) { in device_process() 348 struct vim2m_ctx *ctx = priv; in job_ready() local 350 if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) < ctx->translen in job_ready() [all …]
|
D | rcar_jpu.c | 671 struct jpu_ctx *ctx = fh_to_ctx(priv); in jpu_querycap() local 673 if (ctx->encoder) in jpu_querycap() 680 dev_name(ctx->jpu->dev)); in jpu_querycap() 733 struct jpu_ctx *ctx = fh_to_ctx(priv); in jpu_enum_fmt_cap() local 735 return jpu_enum_fmt(f, ctx->encoder ? JPU_ENC_CAPTURE : in jpu_enum_fmt_cap() 742 struct jpu_ctx *ctx = fh_to_ctx(priv); in jpu_enum_fmt_out() local 744 return jpu_enum_fmt(f, ctx->encoder ? JPU_ENC_OUTPUT : JPU_DEC_OUTPUT); in jpu_enum_fmt_out() 747 static struct jpu_q_data *jpu_get_q_data(struct jpu_ctx *ctx, in jpu_get_q_data() argument 751 return &ctx->out_q; in jpu_get_q_data() 753 return &ctx->cap_q; in jpu_get_q_data() [all …]
|
D | mx2_emmaprp.c | 225 static struct emmaprp_q_data *get_q_data(struct emmaprp_ctx *ctx, in get_q_data() argument 230 return &(ctx->q_data[V4L2_M2M_SRC]); in get_q_data() 232 return &(ctx->q_data[V4L2_M2M_DST]); in get_q_data() 244 struct emmaprp_ctx *ctx = priv; in emmaprp_job_abort() local 245 struct emmaprp_dev *pcdev = ctx->dev; in emmaprp_job_abort() 247 ctx->aborting = 1; in emmaprp_job_abort() 251 v4l2_m2m_job_finish(pcdev->m2m_dev, ctx->m2m_ctx); in emmaprp_job_abort() 256 struct emmaprp_ctx *ctx = priv; in emmaprp_lock() local 257 struct emmaprp_dev *pcdev = ctx->dev; in emmaprp_lock() 263 struct emmaprp_ctx *ctx = priv; in emmaprp_unlock() local [all …]
|
/linux-4.4.14/drivers/phy/ |
D | phy-xgene.c | 605 static void cmu_wr(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_wr() argument 608 void __iomem *sds_base = ctx->sds_base; in cmu_wr() 622 static void cmu_rd(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_rd() argument 625 void __iomem *sds_base = ctx->sds_base; in cmu_rd() 636 static void cmu_toggle1to0(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_toggle1to0() argument 641 cmu_rd(ctx, cmu_type, reg, &val); in cmu_toggle1to0() 643 cmu_wr(ctx, cmu_type, reg, val); in cmu_toggle1to0() 644 cmu_rd(ctx, cmu_type, reg, &val); in cmu_toggle1to0() 646 cmu_wr(ctx, cmu_type, reg, val); in cmu_toggle1to0() 649 static void cmu_clrbits(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_clrbits() argument [all …]
|
/linux-4.4.14/fs/f2fs/ |
D | crypto.c | 81 void f2fs_release_crypto_ctx(struct f2fs_crypto_ctx *ctx) in f2fs_release_crypto_ctx() argument 85 if (ctx->flags & F2FS_WRITE_PATH_FL && ctx->w.bounce_page) { in f2fs_release_crypto_ctx() 86 mempool_free(ctx->w.bounce_page, f2fs_bounce_page_pool); in f2fs_release_crypto_ctx() 87 ctx->w.bounce_page = NULL; in f2fs_release_crypto_ctx() 89 ctx->w.control_page = NULL; in f2fs_release_crypto_ctx() 90 if (ctx->flags & F2FS_CTX_REQUIRES_FREE_ENCRYPT_FL) { in f2fs_release_crypto_ctx() 91 kmem_cache_free(f2fs_crypto_ctx_cachep, ctx); in f2fs_release_crypto_ctx() 94 list_add(&ctx->free_list, &f2fs_free_crypto_ctxs); in f2fs_release_crypto_ctx() 110 struct f2fs_crypto_ctx *ctx = NULL; in f2fs_get_crypto_ctx() local 128 ctx = list_first_entry_or_null(&f2fs_free_crypto_ctxs, in f2fs_get_crypto_ctx() [all …]
|
D | crypto_policy.c | 35 struct f2fs_encryption_context ctx; in f2fs_is_encryption_context_consistent_with_policy() local 37 F2FS_XATTR_NAME_ENCRYPTION_CONTEXT, &ctx, in f2fs_is_encryption_context_consistent_with_policy() 38 sizeof(ctx), NULL); in f2fs_is_encryption_context_consistent_with_policy() 40 if (res != sizeof(ctx)) in f2fs_is_encryption_context_consistent_with_policy() 43 return (memcmp(ctx.master_key_descriptor, policy->master_key_descriptor, in f2fs_is_encryption_context_consistent_with_policy() 45 (ctx.flags == policy->flags) && in f2fs_is_encryption_context_consistent_with_policy() 46 (ctx.contents_encryption_mode == in f2fs_is_encryption_context_consistent_with_policy() 48 (ctx.filenames_encryption_mode == in f2fs_is_encryption_context_consistent_with_policy() 55 struct f2fs_encryption_context ctx; in f2fs_create_encryption_context_from_policy() local 57 ctx.format = F2FS_ENCRYPTION_CONTEXT_FORMAT_V1; in f2fs_create_encryption_context_from_policy() [all …]
|
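
f2fs_is_encryption_context_consistent_with_policy() reads the stored context from an xattr and field-compares it against the requested policy. The comparison itself, sketched with illustrative struct layouts (field names follow the excerpt; sizes are assumptions):

#include <string.h>

#define KEY_DESC_SIZE 8         /* assumed descriptor length */

struct enc_context {
        unsigned char master_key_descriptor[KEY_DESC_SIZE];
        unsigned char flags;
        unsigned char contents_encryption_mode;
        unsigned char filenames_encryption_mode;
};

struct enc_policy {
        unsigned char master_key_descriptor[KEY_DESC_SIZE];
        unsigned char flags;
        unsigned char contents_encryption_mode;
        unsigned char filenames_encryption_mode;
};

/* Consistent only if key descriptor, flags and both modes match. */
static int policy_matches(const struct enc_context *ctx,
                          const struct enc_policy *policy)
{
        return !memcmp(ctx->master_key_descriptor,
                       policy->master_key_descriptor, KEY_DESC_SIZE) &&
               ctx->flags == policy->flags &&
               ctx->contents_encryption_mode ==
                       policy->contents_encryption_mode &&
               ctx->filenames_encryption_mode ==
                       policy->filenames_encryption_mode;
}
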
/linux-4.4.14/drivers/media/platform/sti/bdisp/ |
D | bdisp-v4l2.c | 120 static inline void bdisp_ctx_state_lock_set(u32 state, struct bdisp_ctx *ctx) in bdisp_ctx_state_lock_set() argument 124 spin_lock_irqsave(&ctx->bdisp_dev->slock, flags); in bdisp_ctx_state_lock_set() 125 ctx->state |= state; in bdisp_ctx_state_lock_set() 126 spin_unlock_irqrestore(&ctx->bdisp_dev->slock, flags); in bdisp_ctx_state_lock_set() 129 static inline void bdisp_ctx_state_lock_clear(u32 state, struct bdisp_ctx *ctx) in bdisp_ctx_state_lock_clear() argument 133 spin_lock_irqsave(&ctx->bdisp_dev->slock, flags); in bdisp_ctx_state_lock_clear() 134 ctx->state &= ~state; in bdisp_ctx_state_lock_clear() 135 spin_unlock_irqrestore(&ctx->bdisp_dev->slock, flags); in bdisp_ctx_state_lock_clear() 138 static inline bool bdisp_ctx_state_is_set(u32 mask, struct bdisp_ctx *ctx) in bdisp_ctx_state_is_set() argument 143 spin_lock_irqsave(&ctx->bdisp_dev->slock, flags); in bdisp_ctx_state_is_set() [all …]
|
D | bdisp-hw.c | 126 void bdisp_hw_free_nodes(struct bdisp_ctx *ctx) in bdisp_hw_free_nodes() argument 128 if (ctx && ctx->node[0]) { in bdisp_hw_free_nodes() 132 dma_free_attrs(ctx->bdisp_dev->dev, in bdisp_hw_free_nodes() 134 ctx->node[0], ctx->node_paddr[0], &attrs); in bdisp_hw_free_nodes() 147 int bdisp_hw_alloc_nodes(struct bdisp_ctx *ctx) in bdisp_hw_alloc_nodes() argument 149 struct device *dev = ctx->bdisp_dev->dev; in bdisp_hw_alloc_nodes() 167 ctx->node[i] = base; in bdisp_hw_alloc_nodes() 168 ctx->node_paddr[i] = paddr; in bdisp_hw_alloc_nodes() 169 dev_dbg(dev, "node[%d]=0x%p (paddr=%pad)\n", i, ctx->node[i], in bdisp_hw_alloc_nodes() 333 static int bdisp_hw_get_hv_inc(struct bdisp_ctx *ctx, u16 *h_inc, u16 *v_inc) in bdisp_hw_get_hv_inc() argument [all …]
|
/linux-4.4.14/fs/ext4/ |
D | crypto_policy.c | 33 struct ext4_encryption_context ctx; in ext4_is_encryption_context_consistent_with_policy() local 35 EXT4_XATTR_NAME_ENCRYPTION_CONTEXT, &ctx, in ext4_is_encryption_context_consistent_with_policy() 36 sizeof(ctx)); in ext4_is_encryption_context_consistent_with_policy() 37 if (res != sizeof(ctx)) in ext4_is_encryption_context_consistent_with_policy() 39 return (memcmp(ctx.master_key_descriptor, policy->master_key_descriptor, in ext4_is_encryption_context_consistent_with_policy() 41 (ctx.flags == in ext4_is_encryption_context_consistent_with_policy() 43 (ctx.contents_encryption_mode == in ext4_is_encryption_context_consistent_with_policy() 45 (ctx.filenames_encryption_mode == in ext4_is_encryption_context_consistent_with_policy() 52 struct ext4_encryption_context ctx; in ext4_create_encryption_context_from_policy() local 60 ctx.format = EXT4_ENCRYPTION_CONTEXT_FORMAT_V1; in ext4_create_encryption_context_from_policy() [all …]
|
D | crypto.c | 70 void ext4_release_crypto_ctx(struct ext4_crypto_ctx *ctx) in ext4_release_crypto_ctx() argument 74 if (ctx->flags & EXT4_WRITE_PATH_FL && ctx->w.bounce_page) in ext4_release_crypto_ctx() 75 mempool_free(ctx->w.bounce_page, ext4_bounce_page_pool); in ext4_release_crypto_ctx() 76 ctx->w.bounce_page = NULL; in ext4_release_crypto_ctx() 77 ctx->w.control_page = NULL; in ext4_release_crypto_ctx() 78 if (ctx->flags & EXT4_CTX_REQUIRES_FREE_ENCRYPT_FL) { in ext4_release_crypto_ctx() 79 kmem_cache_free(ext4_crypto_ctx_cachep, ctx); in ext4_release_crypto_ctx() 82 list_add(&ctx->free_list, &ext4_free_crypto_ctxs); in ext4_release_crypto_ctx() 98 struct ext4_crypto_ctx *ctx = NULL; in ext4_get_crypto_ctx() local 117 ctx = list_first_entry_or_null(&ext4_free_crypto_ctxs, in ext4_get_crypto_ctx() [all …]
|
/linux-4.4.14/drivers/crypto/caam/ |
D | caamrng.c | 88 static inline void rng_unmap_ctx(struct caam_rng_ctx *ctx) in rng_unmap_ctx() argument 90 struct device *jrdev = ctx->jrdev; in rng_unmap_ctx() 92 if (ctx->sh_desc_dma) in rng_unmap_ctx() 93 dma_unmap_single(jrdev, ctx->sh_desc_dma, in rng_unmap_ctx() 94 desc_bytes(ctx->sh_desc), DMA_TO_DEVICE); in rng_unmap_ctx() 95 rng_unmap_buf(jrdev, &ctx->bufs[0]); in rng_unmap_ctx() 96 rng_unmap_buf(jrdev, &ctx->bufs[1]); in rng_unmap_ctx() 121 static inline int submit_job(struct caam_rng_ctx *ctx, int to_current) in submit_job() argument 123 struct buf_data *bd = &ctx->bufs[!(to_current ^ ctx->current_buf)]; in submit_job() 124 struct device *jrdev = ctx->jrdev; in submit_job() [all …]
|
D | caamalg.c | 198 static void append_key_aead(u32 *desc, struct caam_ctx *ctx, in append_key_aead() argument 202 unsigned int enckeylen = ctx->enckeylen; in append_key_aead() 213 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in append_key_aead() 214 ctx->split_key_len, CLASS_2 | in append_key_aead() 216 append_key_as_imm(desc, (void *)ctx->key + in append_key_aead() 217 ctx->split_key_pad_len, enckeylen, in append_key_aead() 220 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 | in append_key_aead() 222 append_key(desc, ctx->key_dma + ctx->split_key_pad_len, in append_key_aead() 228 nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len + in append_key_aead() 240 static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx, in init_sh_desc_key_aead() argument [all …]
|
D | caamhash.c | 235 static inline void append_key_ahash(u32 *desc, struct caam_hash_ctx *ctx) in append_key_ahash() argument 237 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in append_key_ahash() 238 ctx->split_key_len, CLASS_2 | in append_key_ahash() 243 static inline void init_sh_desc_key_ahash(u32 *desc, struct caam_hash_ctx *ctx) in init_sh_desc_key_ahash() argument 249 if (ctx->split_key_len) { in init_sh_desc_key_ahash() 254 append_key_ahash(desc, ctx); in init_sh_desc_key_ahash() 287 struct caam_hash_ctx *ctx) in ahash_ctx_data_to_out() argument 289 init_sh_desc_key_ahash(desc, ctx); in ahash_ctx_data_to_out() 293 LDST_CLASS_2_CCB | ctx->ctx_len); in ahash_ctx_data_to_out() 306 int digestsize, struct caam_hash_ctx *ctx) in ahash_data_to_out() argument [all …]
|
/linux-4.4.14/security/selinux/ |
D | xfrm.c | 58 static inline int selinux_authorizable_ctx(struct xfrm_sec_ctx *ctx) in selinux_authorizable_ctx() argument 60 return (ctx && in selinux_authorizable_ctx() 61 (ctx->ctx_doi == XFRM_SC_DOI_LSM) && in selinux_authorizable_ctx() 62 (ctx->ctx_alg == XFRM_SC_ALG_SELINUX)); in selinux_authorizable_ctx() 83 struct xfrm_sec_ctx *ctx = NULL; in selinux_xfrm_alloc_user() local 95 ctx = kmalloc(sizeof(*ctx) + str_len + 1, gfp); in selinux_xfrm_alloc_user() 96 if (!ctx) in selinux_xfrm_alloc_user() 99 ctx->ctx_doi = XFRM_SC_DOI_LSM; in selinux_xfrm_alloc_user() 100 ctx->ctx_alg = XFRM_SC_ALG_SELINUX; in selinux_xfrm_alloc_user() 101 ctx->ctx_len = str_len; in selinux_xfrm_alloc_user() [all …]
|
/linux-4.4.14/drivers/net/wireless/orinoco/ |
D | orinoco_usb.c | 287 static void ezusb_ctx_complete(struct request_context *ctx); 299 static void ezusb_request_context_put(struct request_context *ctx) in ezusb_request_context_put() argument 301 if (!atomic_dec_and_test(&ctx->refcount)) in ezusb_request_context_put() 304 WARN_ON(!ctx->done.done); in ezusb_request_context_put() 305 BUG_ON(ctx->outurb->status == -EINPROGRESS); in ezusb_request_context_put() 306 BUG_ON(timer_pending(&ctx->timer)); in ezusb_request_context_put() 307 usb_free_urb(ctx->outurb); in ezusb_request_context_put() 308 kfree(ctx->buf); in ezusb_request_context_put() 309 kfree(ctx); in ezusb_request_context_put() 323 struct request_context *ctx = (void *) _ctx; in ezusb_request_timerfn() local [all …]
|
/linux-4.4.14/drivers/gpu/drm/i915/ |
D | i915_gem_context.c | 136 static void i915_gem_context_clean(struct intel_context *ctx) in i915_gem_context_clean() argument 138 struct i915_hw_ppgtt *ppgtt = ctx->ppgtt; in i915_gem_context_clean() 153 struct intel_context *ctx = container_of(ctx_ref, typeof(*ctx), ref); in i915_gem_context_free() local 155 trace_i915_context_free(ctx); in i915_gem_context_free() 158 intel_lr_context_free(ctx); in i915_gem_context_free() 165 i915_gem_context_clean(ctx); in i915_gem_context_free() 167 i915_ppgtt_put(ctx->ppgtt); in i915_gem_context_free() 169 if (ctx->legacy_hw_ctx.rcs_state) in i915_gem_context_free() 170 drm_gem_object_unreference(&ctx->legacy_hw_ctx.rcs_state->base); in i915_gem_context_free() 171 list_del(&ctx->link); in i915_gem_context_free() [all …]
|
/linux-4.4.14/drivers/media/platform/s5p-jpeg/ |
D | jpeg-core.c | 552 struct s5p_jpeg_ctx *ctx) in s5p_jpeg_adjust_fourcc_to_subsampling() argument 556 if (ctx->subsampling != V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY) { in s5p_jpeg_adjust_fourcc_to_subsampling() 563 switch (ctx->subsampling) { in s5p_jpeg_adjust_fourcc_to_subsampling() 614 static int s5p_jpeg_to_user_subsampling(struct s5p_jpeg_ctx *ctx) in s5p_jpeg_to_user_subsampling() argument 616 WARN_ON(ctx->subsampling > 3); in s5p_jpeg_to_user_subsampling() 618 switch (ctx->jpeg->variant->version) { in s5p_jpeg_to_user_subsampling() 620 if (ctx->subsampling > 2) in s5p_jpeg_to_user_subsampling() 622 return ctx->subsampling; in s5p_jpeg_to_user_subsampling() 625 if (ctx->subsampling > 3) in s5p_jpeg_to_user_subsampling() 627 return exynos3250_decoded_subsampling[ctx->subsampling]; in s5p_jpeg_to_user_subsampling() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/ |
D | nv50.c | 39 struct context *ctx = info; in mxm_match_tmds_partner() local 44 desc.dig_conn == ctx->desc.dig_conn) in mxm_match_tmds_partner() 53 struct context *ctx = info; in mxm_match_dcb() local 56 mxms_output_device(mxm, data, &ctx->desc); in mxm_match_dcb() 59 if ((ctx->outp[0] & 0x0000000f) != ctx->desc.outp_type) in mxm_match_dcb() 68 u8 link = mxm_sor_map(bios, ctx->desc.dig_conn); in mxm_match_dcb() 69 if ((ctx->outp[0] & 0x0f000000) != (link & 0x0f) << 24) in mxm_match_dcb() 74 if ((link & ((ctx->outp[1] & 0x00000030) >> 4)) != link) in mxm_match_dcb() 84 if (ctx->desc.outp_type == 6 && ctx->desc.conn_type == 6 && in mxm_match_dcb() 85 mxms_foreach(mxm, 0x01, mxm_match_tmds_partner, ctx)) { in mxm_match_dcb() [all …]
|
/linux-4.4.14/net/mac80211/ |
D | chan.c | 13 struct ieee80211_chanctx *ctx) in ieee80211_chanctx_num_assigned() argument 20 list_for_each_entry(sdata, &ctx->assigned_vifs, assigned_chanctx_list) in ieee80211_chanctx_num_assigned() 27 struct ieee80211_chanctx *ctx) in ieee80211_chanctx_num_reserved() argument 34 list_for_each_entry(sdata, &ctx->reserved_vifs, reserved_chanctx_list) in ieee80211_chanctx_num_reserved() 41 struct ieee80211_chanctx *ctx) in ieee80211_chanctx_refcount() argument 43 return ieee80211_chanctx_num_assigned(local, ctx) + in ieee80211_chanctx_refcount() 44 ieee80211_chanctx_num_reserved(local, ctx); in ieee80211_chanctx_refcount() 49 struct ieee80211_chanctx *ctx; in ieee80211_num_chanctx() local 54 list_for_each_entry(ctx, &local->chanctx_list, list) in ieee80211_num_chanctx() 82 struct ieee80211_chanctx *ctx, in ieee80211_chanctx_reserved_chandef() argument [all …]
|
/linux-4.4.14/arch/s390/crypto/ |
D | sha_common.c | 23 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in s390_sha_update() local 29 index = ctx->count & (bsize - 1); in s390_sha_update() 30 ctx->count += len; in s390_sha_update() 37 memcpy(ctx->buf + index, data, bsize - index); in s390_sha_update() 38 ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize); in s390_sha_update() 48 ret = crypt_s390_kimd(ctx->func, ctx->state, data, in s390_sha_update() 57 memcpy(ctx->buf + index , data, len); in s390_sha_update() 65 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in s390_sha_final() local 75 index = ctx->count & (bsize - 1); in s390_sha_final() 79 ctx->buf[index] = 0x80; in s390_sha_final() [all …]
|
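
s390_sha_update() above is the classic partial-block bufferer: top up the stashed tail, stream whole blocks to the hardware, keep the remainder. The control flow in a freestanding sketch (the CPACF call is stubbed out):

#include <stddef.h>
#include <string.h>

#define BSIZE 64        /* block size of the underlying hash */

struct hash_state {
        unsigned char buf[BSIZE];
        unsigned long long count;
};

/* Stand-in for the CPACF "kimd" call in the excerpt; would run the
 * compression function over len bytes (len a multiple of BSIZE). */
static void process_blocks(struct hash_state *s,
                           const unsigned char *data, size_t len)
{
        (void)s; (void)data; (void)len;
}

/* Top up the stashed tail, stream whole blocks, keep the rest. */
static void sha_update(struct hash_state *s,
                       const unsigned char *data, size_t len)
{
        size_t index = s->count & (BSIZE - 1);

        s->count += len;
        if (index) {
                size_t fill = BSIZE - index;

                if (len < fill) {
                        memcpy(s->buf + index, data, len);
                        return;
                }
                memcpy(s->buf + index, data, fill);
                process_blocks(s, s->buf, BSIZE);
                data += fill;
                len -= fill;
        }
        if (len >= BSIZE) {
                size_t n = len & ~(size_t)(BSIZE - 1);

                process_blocks(s, data, n);
                data += n;
                len -= n;
        }
        if (len)
                memcpy(s->buf, data, len);
}
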
D | sha512_s390.c | 28 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in sha512_init() local 30 *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL; in sha512_init() 31 *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL; in sha512_init() 32 *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL; in sha512_init() 33 *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL; in sha512_init() 34 *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL; in sha512_init() 35 *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL; in sha512_init() 36 *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL; in sha512_init() 37 *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL; in sha512_init() 38 ctx->count = 0; in sha512_init() [all …]
|
/linux-4.4.14/drivers/crypto/ux500/cryp/ |
D | cryp_core.c | 178 static void add_session_id(struct cryp_ctx *ctx) in add_session_id() argument 187 ctx->session_id = atomic_read(&session_id); in add_session_id() 192 struct cryp_ctx *ctx; in cryp_interrupt_handler() local 204 ctx = device_data->current_ctx; in cryp_interrupt_handler() 206 if (ctx == NULL) { in cryp_interrupt_handler() 207 BUG_ON(!ctx); in cryp_interrupt_handler() 211 dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen, in cryp_interrupt_handler() 217 if (ctx->outlen / ctx->blocksize > 0) { in cryp_interrupt_handler() 218 count = ctx->blocksize / 4; in cryp_interrupt_handler() 220 readsl(&device_data->base->dout, ctx->outdata, count); in cryp_interrupt_handler() [all …]
|
D | cryp.c | 290 struct cryp_device_context *ctx, in cryp_save_device_context() argument 309 ctx->din = readl_relaxed(&src_reg->din); in cryp_save_device_context() 311 ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK; in cryp_save_device_context() 315 ctx->key_4_l = readl_relaxed(&src_reg->key_4_l); in cryp_save_device_context() 316 ctx->key_4_r = readl_relaxed(&src_reg->key_4_r); in cryp_save_device_context() 319 ctx->key_3_l = readl_relaxed(&src_reg->key_3_l); in cryp_save_device_context() 320 ctx->key_3_r = readl_relaxed(&src_reg->key_3_r); in cryp_save_device_context() 323 ctx->key_2_l = readl_relaxed(&src_reg->key_2_l); in cryp_save_device_context() 324 ctx->key_2_r = readl_relaxed(&src_reg->key_2_r); in cryp_save_device_context() 327 ctx->key_1_l = readl_relaxed(&src_reg->key_1_l); in cryp_save_device_context() [all …]
|
/linux-4.4.14/net/ipv4/netfilter/ |
D | nf_nat_snmp_basic.c | 152 static void asn1_open(struct asn1_ctx *ctx, in asn1_open() argument 156 ctx->begin = buf; in asn1_open() 157 ctx->end = buf + len; in asn1_open() 158 ctx->pointer = buf; in asn1_open() 159 ctx->error = ASN1_ERR_NOERROR; in asn1_open() 162 static unsigned char asn1_octet_decode(struct asn1_ctx *ctx, unsigned char *ch) in asn1_octet_decode() argument 164 if (ctx->pointer >= ctx->end) { in asn1_octet_decode() 165 ctx->error = ASN1_ERR_DEC_EMPTY; in asn1_octet_decode() 168 *ch = *(ctx->pointer)++; in asn1_octet_decode() 172 static unsigned char asn1_tag_decode(struct asn1_ctx *ctx, unsigned int *tag) in asn1_tag_decode() argument [all …]
|
/linux-4.4.14/drivers/media/platform/s5p-g2d/ |
D | g2d.c | 91 static struct g2d_frame *get_frame(struct g2d_ctx *ctx, in get_frame() argument 96 return &ctx->in; in get_frame() 98 return &ctx->out; in get_frame() 108 struct g2d_ctx *ctx = vb2_get_drv_priv(vq); in g2d_queue_setup() local 109 struct g2d_frame *f = get_frame(ctx, vq->type); in g2d_queue_setup() 116 alloc_ctxs[0] = ctx->dev->alloc_ctx; in g2d_queue_setup() 126 struct g2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue); in g2d_buf_prepare() local 127 struct g2d_frame *f = get_frame(ctx, vb->vb2_queue->type); in g2d_buf_prepare() 138 struct g2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue); in g2d_buf_queue() local 139 v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf); in g2d_buf_queue() [all …]
|
/linux-4.4.14/drivers/edac/ |
D | xgene_edac.c | 157 struct xgene_edac_mc_ctx *ctx = mci->pvt_info; in xgene_edac_mc_err_inject_write() local 163 ctx->mcu_csr + MCUESRRA0 + i * MCU_RANK_STRIDE); in xgene_edac_mc_err_inject_write() 188 struct xgene_edac_mc_ctx *ctx = mci->pvt_info; in xgene_edac_mc_check() local 197 xgene_edac_pcp_rd(ctx->edac, PCPHPERRINTSTS, &pcp_hp_stat); in xgene_edac_mc_check() 198 xgene_edac_pcp_rd(ctx->edac, PCPLPERRINTSTS, &pcp_lp_stat); in xgene_edac_mc_check() 205 reg = readl(ctx->mcu_csr + MCUESRR0 + rank * MCU_RANK_STRIDE); in xgene_edac_mc_check() 220 bank = readl(ctx->mcu_csr + MCUEBLRR0 + in xgene_edac_mc_check() 222 col_row = readl(ctx->mcu_csr + MCUERCRR0 + in xgene_edac_mc_check() 224 count = readl(ctx->mcu_csr + MCUSBECNT0 + in xgene_edac_mc_check() 238 writel(0x0, ctx->mcu_csr + MCUEBLRR0 + rank * MCU_RANK_STRIDE); in xgene_edac_mc_check() [all …]
|
/linux-4.4.14/drivers/vfio/pci/ |
D | vfio_pci_intrs.c | 36 eventfd_signal(vdev->ctx[0].trigger, 1); in vfio_send_intx_eventfd() 55 } else if (!vdev->ctx[0].masked) { in vfio_pci_intx_mask() 65 vdev->ctx[0].masked = true; in vfio_pci_intx_mask() 93 } else if (vdev->ctx[0].masked && !vdev->virq_disabled) { in vfio_pci_intx_unmask_handler() 105 vdev->ctx[0].masked = (ret > 0); in vfio_pci_intx_unmask_handler() 129 vdev->ctx[0].masked = true; in vfio_intx_handler() 131 } else if (!vdev->ctx[0].masked && /* may be shared */ in vfio_intx_handler() 133 vdev->ctx[0].masked = true; in vfio_intx_handler() 153 vdev->ctx = kzalloc(sizeof(struct vfio_pci_irq_ctx), GFP_KERNEL); in vfio_intx_enable() 154 if (!vdev->ctx) in vfio_intx_enable() [all …]
|
/linux-4.4.14/arch/x86/crypto/sha-mb/ |
D | sha1_mb.c | 88 static inline struct ahash_request *cast_mcryptd_ctx_to_req(struct mcryptd_hash_request_ctx *ctx) in cast_mcryptd_ctx_to_req() argument 90 return container_of((void *) ctx, struct ahash_request, __ctx); in cast_mcryptd_ctx_to_req() 134 …ic struct sha1_hash_ctx *sha1_ctx_mgr_resubmit(struct sha1_ctx_mgr *mgr, struct sha1_hash_ctx *ctx) in sha1_ctx_mgr_resubmit() argument 136 while (ctx) { in sha1_ctx_mgr_resubmit() 137 if (ctx->status & HASH_CTX_STS_COMPLETE) { in sha1_ctx_mgr_resubmit() 139 ctx->status = HASH_CTX_STS_COMPLETE; in sha1_ctx_mgr_resubmit() 140 return ctx; in sha1_ctx_mgr_resubmit() 147 if (ctx->partial_block_buffer_length == 0 && in sha1_ctx_mgr_resubmit() 148 ctx->incoming_buffer_length) { in sha1_ctx_mgr_resubmit() 150 const void *buffer = ctx->incoming_buffer; in sha1_ctx_mgr_resubmit() [all …]
|
D | sha_mb_ctx.h | 83 #define hash_ctx_user_data(ctx) ((ctx)->user_data) argument 84 #define hash_ctx_digest(ctx) ((ctx)->job.result_digest) argument 85 #define hash_ctx_processing(ctx) ((ctx)->status & HASH_CTX_STS_PROCESSING) argument 86 #define hash_ctx_complete(ctx) ((ctx)->status == HASH_CTX_STS_COMPLETE) argument 87 #define hash_ctx_status(ctx) ((ctx)->status) argument 88 #define hash_ctx_error(ctx) ((ctx)->error) argument 89 #define hash_ctx_init(ctx) \ argument 91 (ctx)->error = HASH_CTX_ERROR_NONE; \ 92 (ctx)->status = HASH_CTX_STS_COMPLETE; \
|
/linux-4.4.14/lib/mpi/ |
D | mpih-mul.c | 337 struct karatsuba_ctx *ctx) in mpihelp_mul_karatsuba_case() argument 341 if (!ctx->tspace || ctx->tspace_size < vsize) { in mpihelp_mul_karatsuba_case() 342 if (ctx->tspace) in mpihelp_mul_karatsuba_case() 343 mpi_free_limb_space(ctx->tspace); in mpihelp_mul_karatsuba_case() 344 ctx->tspace = mpi_alloc_limb_space(2 * vsize); in mpihelp_mul_karatsuba_case() 345 if (!ctx->tspace) in mpihelp_mul_karatsuba_case() 347 ctx->tspace_size = vsize; in mpihelp_mul_karatsuba_case() 350 MPN_MUL_N_RECURSE(prodp, up, vp, vsize, ctx->tspace); in mpihelp_mul_karatsuba_case() 356 if (!ctx->tp || ctx->tp_size < vsize) { in mpihelp_mul_karatsuba_case() 357 if (ctx->tp) in mpihelp_mul_karatsuba_case() [all …]
|
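
mpihelp_mul_karatsuba_case() caches its temporary limb space in the karatsuba_ctx and reallocates only when an operand outgrows it. The pattern in isolation:

#include <stdlib.h>

/* Cached, grow-only scratch buffer: reallocate only when missing
 * or smaller than the current request, as with ctx->tspace. */
struct scratch {
        void *buf;
        size_t size;
};

static void *scratch_get(struct scratch *s, size_t need)
{
        if (!s->buf || s->size < need) {
                free(s->buf);
                s->buf = malloc(need);
                s->size = s->buf ? need : 0;
        }
        return s->buf;
}
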
/linux-4.4.14/drivers/clk/samsung/ |
D | clk.c | 62 struct samsung_clk_provider *ctx; in samsung_clk_init() local 66 ctx = kzalloc(sizeof(struct samsung_clk_provider), GFP_KERNEL); in samsung_clk_init() 67 if (!ctx) in samsung_clk_init() 77 ctx->reg_base = base; in samsung_clk_init() 78 ctx->clk_data.clks = clk_table; in samsung_clk_init() 79 ctx->clk_data.clk_num = nr_clks; in samsung_clk_init() 80 spin_lock_init(&ctx->lock); in samsung_clk_init() 82 return ctx; in samsung_clk_init() 86 struct samsung_clk_provider *ctx) in samsung_clk_of_add_provider() argument 90 &ctx->clk_data)) in samsung_clk_of_add_provider() [all …]
|
/linux-4.4.14/arch/x86/include/asm/crypto/ |
D | camellia.h | 40 asmlinkage void __camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst, 42 asmlinkage void camellia_dec_blk(struct camellia_ctx *ctx, u8 *dst, 46 asmlinkage void __camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst, 48 asmlinkage void camellia_dec_blk_2way(struct camellia_ctx *ctx, u8 *dst, 52 asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst, 54 asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst, 57 asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst, 59 asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst, 62 asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst, 64 asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst, [all …]
|
D | serpent-sse2.h | 11 asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst, 13 asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst, 16 static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, in serpent_enc_blk_xway() argument 19 __serpent_enc_blk_4way(ctx, dst, src, false); in serpent_enc_blk_xway() 22 static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, in serpent_enc_blk_xway_xor() argument 25 __serpent_enc_blk_4way(ctx, dst, src, true); in serpent_enc_blk_xway_xor() 28 static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, in serpent_dec_blk_xway() argument 31 serpent_dec_blk_4way(ctx, dst, src); in serpent_dec_blk_xway() 38 asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst, 40 asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst, [all …]
|
/linux-4.4.14/drivers/iommu/ |
D | msm_iommu.c | 126 static void __reset_context(void __iomem *base, int ctx) in __reset_context() argument 128 SET_BPRCOSH(base, ctx, 0); in __reset_context() 129 SET_BPRCISH(base, ctx, 0); in __reset_context() 130 SET_BPRCNSH(base, ctx, 0); in __reset_context() 131 SET_BPSHCFG(base, ctx, 0); in __reset_context() 132 SET_BPMTCFG(base, ctx, 0); in __reset_context() 133 SET_ACTLR(base, ctx, 0); in __reset_context() 134 SET_SCTLR(base, ctx, 0); in __reset_context() 135 SET_FSRRESTORE(base, ctx, 0); in __reset_context() 136 SET_TTBR0(base, ctx, 0); in __reset_context() [all …]
|
/linux-4.4.14/drivers/power/reset/ |
D | syscon-reboot.c | 37 struct syscon_reboot_context *ctx = in syscon_restart_handle() local 42 regmap_write(ctx->map, ctx->offset, ctx->mask); in syscon_restart_handle() 52 struct syscon_reboot_context *ctx; in syscon_reboot_probe() local 56 ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL); in syscon_reboot_probe() 57 if (!ctx) in syscon_reboot_probe() 60 ctx->map = syscon_regmap_lookup_by_phandle(dev->of_node, "regmap"); in syscon_reboot_probe() 61 if (IS_ERR(ctx->map)) in syscon_reboot_probe() 62 return PTR_ERR(ctx->map); in syscon_reboot_probe() 64 if (of_property_read_u32(pdev->dev.of_node, "offset", &ctx->offset)) in syscon_reboot_probe() 67 if (of_property_read_u32(pdev->dev.of_node, "mask", &ctx->mask)) in syscon_reboot_probe() [all …]
|
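syscon-reboot.c above is nearly the whole driver: probe caches a regmap plus a DT-supplied offset/mask, and the restart path is a single register write. A sketch of the handler half, with the context layout inferred from the snippet:

    struct syscon_reboot_context {
            struct regmap *map;             /* looked up via the "regmap" phandle */
            u32 offset;                     /* DT "offset" property */
            u32 mask;                       /* DT "mask" property: value to write */
            struct notifier_block restart_handler;
    };

    static int example_restart_handle(struct notifier_block *this,
                                      unsigned long mode, void *cmd)
    {
            struct syscon_reboot_context *ctx =
                    container_of(this, struct syscon_reboot_context,
                                 restart_handler);

            /* One write to the reset register; the SoC should reboot
             * before the delay below expires. */
            regmap_write(ctx->map, ctx->offset, ctx->mask);
            mdelay(1000);

            return NOTIFY_DONE;
    }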
D | xgene-reboot.c | 47 struct xgene_reboot_context *ctx = in xgene_restart_handler() local 52 writel(ctx->mask, ctx->csr); in xgene_restart_handler() 56 dev_emerg(ctx->dev, "Unable to restart system\n"); in xgene_restart_handler() 63 struct xgene_reboot_context *ctx; in xgene_reboot_probe() local 67 ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL); in xgene_reboot_probe() 68 if (!ctx) in xgene_reboot_probe() 71 ctx->csr = of_iomap(dev->of_node, 0); in xgene_reboot_probe() 72 if (!ctx->csr) { in xgene_reboot_probe() 77 if (of_property_read_u32(dev->of_node, "mask", &ctx->mask)) in xgene_reboot_probe() 78 ctx->mask = 0xFFFFFFFF; in xgene_reboot_probe() [all …]
|
/linux-4.4.14/drivers/net/ethernet/microchip/ |
D | encx24j600-regmap.c | 28 static int encx24j600_switch_bank(struct encx24j600_context *ctx, in encx24j600_switch_bank() argument 34 ret = spi_write(ctx->spi, &bank_opcode, 1); in encx24j600_switch_bank() 36 ctx->bank = bank; in encx24j600_switch_bank() 41 static int encx24j600_cmdn(struct encx24j600_context *ctx, u8 opcode, in encx24j600_cmdn() argument 51 return spi_sync(ctx->spi, &m); in encx24j600_cmdn() 56 struct encx24j600_context *ctx = context; in regmap_lock_mutex() local 57 mutex_lock(&ctx->mutex); in regmap_lock_mutex() 62 struct encx24j600_context *ctx = context; in regmap_unlock_mutex() local 63 mutex_unlock(&ctx->mutex); in regmap_unlock_mutex() 69 struct encx24j600_context *ctx = context; in regmap_encx24j600_sfr_read() local [all …]
|
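regmap_lock_mutex()/regmap_unlock_mutex() above exist because regmap lets a driver supply its own locking callbacks, keeping SPI register access and the cached bank-switch state consistent under one driver mutex. Roughly how they get wired into the regmap_config at probe time, sketched (only the lock/unlock/lock_arg members are taken from the snippet):

    static void example_init_regmap_cfg(struct encx24j600_context *ctx,
                                        struct regmap_config *cfg)
    {
            mutex_init(&ctx->mutex);

            cfg->lock     = regmap_lock_mutex;    /* callbacks from the snippet */
            cfg->unlock   = regmap_unlock_mutex;
            cfg->lock_arg = ctx;    /* comes back to the callbacks as 'context' */
    }

The config is then passed to the usual regmap registration call; every regmap read/write brackets itself with these callbacks instead of regmap's default spinlock/mutex.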
/linux-4.4.14/tools/perf/util/ |
D | stat-shadow.c | 36 int ctx = 0; in evsel_context() local 39 ctx |= CTX_BIT_KERNEL; in evsel_context() 41 ctx |= CTX_BIT_USER; in evsel_context() 43 ctx |= CTX_BIT_HV; in evsel_context() 45 ctx |= CTX_BIT_HOST; in evsel_context() 47 ctx |= CTX_BIT_IDLE; in evsel_context() 49 return ctx; in evsel_context() 81 int ctx = evsel_context(counter); in perf_stat__update_shadow_stats() local 86 update_stats(&runtime_cycles_stats[ctx][cpu], count[0]); in perf_stat__update_shadow_stats() 88 update_stats(&runtime_cycles_in_tx_stats[ctx][cpu], count[0]); in perf_stat__update_shadow_stats() [all …]
|
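evsel_context() above packs an event's exclusion flags into a small integer so shadow statistics can be kept per (context, cpu) pair. A self-contained sketch of the same encoding (the bit values are assumptions; the attr fields are the real perf_event_attr bitfields):

    enum {
            EX_CTX_BIT_KERNEL = 1 << 0,
            EX_CTX_BIT_USER   = 1 << 1,
            EX_CTX_BIT_HV     = 1 << 2,
            EX_CTX_BIT_HOST   = 1 << 3,
            EX_CTX_BIT_IDLE   = 1 << 4,
    };

    static int example_evsel_context(const struct perf_event_attr *attr)
    {
            int ctx = 0;

            if (attr->exclude_kernel)
                    ctx |= EX_CTX_BIT_KERNEL;
            if (attr->exclude_user)
                    ctx |= EX_CTX_BIT_USER;
            if (attr->exclude_hv)
                    ctx |= EX_CTX_BIT_HV;
            if (attr->exclude_host)
                    ctx |= EX_CTX_BIT_HOST;
            if (attr->exclude_idle)
                    ctx |= EX_CTX_BIT_IDLE;

            return ctx;     /* index into runtime_*_stats[ctx][cpu] */
    }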
D | parse-options.c | 363 void parse_options_start(struct parse_opt_ctx_t *ctx, in parse_options_start() argument 366 memset(ctx, 0, sizeof(*ctx)); in parse_options_start() 367 ctx->argc = argc - 1; in parse_options_start() 368 ctx->argv = argv + 1; in parse_options_start() 369 ctx->out = argv; in parse_options_start() 370 ctx->cpidx = ((flags & PARSE_OPT_KEEP_ARGV0) != 0); in parse_options_start() 371 ctx->flags = flags; in parse_options_start() 381 int parse_options_step(struct parse_opt_ctx_t *ctx, in parse_options_step() argument 385 int internal_help = !(ctx->flags & PARSE_OPT_NO_INTERNAL_HELP); in parse_options_step() 390 ctx->opt = NULL; in parse_options_step() [all …]
|
/linux-4.4.14/drivers/ata/ |
D | ahci_xgene.c | 104 static int xgene_ahci_init_memram(struct xgene_ahci_context *ctx) in xgene_ahci_init_memram() argument 106 dev_dbg(ctx->dev, "Release memory from shutdown\n"); in xgene_ahci_init_memram() 107 writel(0x0, ctx->csr_diag + CFG_MEM_RAM_SHUTDOWN); in xgene_ahci_init_memram() 108 readl(ctx->csr_diag + CFG_MEM_RAM_SHUTDOWN); /* Force a barrier */ in xgene_ahci_init_memram() 110 if (readl(ctx->csr_diag + BLOCK_MEM_RDY) != 0xFFFFFFFF) { in xgene_ahci_init_memram() 111 dev_err(ctx->dev, "failed to release memory from shutdown\n"); in xgene_ahci_init_memram() 206 struct xgene_ahci_context *ctx = hpriv->plat_data; in xgene_ahci_qc_issue() local 215 if (ctx->class[ap->port_no] == ATA_DEV_PMP) { in xgene_ahci_qc_issue() 222 if (unlikely((ctx->last_cmd[ap->port_no] == ATA_CMD_ID_ATA) || in xgene_ahci_qc_issue() 223 (ctx->last_cmd[ap->port_no] == ATA_CMD_PACKET) || in xgene_ahci_qc_issue() [all …]
|
/linux-4.4.14/drivers/acpi/apei/ |
D | erst.c | 119 static int erst_exec_load_var1(struct apei_exec_context *ctx, in erst_exec_load_var1() argument 122 return __apei_exec_read_register(entry, &ctx->var1); in erst_exec_load_var1() 125 static int erst_exec_load_var2(struct apei_exec_context *ctx, in erst_exec_load_var2() argument 128 return __apei_exec_read_register(entry, &ctx->var2); in erst_exec_load_var2() 131 static int erst_exec_store_var1(struct apei_exec_context *ctx, in erst_exec_store_var1() argument 134 return __apei_exec_write_register(entry, ctx->var1); in erst_exec_store_var1() 137 static int erst_exec_add(struct apei_exec_context *ctx, in erst_exec_add() argument 140 ctx->var1 += ctx->var2; in erst_exec_add() 144 static int erst_exec_subtract(struct apei_exec_context *ctx, in erst_exec_subtract() argument 147 ctx->var1 -= ctx->var2; in erst_exec_subtract() [all …]
|
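The erst_exec_* handlers above form the instruction set of a tiny interpreter: ctx->var1 and ctx->var2 act as registers, and each handler implements one ERST instruction (the apei_exec_ins_func_t typedef in apei-internal.h below fixes the signature). A sketch of the dispatch idea; the table layout here is illustrative, not the file's actual type:

    struct example_ins {
            u8 instruction;                 /* code from the ACPI table entry */
            int (*run)(struct apei_exec_context *ctx,
                       struct acpi_whea_header *entry);
    };

    /* The APEI core walks the firmware-provided entries and calls the
     * handler registered for each entry's instruction code. */
    static const struct example_ins example_ins_table[] = {
            { ACPI_ERST_LOAD_VAR1, erst_exec_load_var1 },
            { ACPI_ERST_ADD,       erst_exec_add },
            { ACPI_ERST_SUBTRACT,  erst_exec_subtract },
    };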
D | apei-internal.h | 14 typedef int (*apei_exec_ins_func_t)(struct apei_exec_context *ctx, 37 void apei_exec_ctx_init(struct apei_exec_context *ctx, 43 static inline void apei_exec_ctx_set_input(struct apei_exec_context *ctx, in apei_exec_ctx_set_input() argument 46 ctx->value = input; in apei_exec_ctx_set_input() 49 static inline u64 apei_exec_ctx_get_output(struct apei_exec_context *ctx) in apei_exec_ctx_get_output() argument 51 return ctx->value; in apei_exec_ctx_get_output() 54 int __apei_exec_run(struct apei_exec_context *ctx, u8 action, bool optional); 56 static inline int apei_exec_run(struct apei_exec_context *ctx, u8 action) in apei_exec_run() argument 58 return __apei_exec_run(ctx, action, 0); in apei_exec_run() 62 static inline int apei_exec_run_optional(struct apei_exec_context *ctx, u8 action) in apei_exec_run_optional() argument [all …]
|
/linux-4.4.14/drivers/firewire/ |
D | ohci.c | 114 typedef int (*descriptor_callback_t)(struct context *ctx, 665 static inline dma_addr_t ar_buffer_bus(struct ar_context *ctx, unsigned int i) in ar_buffer_bus() argument 667 return page_private(ctx->pages[i]); in ar_buffer_bus() 670 static void ar_context_link_page(struct ar_context *ctx, unsigned int index) in ar_context_link_page() argument 674 d = &ctx->descriptors[index]; in ar_context_link_page() 680 d = &ctx->descriptors[ctx->last_buffer_index]; in ar_context_link_page() 683 ctx->last_buffer_index = index; in ar_context_link_page() 685 reg_write(ctx->ohci, CONTROL_SET(ctx->regs), CONTEXT_WAKE); in ar_context_link_page() 688 static void ar_context_release(struct ar_context *ctx) in ar_context_release() argument 692 vunmap(ctx->buffer); in ar_context_release() [all …]
|
D | core-iso.c | 167 struct fw_iso_context *ctx; in fw_iso_context_create() local 169 ctx = card->driver->allocate_iso_context(card, in fw_iso_context_create() 171 if (IS_ERR(ctx)) in fw_iso_context_create() 172 return ctx; in fw_iso_context_create() 174 ctx->card = card; in fw_iso_context_create() 175 ctx->type = type; in fw_iso_context_create() 176 ctx->channel = channel; in fw_iso_context_create() 177 ctx->speed = speed; in fw_iso_context_create() 178 ctx->header_size = header_size; in fw_iso_context_create() 179 ctx->callback.sc = callback; in fw_iso_context_create() [all …]
|
/linux-4.4.14/drivers/video/fbdev/omap2/ |
D | vrfb.c | 83 static void omap2_sms_write_rot_control(u32 val, unsigned ctx) in omap2_sms_write_rot_control() argument 85 __raw_writel(val, vrfb_base + SMS_ROT_CONTROL(ctx)); in omap2_sms_write_rot_control() 88 static void omap2_sms_write_rot_size(u32 val, unsigned ctx) in omap2_sms_write_rot_size() argument 90 __raw_writel(val, vrfb_base + SMS_ROT_SIZE(ctx)); in omap2_sms_write_rot_size() 93 static void omap2_sms_write_rot_physical_ba(u32 val, unsigned ctx) in omap2_sms_write_rot_physical_ba() argument 95 __raw_writel(val, vrfb_base + SMS_ROT_PHYSICAL_BA(ctx)); in omap2_sms_write_rot_physical_ba() 98 static inline void restore_hw_context(int ctx) in restore_hw_context() argument 100 omap2_sms_write_rot_control(ctxs[ctx].control, ctx); in restore_hw_context() 101 omap2_sms_write_rot_size(ctxs[ctx].size, ctx); in restore_hw_context() 102 omap2_sms_write_rot_physical_ba(ctxs[ctx].physical_ba, ctx); in restore_hw_context() [all …]
|
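restore_hw_context() above replays three shadowed rotation registers into the SMS block after a context loss, with ctxs[] caching the values. One way to populate that cache is a readback in the opposite direction, sketched here with the same helpers (vrfb_base and the SMS_ROT_* offsets come from this file; the struct layout is inferred from the restore path):

    struct example_vrfb_regs {
            u32 control;
            u32 size;
            u32 physical_ba;
    };

    static void example_save_hw_context(struct example_vrfb_regs *c, int ctx)
    {
            /* Inverse of restore_hw_context(): capture the live registers
             * so they survive an SMS context loss. */
            c->control     = __raw_readl(vrfb_base + SMS_ROT_CONTROL(ctx));
            c->size        = __raw_readl(vrfb_base + SMS_ROT_SIZE(ctx));
            c->physical_ba = __raw_readl(vrfb_base + SMS_ROT_PHYSICAL_BA(ctx));
    }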
/linux-4.4.14/fs/xfs/ |
D | xfs_log_cil.c | 283 struct xfs_cil_ctx *ctx = cil->xc_ctx; in xlog_cil_insert_items() local 321 ctx->nvecs += diff_iovecs; in xlog_cil_insert_items() 325 list_splice_init(&tp->t_busy, &ctx->busy_extents); in xlog_cil_insert_items() 334 if (ctx->ticket->t_curr_res == 0) { in xlog_cil_insert_items() 335 ctx->ticket->t_curr_res = ctx->ticket->t_unit_res; in xlog_cil_insert_items() 336 tp->t_ticket->t_curr_res -= ctx->ticket->t_unit_res; in xlog_cil_insert_items() 341 if (len > 0 && (ctx->space_used / iclog_space != in xlog_cil_insert_items() 342 (ctx->space_used + len) / iclog_space)) { in xlog_cil_insert_items() 348 ctx->ticket->t_unit_res += hdrs; in xlog_cil_insert_items() 349 ctx->ticket->t_curr_res += hdrs; in xlog_cil_insert_items() [all …]
|
/linux-4.4.14/drivers/video/fbdev/omap2/dss/ |
D | dpi.c | 159 struct dpi_clk_calc_ctx *ctx = data; in dpi_calc_dispc_cb() local 166 if (ctx->pck_min >= 100000000) { in dpi_calc_dispc_cb() 174 ctx->dispc_cinfo.lck_div = lckd; in dpi_calc_dispc_cb() 175 ctx->dispc_cinfo.pck_div = pckd; in dpi_calc_dispc_cb() 176 ctx->dispc_cinfo.lck = lck; in dpi_calc_dispc_cb() 177 ctx->dispc_cinfo.pck = pck; in dpi_calc_dispc_cb() 186 struct dpi_clk_calc_ctx *ctx = data; in dpi_calc_hsdiv_cb() local 193 if (m_dispc > 1 && m_dispc % 2 != 0 && ctx->pck_min >= 100000000) in dpi_calc_hsdiv_cb() 196 ctx->dsi_cinfo.mX[HSDIV_DISPC] = m_dispc; in dpi_calc_hsdiv_cb() 197 ctx->dsi_cinfo.clkout[HSDIV_DISPC] = dispc; in dpi_calc_hsdiv_cb() [all …]
|
/linux-4.4.14/arch/frv/mm/ |
D | mmu-context.c | 44 static unsigned get_cxn(mm_context_t *ctx) in get_cxn() argument 50 if (!list_empty(&ctx->id_link)) { in get_cxn() 51 list_move_tail(&ctx->id_link, &cxn_owners_lru); in get_cxn() 78 ctx->id = cxn; in get_cxn() 79 list_add_tail(&ctx->id_link, &cxn_owners_lru); in get_cxn() 82 return ctx->id; in get_cxn() 90 void change_mm_context(mm_context_t *old, mm_context_t *ctx, pgd_t *pgd) in change_mm_context() argument 106 get_cxn(ctx); in change_mm_context() 107 ctx->id_busy = 1; in change_mm_context() 110 asm volatile("movgs %0,cxnr" : : "r"(ctx->id)); in change_mm_context() [all …]
|
/linux-4.4.14/arch/x86/crypto/ |
D | aesni-intel_glue.c | 83 asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key, 85 asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out, 87 asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out, 89 asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out, 91 asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out, 93 asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out, 95 asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out, 106 static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out, 108 asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out, 111 asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out, [all …]
|
D | cast6_avx_glue.c | 44 asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst, 46 asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst, 49 asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst, 51 asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src, 54 asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst, 56 asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst, 59 static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in cast6_xts_enc() argument 61 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in cast6_xts_enc() 65 static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) in cast6_xts_dec() argument 67 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in cast6_xts_dec() [all …]
|
D | serpent_avx_glue.c | 44 asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst, 48 asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, 52 asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, 56 asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst, 60 asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst, 64 asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, 68 void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) in __serpent_crypt_ctr() argument 75 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in __serpent_crypt_ctr() 80 void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in serpent_xts_enc() argument 82 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in serpent_xts_enc() [all …]
|
D | twofish_avx_glue.c | 49 asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst, 51 asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst, 54 asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst, 56 asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst, 59 asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst, 61 asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst, 64 static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_3way() argument 67 __twofish_enc_blk_3way(ctx, dst, src, false); in twofish_enc_blk_3way() 70 static void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in twofish_xts_enc() argument 72 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in twofish_xts_enc() [all …]
|
D | twofish_glue_3way.c | 39 static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_3way() argument 42 __twofish_enc_blk_3way(ctx, dst, src, false); in twofish_enc_blk_3way() 45 static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_xor_3way() argument 48 __twofish_enc_blk_3way(ctx, dst, src, true); in twofish_enc_blk_xor_3way() 51 void twofish_dec_blk_cbc_3way(void *ctx, u128 *dst, const u128 *src) in twofish_dec_blk_cbc_3way() argument 58 twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src); in twofish_dec_blk_cbc_3way() 65 void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) in twofish_enc_blk_ctr() argument 75 twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in twofish_enc_blk_ctr() 80 void twofish_enc_blk_ctr_3way(void *ctx, u128 *dst, const u128 *src, in twofish_enc_blk_ctr_3way() argument 98 twofish_enc_blk_xor_3way(ctx, (u8 *)dst, (u8 *)ctrblks); in twofish_enc_blk_ctr_3way() [all …]
|
D | serpent_sse2_glue.c | 48 static void serpent_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src) in serpent_decrypt_cbc_xway() argument 56 serpent_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); in serpent_decrypt_cbc_xway() 62 static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) in serpent_crypt_ctr() argument 69 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in serpent_crypt_ctr() 73 static void serpent_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src, in serpent_crypt_ctr_xway() argument 87 serpent_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks); in serpent_crypt_ctr_xway() 186 struct serpent_ctx *ctx; member 193 struct crypt_priv *ctx = priv; in encrypt_callback() local 196 ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes); in encrypt_callback() 199 serpent_enc_blk_xway(ctx->ctx, srcdst, srcdst); in encrypt_callback() [all …]
|
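serpent_crypt_ctr() above is the classic CTR construction: encrypt the counter block to get keystream, XOR it into the data, then bump the counter; the _xway variant just does several blocks per call. The same step written out generically, with a plain byte loop instead of the kernel's u128/le128 helpers:

    static void example_ctr_block(void *key_ctx,
                                  void (*encrypt)(void *key_ctx, u8 *dst,
                                                  const u8 *src),
                                  u8 *dst, const u8 *src, u8 ctr[16])
    {
            u8 keystream[16];
            int i;

            encrypt(key_ctx, keystream, ctr);       /* keystream = E_k(counter) */

            for (i = 0; i < 16; i++)
                    dst[i] = src[i] ^ keystream[i]; /* C = P xor keystream */

            for (i = 15; i >= 0; i--)               /* big-endian increment */
                    if (++ctr[i])
                            break;
    }

Because only the counter ever goes through the cipher, the same routine serves encryption and decryption.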
D | camellia_aesni_avx_glue.c | 29 asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst, 33 asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst, 37 asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst, 41 asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst, 45 asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst, 49 asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst, 53 void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in camellia_xts_enc() argument 55 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in camellia_xts_enc() 60 void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) in camellia_xts_dec() argument 62 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in camellia_xts_dec() [all …]
|
D | camellia_aesni_avx2_glue.c | 30 asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst, 32 asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst, 35 asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst, 37 asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst, 40 asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst, 42 asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst, 205 struct camellia_ctx *ctx; member 212 struct crypt_priv *ctx = priv; in encrypt_callback() local 215 ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes); in encrypt_callback() 218 camellia_ecb_enc_32way(ctx->ctx, srcdst, srcdst); in encrypt_callback() [all …]
|
/linux-4.4.14/security/keys/ |
D | keyring.c | 426 struct keyring_read_iterator_context *ctx = data; in keyring_read_iterator() local 431 key->type->name, key->serial, ctx->count, ctx->qty); in keyring_read_iterator() 433 if (ctx->count >= ctx->qty) in keyring_read_iterator() 436 ret = put_user(key->serial, ctx->buffer); in keyring_read_iterator() 439 ctx->buffer++; in keyring_read_iterator() 440 ctx->count += sizeof(key->serial); in keyring_read_iterator() 454 struct keyring_read_iterator_context ctx; in keyring_read() local 468 ctx.qty = nr_keys * sizeof(key_serial_t); in keyring_read() 471 return ctx.qty; in keyring_read() 473 if (buflen > ctx.qty) in keyring_read() [all …]
|
D | process_keys.c | 322 key_ref_t search_my_process_keyrings(struct keyring_search_context *ctx) in search_my_process_keyrings() argument 338 if (ctx->cred->thread_keyring) { in search_my_process_keyrings() 340 make_key_ref(ctx->cred->thread_keyring, 1), ctx); in search_my_process_keyrings() 356 if (ctx->cred->process_keyring) { in search_my_process_keyrings() 358 make_key_ref(ctx->cred->process_keyring, 1), ctx); in search_my_process_keyrings() 376 if (ctx->cred->session_keyring) { in search_my_process_keyrings() 379 make_key_ref(rcu_dereference(ctx->cred->session_keyring), 1), in search_my_process_keyrings() 380 ctx); in search_my_process_keyrings() 399 else if (ctx->cred->user->session_keyring) { in search_my_process_keyrings() 401 make_key_ref(ctx->cred->user->session_keyring, 1), in search_my_process_keyrings() [all …]
|
/linux-4.4.14/drivers/media/platform/s5p-tv/ |
D | sii9234_drv.c | 105 static int sii9234_reset(struct sii9234_context *ctx) in sii9234_reset() argument 107 struct i2c_client *client = ctx->client; in sii9234_reset() 111 gpio_direction_output(ctx->gpio_n_reset, 1); in sii9234_reset() 113 gpio_direction_output(ctx->gpio_n_reset, 0); in sii9234_reset() 115 gpio_direction_output(ctx->gpio_n_reset, 1); in sii9234_reset() 216 static int sii9234_set_internal(struct sii9234_context *ctx) in sii9234_set_internal() argument 218 struct i2c_client *client = ctx->client; in sii9234_set_internal() 233 struct sii9234_context *ctx = sd_to_context(sd); in sii9234_runtime_suspend() local 234 struct i2c_client *client = ctx->client; in sii9234_runtime_suspend() 239 regulator_disable(ctx->power); in sii9234_runtime_suspend() [all …]
|
/linux-4.4.14/arch/metag/kernel/ |
D | process.c | 135 pr_info(" SaveMask = 0x%04hx\n", regs->ctx.SaveMask); in show_regs() 136 pr_info(" Flags = 0x%04hx (%c%c%c%c)\n", regs->ctx.Flags, in show_regs() 137 regs->ctx.Flags & FLAG_Z ? 'Z' : 'z', in show_regs() 138 regs->ctx.Flags & FLAG_N ? 'N' : 'n', in show_regs() 139 regs->ctx.Flags & FLAG_O ? 'O' : 'o', in show_regs() 140 regs->ctx.Flags & FLAG_C ? 'C' : 'c'); in show_regs() 141 pr_info(" TXRPT = 0x%08x\n", regs->ctx.CurrRPT); in show_regs() 142 pr_info(" PC = 0x%08x\n", regs->ctx.CurrPC); in show_regs() 148 regs->ctx.AX[i].U0); in show_regs() 151 regs->ctx.AX[i].U1); in show_regs() [all …]
|
/linux-4.4.14/fs/ntfs/ |
D | attrib.c | 84 int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn, ntfs_attr_search_ctx *ctx) in ntfs_map_runlist_nolock() argument 103 if (!ctx) { in ntfs_map_runlist_nolock() 108 ctx = ntfs_attr_get_search_ctx(base_ni, m); in ntfs_map_runlist_nolock() 109 if (unlikely(!ctx)) { in ntfs_map_runlist_nolock() 116 BUG_ON(IS_ERR(ctx->mrec)); in ntfs_map_runlist_nolock() 117 a = ctx->attr; in ntfs_map_runlist_nolock() 143 old_ctx = *ctx; in ntfs_map_runlist_nolock() 161 ntfs_attr_reinit_search_ctx(ctx); in ntfs_map_runlist_nolock() 167 CASE_SENSITIVE, vcn, NULL, 0, ctx); in ntfs_map_runlist_nolock() 173 BUG_ON(!ctx->attr->non_resident); in ntfs_map_runlist_nolock() [all …]
|
/linux-4.4.14/arch/arm/crypto/ |
D | sha512-armv4.pl | 56 $ctx="r0"; # parameter block 286 ldr $Elo,[$ctx,#$Eoff+$lo] 287 ldr $Ehi,[$ctx,#$Eoff+$hi] 288 ldr $t0, [$ctx,#$Goff+$lo] 289 ldr $t1, [$ctx,#$Goff+$hi] 290 ldr $t2, [$ctx,#$Hoff+$lo] 291 ldr $t3, [$ctx,#$Hoff+$hi] 297 ldr $Alo,[$ctx,#$Aoff+$lo] 298 ldr $Ahi,[$ctx,#$Aoff+$hi] 299 ldr $Tlo,[$ctx,#$Boff+$lo] [all …]
|
D | ghash-ce-glue.c | 48 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_init() local 50 *ctx = (struct ghash_desc_ctx){}; in ghash_init() 57 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_update() local 58 unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; in ghash_update() 60 ctx->count += len; in ghash_update() 69 memcpy(ctx->buf + partial, src, p); in ghash_update() 78 pmull_ghash_update(blocks, ctx->digest, src, key, in ghash_update() 79 partial ? ctx->buf : NULL); in ghash_update() 85 memcpy(ctx->buf + partial, src, len); in ghash_update() 91 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_final() local [all …]
|
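Both ghash_update() bodies above follow the standard partial-block discipline for streaming hashes: top up a carried tail first, bulk-process whole blocks straight from the caller, then stash the remainder. A self-contained variant of that control flow (this one flushes a just-filled buffer immediately, where the ghash code defers it slightly differently for final()):

    #define EX_BLOCK 16

    struct example_hash_ctx {
            u8  buf[EX_BLOCK];      /* carried partial block */
            u64 count;              /* total bytes consumed */
    };

    static void example_update(struct example_hash_ctx *ctx, const u8 *src,
                               unsigned int len,
                               void (*process)(struct example_hash_ctx *c,
                                               const u8 *blocks,
                                               unsigned int nblocks))
    {
            unsigned int partial = ctx->count % EX_BLOCK;

            ctx->count += len;

            if (partial) {                  /* finish the carried block */
                    unsigned int p = min_t(unsigned int, len,
                                           EX_BLOCK - partial);

                    memcpy(ctx->buf + partial, src, p);
                    src += p;
                    len -= p;
                    if (partial + p < EX_BLOCK)
                            return;         /* still short of one block */
                    process(ctx, ctx->buf, 1);
            }

            if (len >= EX_BLOCK) {          /* bulk path, no copying */
                    unsigned int n = len / EX_BLOCK;

                    process(ctx, src, n);
                    src += n * EX_BLOCK;
                    len -= n * EX_BLOCK;
            }

            if (len)
                    memcpy(ctx->buf, src, len);     /* stash the tail */
    }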
/linux-4.4.14/drivers/crypto/ccp/ |
D | ccp-crypto-aes-cmac.c | 62 struct ccp_ctx *ctx = crypto_ahash_ctx(tfm); in ccp_do_cmac_update() local 72 if (!ctx->u.aes.key_len) in ccp_do_cmac_update() 156 cmac_key_sg = (need_pad) ? &ctx->u.aes.k2_sg in ccp_do_cmac_update() 157 : &ctx->u.aes.k1_sg; in ccp_do_cmac_update() 162 rctx->cmd.u.aes.type = ctx->u.aes.type; in ccp_do_cmac_update() 163 rctx->cmd.u.aes.mode = ctx->u.aes.mode; in ccp_do_cmac_update() 165 rctx->cmd.u.aes.key = &ctx->u.aes.key_sg; in ccp_do_cmac_update() 166 rctx->cmd.u.aes.key_len = ctx->u.aes.key_len; in ccp_do_cmac_update() 173 rctx->cmd.u.aes.cmac_key_len = ctx->u.aes.kn_len; in ccp_do_cmac_update() 262 struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); in ccp_aes_cmac_setkey() local [all …]
|
D | ccp-crypto-aes.c | 28 struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm); in ccp_aes_complete() local 34 if (ctx->u.aes.mode != CCP_AES_MODE_ECB) in ccp_aes_complete() 43 struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm)); in ccp_aes_setkey() local 49 ctx->u.aes.type = CCP_AES_TYPE_128; in ccp_aes_setkey() 52 ctx->u.aes.type = CCP_AES_TYPE_192; in ccp_aes_setkey() 55 ctx->u.aes.type = CCP_AES_TYPE_256; in ccp_aes_setkey() 61 ctx->u.aes.mode = alg->mode; in ccp_aes_setkey() 62 ctx->u.aes.key_len = key_len; in ccp_aes_setkey() 64 memcpy(ctx->u.aes.key, key, key_len); in ccp_aes_setkey() 65 sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len); in ccp_aes_setkey() [all …]
|
/linux-4.4.14/drivers/net/ieee802154/ |
D | at86rf230.c | 122 struct at86rf230_state_change *ctx, 348 struct at86rf230_state_change *ctx = context; in at86rf230_async_error_recover() local 349 struct at86rf230_local *lp = ctx->lp; in at86rf230_async_error_recover() 352 at86rf230_async_state_change(lp, ctx, STATE_RX_AACK_ON, NULL); in at86rf230_async_error_recover() 354 if (ctx->free) in at86rf230_async_error_recover() 355 kfree(ctx); in at86rf230_async_error_recover() 360 struct at86rf230_state_change *ctx, int rc) in at86rf230_async_error() argument 364 at86rf230_async_state_change(lp, ctx, STATE_FORCE_TRX_OFF, in at86rf230_async_error() 371 struct at86rf230_state_change *ctx, in at86rf230_async_read_reg() argument 376 u8 *tx_buf = ctx->buf; in at86rf230_async_read_reg() [all …]
|
/linux-4.4.14/kernel/events/ |
D | core.c | 320 static void update_context_time(struct perf_event_context *ctx); 341 __get_cpu_context(struct perf_event_context *ctx) in __get_cpu_context() argument 343 return this_cpu_ptr(ctx->pmu->pmu_cpu_context); in __get_cpu_context() 347 struct perf_event_context *ctx) in perf_ctx_lock() argument 349 raw_spin_lock(&cpuctx->ctx.lock); in perf_ctx_lock() 350 if (ctx) in perf_ctx_lock() 351 raw_spin_lock(&ctx->lock); in perf_ctx_lock() 355 struct perf_event_context *ctx) in perf_ctx_unlock() argument 357 if (ctx) in perf_ctx_unlock() 358 raw_spin_unlock(&ctx->lock); in perf_ctx_unlock() [all …]
|
/linux-4.4.14/arch/powerpc/crypto/ |
D | aes-spe-glue.c | 95 struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm); in ppc_aes_setkey() local 106 ctx->rounds = 4; in ppc_aes_setkey() 107 ppc_expand_key_128(ctx->key_enc, in_key); in ppc_aes_setkey() 110 ctx->rounds = 5; in ppc_aes_setkey() 111 ppc_expand_key_192(ctx->key_enc, in_key); in ppc_aes_setkey() 114 ctx->rounds = 6; in ppc_aes_setkey() 115 ppc_expand_key_256(ctx->key_enc, in_key); in ppc_aes_setkey() 119 ppc_generate_decrypt_key(ctx->key_dec, ctx->key_enc, key_len); in ppc_aes_setkey() 127 struct ppc_xts_ctx *ctx = crypto_tfm_ctx(tfm); in ppc_xts_setkey() local 140 ctx->rounds = 4; in ppc_xts_setkey() [all …]
|
/linux-4.4.14/kernel/locking/ |
D | mutex.c | 119 DEBUG_LOCKS_WARN_ON(ww->ctx); in ww_mutex_lock_acquired() 158 struct ww_acquire_ctx *ctx) in ww_mutex_set_context_fastpath() argument 163 ww_mutex_lock_acquired(lock, ctx); in ww_mutex_set_context_fastpath() 165 lock->ctx = ctx; in ww_mutex_set_context_fastpath() 202 struct ww_acquire_ctx *ctx) in ww_mutex_set_context_slowpath() argument 206 ww_mutex_lock_acquired(lock, ctx); in ww_mutex_set_context_slowpath() 207 lock->ctx = ctx; in ww_mutex_set_context_slowpath() 337 if (READ_ONCE(ww->ctx)) in mutex_optimistic_spin() 459 if (lock->ctx) { in ww_mutex_unlock() 461 DEBUG_LOCKS_WARN_ON(!lock->ctx->acquired); in ww_mutex_unlock() [all …]
|
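The lock->ctx field above is what makes these wound/wait mutexes: it records the acquire context of the current owner so a younger transaction can be told to back off with -EDEADLK. The documented caller-side dance for two locks, sketched (not taken from this file):

    static void example_lock_pair(struct ww_mutex *m1, struct ww_mutex *m2,
                                  struct ww_class *class)
    {
            struct ww_acquire_ctx ctx;
            struct ww_mutex *held = NULL, *want = m1;

            ww_acquire_init(&ctx, class);
            for (;;) {
                    if (ww_mutex_lock(want, &ctx) == -EDEADLK) {
                            /* Wounded: an older transaction needs 'want'.
                             * Drop what we hold, then sleep on the contended
                             * lock with no deadlock check before retrying. */
                            if (held)
                                    ww_mutex_unlock(held);
                            ww_mutex_lock_slow(want, &ctx);
                            held = want;
                    } else if (held) {
                            break;                  /* both locks held */
                    } else {
                            held = want;
                    }
                    want = (held == m1) ? m2 : m1;  /* go get the other */
            }
            ww_acquire_done(&ctx);  /* promise: no further locks in this ctx */

            /* ... critical section over both objects ... */

            ww_mutex_unlock(m1);
            ww_mutex_unlock(m2);
            ww_acquire_fini(&ctx);
    }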
/linux-4.4.14/drivers/crypto/ux500/hash/ |
D | hash_core.c | 148 struct hash_ctx *ctx = data; in hash_dma_callback() local 150 complete(&ctx->device->dma.complete); in hash_dma_callback() 153 static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg, in hash_set_dma_transfer() argument 161 dev_err(ctx->device->dev, "%s: Invalid DMA direction\n", in hash_set_dma_transfer() 168 channel = ctx->device->dma.chan_mem2hash; in hash_set_dma_transfer() 169 ctx->device->dma.sg = sg; in hash_set_dma_transfer() 170 ctx->device->dma.sg_len = dma_map_sg(channel->device->dev, in hash_set_dma_transfer() 171 ctx->device->dma.sg, ctx->device->dma.nents, in hash_set_dma_transfer() 174 if (!ctx->device->dma.sg_len) { in hash_set_dma_transfer() 175 dev_err(ctx->device->dev, "%s: Could not map the sg list (TO_DEVICE)\n", in hash_set_dma_transfer() [all …]
|
/linux-4.4.14/drivers/crypto/nx/ |
D | nx-842.c | 115 struct nx842_crypto_ctx *ctx = crypto_tfm_ctx(tfm); in nx842_crypto_init() local 117 spin_lock_init(&ctx->lock); in nx842_crypto_init() 118 ctx->driver = driver; in nx842_crypto_init() 119 ctx->wmem = kmalloc(driver->workmem_size, GFP_KERNEL); in nx842_crypto_init() 120 ctx->sbounce = (u8 *)__get_free_pages(GFP_KERNEL, BOUNCE_BUFFER_ORDER); in nx842_crypto_init() 121 ctx->dbounce = (u8 *)__get_free_pages(GFP_KERNEL, BOUNCE_BUFFER_ORDER); in nx842_crypto_init() 122 if (!ctx->wmem || !ctx->sbounce || !ctx->dbounce) { in nx842_crypto_init() 123 kfree(ctx->wmem); in nx842_crypto_init() 124 free_page((unsigned long)ctx->sbounce); in nx842_crypto_init() 125 free_page((unsigned long)ctx->dbounce); in nx842_crypto_init() [all …]
|
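nx842_crypto_init() above uses the all-or-nothing allocation idiom: grab every buffer, then test them together, relying on kfree(NULL) and free_pages(0, ...) being no-ops so a single error path can free the lot. Condensed sketch (pairing __get_free_pages() with free_pages() at the same order):

    static int example_alloc_bufs(struct nx842_crypto_ctx *ctx,
                                  size_t wmem_size, unsigned int order)
    {
            ctx->wmem    = kmalloc(wmem_size, GFP_KERNEL);
            ctx->sbounce = (u8 *)__get_free_pages(GFP_KERNEL, order);
            ctx->dbounce = (u8 *)__get_free_pages(GFP_KERNEL, order);

            if (!ctx->wmem || !ctx->sbounce || !ctx->dbounce) {
                    kfree(ctx->wmem);       /* kfree(NULL) is a no-op */
                    free_pages((unsigned long)ctx->sbounce, order);
                    free_pages((unsigned long)ctx->dbounce, order); /* addr 0 ok */
                    return -ENOMEM;
            }
            return 0;
    }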
/linux-4.4.14/drivers/crypto/vmx/ |
D | aes.c | 42 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_init() local 63 ctx->fallback = fallback; in p8_aes_init() 70 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_exit() local 72 if (ctx->fallback) { in p8_aes_exit() 73 crypto_free_cipher(ctx->fallback); in p8_aes_exit() 74 ctx->fallback = NULL; in p8_aes_exit() 82 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_setkey() local 88 ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key); in p8_aes_setkey() 89 ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key); in p8_aes_setkey() 93 ret += crypto_cipher_setkey(ctx->fallback, key, keylen); in p8_aes_setkey() [all …]
|
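p8_aes_init()/p8_aes_setkey() above implement the standard fallback-cipher pattern: allocate a generic implementation of the same algorithm at init, and keep its key in sync on every setkey, so requests can be punted to it when the vector unit cannot be used. The allocation half, sketched:

    static int example_init_fallback(struct crypto_tfm *tfm)
    {
            struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
            const char *alg = crypto_tfm_alg_name(tfm);     /* e.g. "aes" */
            struct crypto_cipher *fallback;

            /* Ask the crypto API for another implementation of the same
             * algorithm; the NEED_FALLBACK mask excludes drivers (like
             * this one) that themselves require a fallback. */
            fallback = crypto_alloc_cipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
            if (IS_ERR(fallback))
                    return PTR_ERR(fallback);

            ctx->fallback = fallback;
            return 0;
    }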
/linux-4.4.14/drivers/mtd/nand/ |
D | au1550nd.c | 200 struct au1550nd_ctx *ctx = container_of(mtd, struct au1550nd_ctx, info); in au1550_hwcontrol() local 206 this->IO_ADDR_W = ctx->base + MEM_STNAND_CMD; in au1550_hwcontrol() 210 this->IO_ADDR_W = ctx->base + MEM_STNAND_DATA; in au1550_hwcontrol() 214 this->IO_ADDR_W = ctx->base + MEM_STNAND_ADDR; in au1550_hwcontrol() 218 this->IO_ADDR_W = ctx->base + MEM_STNAND_DATA; in au1550_hwcontrol() 226 alchemy_wrsmem((1 << (4 + ctx->cs)), AU1000_MEM_STNDCTL); in au1550_hwcontrol() 270 struct au1550nd_ctx *ctx = container_of(mtd, struct au1550nd_ctx, info); in au1550_command() local 294 ctx->write_byte(mtd, readcmd); in au1550_command() 296 ctx->write_byte(mtd, command); in au1550_command() 310 ctx->write_byte(mtd, column); in au1550_command() [all …]
|
/linux-4.4.14/arch/arm64/crypto/ |
D | ghash-ce-glue.c | 41 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_init() local 43 *ctx = (struct ghash_desc_ctx){}; in ghash_init() 50 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_update() local 51 unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; in ghash_update() 53 ctx->count += len; in ghash_update() 62 memcpy(ctx->buf + partial, src, p); in ghash_update() 71 pmull_ghash_update(blocks, ctx->digest, src, key, in ghash_update() 72 partial ? ctx->buf : NULL); in ghash_update() 78 memcpy(ctx->buf + partial, src, len); in ghash_update() 84 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_final() local [all …]
|
/linux-4.4.14/tools/perf/bench/ |
D | sched-messaging.c | 83 static void *sender(struct sender_context *ctx) in sender() argument 88 ready(ctx->ready_out, ctx->wakefd); in sender() 92 for (j = 0; j < ctx->num_fds; j++) { in sender() 96 ret = write(ctx->out_fds[j], data + done, in sender() 111 static void *receiver(struct receiver_context* ctx) in receiver() argument 116 close(ctx->in_fds[1]); in receiver() 119 ready(ctx->ready_out, ctx->wakefd); in receiver() 122 for (i = 0; i < ctx->num_packets; i++) { in receiver() 127 ret = read(ctx->in_fds[0], data + done, DATASIZE - done); in receiver() 138 static pthread_t create_worker(void *ctx, void *(*func)(void *)) in create_worker() argument [all …]
|
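sender() above loops because write(2) may accept fewer bytes than asked; the benchmark tracks a done offset per message until the full DATASIZE chunk is out. That loop isolated as a self-contained helper:

    #include <unistd.h>

    /* Write exactly len bytes or fail; returns 0, or -1 with errno set. */
    static int example_write_all(int fd, const char *buf, size_t len)
    {
            size_t done = 0;

            while (done < len) {
                    ssize_t ret = write(fd, buf + done, len - done);

                    if (ret < 0)
                            return -1;      /* caller reports the error */
                    done += (size_t)ret;
            }
            return 0;
    }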
/linux-4.4.14/drivers/infiniband/hw/mlx4/ |
D | mad.c | 99 __be64 mlx4_ib_get_new_demux_tid(struct mlx4_ib_demux_ctx *ctx) in mlx4_ib_get_new_demux_tid() argument 101 return cpu_to_be64(atomic_inc_return(&ctx->tid)) | in mlx4_ib_get_new_demux_tid() 1127 struct mlx4_ib_demux_pv_ctx *ctx = cq->cq_context; in mlx4_ib_tunnel_comp_handler() local 1128 struct mlx4_ib_dev *dev = to_mdev(ctx->ib_dev); in mlx4_ib_tunnel_comp_handler() 1130 if (!dev->sriov.is_going_down && ctx->state == DEMUX_PV_STATE_ACTIVE) in mlx4_ib_tunnel_comp_handler() 1131 queue_work(ctx->wq, &ctx->work); in mlx4_ib_tunnel_comp_handler() 1135 static int mlx4_ib_post_pv_qp_buf(struct mlx4_ib_demux_pv_ctx *ctx, in mlx4_ib_post_pv_qp_buf() argument 1148 sg_list.lkey = ctx->pd->local_dma_lkey; in mlx4_ib_post_pv_qp_buf() 1155 ib_dma_sync_single_for_device(ctx->ib_dev, tun_qp->ring[index].map, in mlx4_ib_post_pv_qp_buf() 1303 static void mlx4_ib_multiplex_mad(struct mlx4_ib_demux_pv_ctx *ctx, struct ib_wc *wc) in mlx4_ib_multiplex_mad() argument [all …]
|
/linux-4.4.14/net/netfilter/ |
D | nf_tables_api.c | 91 static void nft_ctx_init(struct nft_ctx *ctx, in nft_ctx_init() argument 100 ctx->net = net; in nft_ctx_init() 101 ctx->afi = afi; in nft_ctx_init() 102 ctx->table = table; in nft_ctx_init() 103 ctx->chain = chain; in nft_ctx_init() 104 ctx->nla = nla; in nft_ctx_init() 105 ctx->portid = NETLINK_CB(skb).portid; in nft_ctx_init() 106 ctx->report = nlmsg_report(nlh); in nft_ctx_init() 107 ctx->seq = nlh->nlmsg_seq; in nft_ctx_init() 110 static struct nft_trans *nft_trans_alloc(struct nft_ctx *ctx, int msg_type, in nft_trans_alloc() argument [all …]
|
/linux-4.4.14/arch/sparc/crypto/ |
D | aes_glue.c | 169 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm); in aes_set_key() local 174 ctx->expanded_key_length = 0xb0; in aes_set_key() 175 ctx->ops = &aes128_ops; in aes_set_key() 179 ctx->expanded_key_length = 0xd0; in aes_set_key() 180 ctx->ops = &aes192_ops; in aes_set_key() 184 ctx->expanded_key_length = 0xf0; in aes_set_key() 185 ctx->ops = &aes256_ops; in aes_set_key() 193 aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len); in aes_set_key() 194 ctx->key_length = key_len; in aes_set_key() 201 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm); in aes_encrypt() local [all …]
|
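aes_set_key() above resolves the key length to an ops table once, so the encrypt/decrypt paths never re-check the key size; ctx->ops then points at the hand-tuned 128/192/256-bit routines. The selection step as its own helper (the ops struct contents are assumptions; the real tables live in this file):

    struct example_aes_ops {
            void (*encrypt)(const u64 *key, const u32 *in, u32 *out);
            void (*decrypt)(const u64 *key, const u32 *in, u32 *out);
    };

    /* Filled in elsewhere with the asm entry points for each key size. */
    extern const struct example_aes_ops example_aes128_ops,
                                        example_aes192_ops,
                                        example_aes256_ops;

    static const struct example_aes_ops *example_pick_ops(unsigned int key_len)
    {
            switch (key_len) {
            case 16: return &example_aes128_ops;    /* AES-128, 10 rounds */
            case 24: return &example_aes192_ops;    /* AES-192, 12 rounds */
            case 32: return &example_aes256_ops;    /* AES-256, 14 rounds */
            default: return NULL;                   /* reject bad key sizes */
            }
    }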
/linux-4.4.14/drivers/media/firewire/ |
D | firedtv-fw.c | 84 static int queue_iso(struct fdtv_ir_context *ctx, int index) in queue_iso() argument 89 p.interrupt = !(++ctx->interrupt_packet & (IRQ_INTERVAL - 1)); in queue_iso() 93 return fw_iso_context_queue(ctx->context, &p, &ctx->buffer, in queue_iso() 101 struct fdtv_ir_context *ctx = fdtv->ir_context; in handle_iso() local 103 int length, err, i = ctx->current_packet; in handle_iso() 113 p = ctx->pages[i / PACKETS_PER_PAGE] in handle_iso() 121 err = queue_iso(ctx, i); in handle_iso() 127 fw_iso_context_queue_flush(ctx->context); in handle_iso() 128 ctx->current_packet = i; in handle_iso() 133 struct fdtv_ir_context *ctx; in fdtv_start_iso() local [all …]
|
/linux-4.4.14/arch/powerpc/mm/ |
D | mmu_context_hash32.c | 66 unsigned long ctx = next_mmu_context; in __init_new_context() local 68 while (test_and_set_bit(ctx, context_map)) { in __init_new_context() 69 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx); in __init_new_context() 70 if (ctx > LAST_CONTEXT) in __init_new_context() 71 ctx = 0; in __init_new_context() 73 next_mmu_context = (ctx + 1) & LAST_CONTEXT; in __init_new_context() 75 return ctx; in __init_new_context() 92 void __destroy_context(unsigned long ctx) in __destroy_context() argument 94 clear_bit(ctx, context_map); in __destroy_context()
|
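__init_new_context() above is a rotating-bitmap ID allocator: resume scanning at the last handed-out context number, claim the first free bit atomically, wrap at LAST_CONTEXT; __destroy_context() just clears the bit. The same allocator written generically (like the original, it assumes a free ID always exists):

    static unsigned long example_alloc_id(unsigned long *map,
                                          unsigned long nbits,
                                          unsigned long *next_hint)
    {
            unsigned long id = *next_hint;

            /* test_and_set_bit() claims the bit atomically; if it was
             * already taken, scan onward for the next clear one. */
            while (test_and_set_bit(id, map)) {
                    id = find_next_zero_bit(map, nbits, id);
                    if (id >= nbits)
                            id = 0;         /* wrap around and rescan */
            }
            *next_hint = (id + 1) % nbits;
            return id;
    }

    /* Freeing is just clear_bit(id, map), as in __destroy_context(). */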
/linux-4.4.14/arch/microblaze/include/asm/ |
D | mmu_context_mm.h | 26 # define CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \ argument 83 mm_context_t ctx; in get_mmu_context() local 89 ctx = next_mmu_context; in get_mmu_context() 90 while (test_and_set_bit(ctx, context_map)) { in get_mmu_context() 91 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx); in get_mmu_context() 92 if (ctx > LAST_CONTEXT) in get_mmu_context() 93 ctx = 0; in get_mmu_context() 95 next_mmu_context = (ctx + 1) & LAST_CONTEXT; in get_mmu_context() 96 mm->context = ctx; in get_mmu_context() 97 context_mm[ctx] = mm; in get_mmu_context()
|