
Searched refs:ctx (Results 1 – 200 of 899) sorted by relevance


/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/gr/
ctxnv50.c
170 static void nv50_gr_construct_mmio(struct nvkm_grctx *ctx);
171 static void nv50_gr_construct_xfer1(struct nvkm_grctx *ctx);
172 static void nv50_gr_construct_xfer2(struct nvkm_grctx *ctx);
177 nv50_grctx_generate(struct nvkm_grctx *ctx) in nv50_grctx_generate() argument
179 cp_set (ctx, STATE, RUNNING); in nv50_grctx_generate()
180 cp_set (ctx, XFER_SWITCH, ENABLE); in nv50_grctx_generate()
182 cp_bra (ctx, AUTO_SAVE, PENDING, cp_setup_save); in nv50_grctx_generate()
183 cp_bra (ctx, USER_SAVE, PENDING, cp_setup_save); in nv50_grctx_generate()
185 cp_name(ctx, cp_check_load); in nv50_grctx_generate()
186 cp_bra (ctx, AUTO_LOAD, PENDING, cp_setup_auto_load); in nv50_grctx_generate()
[all …]
ctxnv40.c
160 nv40_gr_construct_general(struct nvkm_grctx *ctx) in nv40_gr_construct_general() argument
162 struct nvkm_device *device = ctx->device; in nv40_gr_construct_general()
165 cp_ctx(ctx, 0x4000a4, 1); in nv40_gr_construct_general()
166 gr_def(ctx, 0x4000a4, 0x00000008); in nv40_gr_construct_general()
167 cp_ctx(ctx, 0x400144, 58); in nv40_gr_construct_general()
168 gr_def(ctx, 0x400144, 0x00000001); in nv40_gr_construct_general()
169 cp_ctx(ctx, 0x400314, 1); in nv40_gr_construct_general()
170 gr_def(ctx, 0x400314, 0x00000000); in nv40_gr_construct_general()
171 cp_ctx(ctx, 0x400400, 10); in nv40_gr_construct_general()
172 cp_ctx(ctx, 0x400480, 10); in nv40_gr_construct_general()
[all …]
ctxnv40.h
23 cp_out(struct nvkm_grctx *ctx, u32 inst) in cp_out() argument
25 u32 *ctxprog = ctx->data; in cp_out()
27 if (ctx->mode != NVKM_GRCTX_PROG) in cp_out()
30 BUG_ON(ctx->ctxprog_len == ctx->ctxprog_max); in cp_out()
31 ctxprog[ctx->ctxprog_len++] = inst; in cp_out()
35 cp_lsr(struct nvkm_grctx *ctx, u32 val) in cp_lsr() argument
37 cp_out(ctx, CP_LOAD_SR | val); in cp_lsr()
41 cp_ctx(struct nvkm_grctx *ctx, u32 reg, u32 length) in cp_ctx() argument
43 ctx->ctxprog_reg = (reg - 0x00400000) >> 2; in cp_ctx()
45 ctx->ctxvals_base = ctx->ctxvals_pos; in cp_ctx()
[all …]
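
Aside: the cp_out() hit above is the heart of the ctxprog builder — each call appends one 32-bit instruction to a bounded buffer, and is a no-op unless the generator runs in NVKM_GRCTX_PROG mode, so the same construct_* routines can serve passes that only compute context values. A minimal userspace sketch of that pattern (names illustrative, not the driver's):

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    enum gen_mode { GEN_VALS, GEN_PROG };

    struct genctx {
        enum gen_mode mode;
        uint32_t *prog;          /* instruction buffer */
        size_t len, max;         /* used / capacity, in words */
    };

    static void emit_op(struct genctx *g, uint32_t inst)
    {
        if (g->mode != GEN_PROG)
            return;              /* value-only pass emits nothing */
        assert(g->len < g->max); /* mirrors the BUG_ON() overflow check */
        g->prog[g->len++] = inst;
    }

    int main(void)
    {
        uint32_t buf[8];
        struct genctx g = { GEN_PROG, buf, 0, 8 };

        emit_op(&g, 0x00600005); /* arbitrary opcode word */
        printf("%zu word(s) emitted\n", g.len);
        return 0;
    }
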
/linux-4.1.27/arch/powerpc/platforms/cell/spufs/
context.c
38 struct spu_context *ctx; in alloc_spu_context() local
40 ctx = kzalloc(sizeof *ctx, GFP_KERNEL); in alloc_spu_context()
41 if (!ctx) in alloc_spu_context()
46 if (spu_init_csa(&ctx->csa)) in alloc_spu_context()
48 spin_lock_init(&ctx->mmio_lock); in alloc_spu_context()
49 mutex_init(&ctx->mapping_lock); in alloc_spu_context()
50 kref_init(&ctx->kref); in alloc_spu_context()
51 mutex_init(&ctx->state_mutex); in alloc_spu_context()
52 mutex_init(&ctx->run_mutex); in alloc_spu_context()
53 init_waitqueue_head(&ctx->ibox_wq); in alloc_spu_context()
[all …]
run.c
16 struct spu_context *ctx = spu->ctx; in spufs_stop_callback() local
25 if (ctx) { in spufs_stop_callback()
29 ctx->csa.class_0_pending = spu->class_0_pending; in spufs_stop_callback()
30 ctx->csa.class_0_dar = spu->class_0_dar; in spufs_stop_callback()
33 ctx->csa.class_1_dsisr = spu->class_1_dsisr; in spufs_stop_callback()
34 ctx->csa.class_1_dar = spu->class_1_dar; in spufs_stop_callback()
44 wake_up_all(&ctx->stop_wq); in spufs_stop_callback()
48 int spu_stopped(struct spu_context *ctx, u32 *stat) in spu_stopped() argument
57 *stat = ctx->ops->status_read(ctx); in spu_stopped()
68 if (test_bit(SPU_SCHED_NOTIFY_ACTIVE, &ctx->sched_flags)) in spu_stopped()
[all …]
backing_ops.c
47 static void gen_spu_event(struct spu_context *ctx, u32 event) in gen_spu_event() argument
53 ch0_cnt = ctx->csa.spu_chnlcnt_RW[0]; in gen_spu_event()
54 ch0_data = ctx->csa.spu_chnldata_RW[0]; in gen_spu_event()
55 ch1_data = ctx->csa.spu_chnldata_RW[1]; in gen_spu_event()
56 ctx->csa.spu_chnldata_RW[0] |= event; in gen_spu_event()
58 ctx->csa.spu_chnlcnt_RW[0] = 1; in gen_spu_event()
62 static int spu_backing_mbox_read(struct spu_context *ctx, u32 * data) in spu_backing_mbox_read() argument
67 spin_lock(&ctx->csa.register_lock); in spu_backing_mbox_read()
68 mbox_stat = ctx->csa.prob.mb_stat_R; in spu_backing_mbox_read()
74 *data = ctx->csa.prob.pu_mb_R; in spu_backing_mbox_read()
[all …]
sched.c
97 void spu_set_timeslice(struct spu_context *ctx) in spu_set_timeslice() argument
99 if (ctx->prio < NORMAL_PRIO) in spu_set_timeslice()
100 ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE * 4, ctx->prio); in spu_set_timeslice()
102 ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE, ctx->prio); in spu_set_timeslice()
108 void __spu_update_sched_info(struct spu_context *ctx) in __spu_update_sched_info() argument
114 BUG_ON(!list_empty(&ctx->rq)); in __spu_update_sched_info()
121 ctx->tid = current->pid; in __spu_update_sched_info()
130 ctx->prio = current->prio; in __spu_update_sched_info()
132 ctx->prio = current->static_prio; in __spu_update_sched_info()
133 ctx->policy = current->policy; in __spu_update_sched_info()
[all …]
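
The spu_set_timeslice() hit above scales a context's timeslice by task priority, with a 4x base slice for realtime priorities. SCALE_PRIO's definition is not among the hits, so the sketch below assumes a simple linear scaling with a floor, only to make the shape of the computation concrete:

    #include <stdio.h>

    #define MAX_PRIO      140    /* illustrative, matching the usual */
    #define MAX_USER_PRIO 40     /*   kernel priority layout */
    #define NORMAL_PRIO   120
    #define DEF_TIMESLICE 100    /* base slice in ticks (made up) */
    #define MIN_TIMESLICE 10

    static int scale_prio(int base, int prio)
    {
        int slice = base * (MAX_PRIO - prio) / (MAX_USER_PRIO / 2);

        return slice > MIN_TIMESLICE ? slice : MIN_TIMESLICE;
    }

    static int set_timeslice(int prio)
    {
        /* realtime contexts (prio < NORMAL_PRIO) get a 4x base */
        if (prio < NORMAL_PRIO)
            return scale_prio(DEF_TIMESLICE * 4, prio);
        return scale_prio(DEF_TIMESLICE, prio);
    }

    int main(void)
    {
        printf("prio 100 -> %d ticks, prio 130 -> %d ticks\n",
               set_timeslice(100), set_timeslice(130));
        return 0;
    }
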
file.c
164 struct spu_context *ctx = i->i_ctx; in spufs_mem_open() local
166 mutex_lock(&ctx->mapping_lock); in spufs_mem_open()
167 file->private_data = ctx; in spufs_mem_open()
169 ctx->local_store = inode->i_mapping; in spufs_mem_open()
170 mutex_unlock(&ctx->mapping_lock); in spufs_mem_open()
178 struct spu_context *ctx = i->i_ctx; in spufs_mem_release() local
180 mutex_lock(&ctx->mapping_lock); in spufs_mem_release()
182 ctx->local_store = NULL; in spufs_mem_release()
183 mutex_unlock(&ctx->mapping_lock); in spufs_mem_release()
188 __spufs_mem_read(struct spu_context *ctx, char __user *buffer, in __spufs_mem_read() argument
[all …]
fault.c
36 static void spufs_handle_event(struct spu_context *ctx, in spufs_handle_event() argument
41 if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) { in spufs_handle_event()
42 ctx->event_return |= type; in spufs_handle_event()
43 wake_up_all(&ctx->stop_wq); in spufs_handle_event()
58 ctx->ops->restart_dma(ctx); in spufs_handle_event()
68 ctx->ops->npc_read(ctx) - 4; in spufs_handle_event()
77 int spufs_handle_class0(struct spu_context *ctx) in spufs_handle_class0() argument
79 unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK; in spufs_handle_class0()
85 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
89 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
[all …]
spufs.h
188 int (*mbox_read) (struct spu_context * ctx, u32 * data);
189 u32(*mbox_stat_read) (struct spu_context * ctx);
190 unsigned int (*mbox_stat_poll)(struct spu_context *ctx,
192 int (*ibox_read) (struct spu_context * ctx, u32 * data);
193 int (*wbox_write) (struct spu_context * ctx, u32 data);
194 u32(*signal1_read) (struct spu_context * ctx);
195 void (*signal1_write) (struct spu_context * ctx, u32 data);
196 u32(*signal2_read) (struct spu_context * ctx);
197 void (*signal2_write) (struct spu_context * ctx, u32 data);
198 void (*signal1_type_set) (struct spu_context * ctx, u64 val);
[all …]
hw_ops.c
37 static int spu_hw_mbox_read(struct spu_context *ctx, u32 * data) in spu_hw_mbox_read() argument
39 struct spu *spu = ctx->spu; in spu_hw_mbox_read()
54 static u32 spu_hw_mbox_stat_read(struct spu_context *ctx) in spu_hw_mbox_stat_read() argument
56 return in_be32(&ctx->spu->problem->mb_stat_R); in spu_hw_mbox_stat_read()
59 static unsigned int spu_hw_mbox_stat_poll(struct spu_context *ctx, in spu_hw_mbox_stat_poll() argument
62 struct spu *spu = ctx->spu; in spu_hw_mbox_stat_poll()
96 static int spu_hw_ibox_read(struct spu_context *ctx, u32 * data) in spu_hw_ibox_read() argument
98 struct spu *spu = ctx->spu; in spu_hw_ibox_read()
117 static int spu_hw_wbox_write(struct spu_context *ctx, u32 data) in spu_hw_wbox_write() argument
119 struct spu *spu = ctx->spu; in spu_hw_wbox_write()
[all …]
coredump.c
37 static ssize_t do_coredump_read(int num, struct spu_context *ctx, void *buffer, in do_coredump_read() argument
44 return spufs_coredump_read[num].read(ctx, buffer, size, off); in do_coredump_read()
46 data = spufs_coredump_read[num].get(ctx); in do_coredump_read()
53 static int spufs_ctx_note_size(struct spu_context *ctx, int dfd) in spufs_ctx_note_size() argument
75 struct spu_context *ctx; in match_context() local
78 ctx = SPUFS_I(file_inode(file))->i_ctx; in match_context()
79 if (ctx->flags & SPU_CREATE_NOSCHED) in match_context()
109 struct spu_context *ctx; in spufs_coredump_extra_notes_size() local
113 while ((ctx = coredump_next_context(&fd)) != NULL) { in spufs_coredump_extra_notes_size()
114 rc = spu_acquire_saved(ctx); in spufs_coredump_extra_notes_size()
[all …]
/linux-4.1.27/drivers/staging/skein/
skein_base.c
23 int skein_256_init(struct skein_256_ctx *ctx, size_t hash_bit_len) in skein_256_init() argument
31 ctx->h.hash_bit_len = hash_bit_len; /* output hash bit count */ in skein_256_init()
35 memcpy(ctx->x, SKEIN_256_IV_256, sizeof(ctx->x)); in skein_256_init()
38 memcpy(ctx->x, SKEIN_256_IV_224, sizeof(ctx->x)); in skein_256_init()
41 memcpy(ctx->x, SKEIN_256_IV_160, sizeof(ctx->x)); in skein_256_init()
44 memcpy(ctx->x, SKEIN_256_IV_128, sizeof(ctx->x)); in skein_256_init()
53 skein_start_new_type(ctx, CFG_FINAL); in skein_256_init()
65 memset(ctx->x, 0, sizeof(ctx->x)); in skein_256_init()
66 skein_256_process_block(ctx, cfg.b, 1, SKEIN_CFG_STR_LEN); in skein_256_init()
71 skein_start_new_type(ctx, MSG); /* T0=0, T1= MSG type */ in skein_256_init()
[all …]
skein_api.c
30 int skein_ctx_prepare(struct skein_ctx *ctx, enum skein_size size) in skein_ctx_prepare() argument
32 skein_assert_ret(ctx && size, SKEIN_FAIL); in skein_ctx_prepare()
34 memset(ctx, 0, sizeof(struct skein_ctx)); in skein_ctx_prepare()
35 ctx->skein_size = size; in skein_ctx_prepare()
40 int skein_init(struct skein_ctx *ctx, size_t hash_bit_len) in skein_init() argument
47 skein_assert_ret(ctx, SKEIN_FAIL); in skein_init()
53 x = ctx->m.s256.x; in skein_init()
54 x_len = ctx->skein_size/8; in skein_init()
59 switch (ctx->skein_size) { in skein_init()
61 ret = skein_256_init_ext(&ctx->m.s256, hash_bit_len, in skein_init()
[all …]
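
skein_ctx_prepare()/skein_init() above are the setup half of the staging driver's wrapper API. A hedged usage sketch (kernel context, not standalone), assuming the matching skein_update()/skein_final() declared in skein_api.h take (ctx, buf, len) and (ctx, hash):

    #include "skein_api.h"

    /* Hash a message to a 256-bit digest with the one-shot sequence. */
    static int hash_msg(const u8 *msg, size_t len, u8 *out /* 32 bytes */)
    {
        struct skein_ctx ctx;
        int ret;

        ret = skein_ctx_prepare(&ctx, SKEIN_256); /* zero + record size */
        if (ret != SKEIN_SUCCESS)
            return ret;
        ret = skein_init(&ctx, 256);              /* 256-bit output */
        if (ret != SKEIN_SUCCESS)
            return ret;
        ret = skein_update(&ctx, msg, len);
        if (ret != SKEIN_SUCCESS)
            return ret;
        return skein_final(&ctx, out);
    }
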
skein_block.c
36 #define debug_save_tweak(ctx) \ argument
38 ctx->h.tweak[0] = ts[0]; \
39 ctx->h.tweak[1] = ts[1]; \
42 #define debug_save_tweak(ctx) argument
345 void skein_256_process_block(struct skein_256_ctx *ctx, const u8 *blk_ptr, in skein_256_process_block() argument
370 ts[0] = ctx->h.tweak[0]; in skein_256_process_block()
371 ts[1] = ctx->h.tweak[1]; in skein_256_process_block()
380 ks[0] = ctx->x[0]; in skein_256_process_block()
381 ks[1] = ctx->x[1]; in skein_256_process_block()
382 ks[2] = ctx->x[2]; in skein_256_process_block()
[all …]
/linux-4.1.27/arch/mips/net/
bpf_jit.c
154 static inline void emit_jit_reg_move(ptr dst, ptr src, struct jit_ctx *ctx);
157 #define emit_instr(ctx, func, ...) \ argument
159 if ((ctx)->target != NULL) { \
160 u32 *p = &(ctx)->target[ctx->idx]; \
163 (ctx)->idx++; \
170 #define emit_long_instr(ctx, func, ...) \ argument
172 if ((ctx)->target != NULL) { \
173 u32 *p = &(ctx)->target[ctx->idx]; \
176 (ctx)->idx++; \
186 unsigned int src2, struct jit_ctx *ctx) in emit_addu() argument
[all …]
/linux-4.1.27/arch/arm/net/
bpf_jit_32.c
116 static inline void _emit(int cond, u32 inst, struct jit_ctx *ctx) in _emit() argument
121 if (ctx->target != NULL) in _emit()
122 ctx->target[ctx->idx] = inst; in _emit()
124 ctx->idx++; in _emit()
130 static inline void emit(u32 inst, struct jit_ctx *ctx) in emit() argument
132 _emit(ARM_COND_AL, inst, ctx); in emit()
135 static u16 saved_regs(struct jit_ctx *ctx) in saved_regs() argument
139 if ((ctx->skf->len > 1) || in saved_regs()
140 (ctx->skf->insns[0].code == (BPF_RET | BPF_A))) in saved_regs()
146 if (ctx->seen & SEEN_CALL) in saved_regs()
[all …]
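
Both BPF JITs above share one emit idiom: the instruction is stored only when ctx->target is non-NULL, but ctx->idx advances unconditionally. That enables a two-pass JIT — pass one with a NULL target merely measures the program, the buffer is then allocated, and pass two emits for real. A self-contained sketch:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct jit_ctx {
        uint32_t *target;        /* NULL during the sizing pass */
        unsigned int idx;
    };

    static void emit(struct jit_ctx *ctx, uint32_t inst)
    {
        if (ctx->target != NULL)
            ctx->target[ctx->idx] = inst;
        ctx->idx++;              /* counted in both passes */
    }

    static void build_prog(struct jit_ctx *ctx)
    {
        emit(ctx, 0xe1a00000);   /* ARM "mov r0, r0" */
        emit(ctx, 0xe12fff1e);   /* ARM "bx lr" */
    }

    int main(void)
    {
        struct jit_ctx ctx = { NULL, 0 };

        build_prog(&ctx);        /* pass 1: size the image */
        ctx.target = calloc(ctx.idx, sizeof(uint32_t));
        ctx.idx = 0;
        build_prog(&ctx);        /* pass 2: emit for real */
        printf("emitted %u instructions\n", ctx.idx);
        free(ctx.target);
        return 0;
    }
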
/linux-4.1.27/drivers/gpu/drm/panel/
panel-s6e8aa0.c
128 static int s6e8aa0_clear_error(struct s6e8aa0 *ctx) in s6e8aa0_clear_error() argument
130 int ret = ctx->error; in s6e8aa0_clear_error()
132 ctx->error = 0; in s6e8aa0_clear_error()
136 static void s6e8aa0_dcs_write(struct s6e8aa0 *ctx, const void *data, size_t len) in s6e8aa0_dcs_write() argument
138 struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev); in s6e8aa0_dcs_write()
141 if (ctx->error < 0) in s6e8aa0_dcs_write()
146 dev_err(ctx->dev, "error %zd writing dcs seq: %*ph\n", ret, in s6e8aa0_dcs_write()
148 ctx->error = ret; in s6e8aa0_dcs_write()
152 static int s6e8aa0_dcs_read(struct s6e8aa0 *ctx, u8 cmd, void *data, size_t len) in s6e8aa0_dcs_read() argument
154 struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev); in s6e8aa0_dcs_read()
[all …]
panel-ld9040.c
118 static int ld9040_clear_error(struct ld9040 *ctx) in ld9040_clear_error() argument
120 int ret = ctx->error; in ld9040_clear_error()
122 ctx->error = 0; in ld9040_clear_error()
126 static int ld9040_spi_write_word(struct ld9040 *ctx, u16 data) in ld9040_spi_write_word() argument
128 struct spi_device *spi = to_spi_device(ctx->dev); in ld9040_spi_write_word()
141 static void ld9040_dcs_write(struct ld9040 *ctx, const u8 *data, size_t len) in ld9040_dcs_write() argument
145 if (ctx->error < 0 || len == 0) in ld9040_dcs_write()
148 dev_dbg(ctx->dev, "writing dcs seq: %*ph\n", (int)len, data); in ld9040_dcs_write()
149 ret = ld9040_spi_write_word(ctx, *data); in ld9040_dcs_write()
153 ret = ld9040_spi_write_word(ctx, *data | 0x100); in ld9040_dcs_write()
[all …]
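
Both panel drivers use a sticky-error idiom: the first failure is latched in ctx->error, later writes become no-ops, and *_clear_error() returns and resets the code so a whole command sequence can be checked once at the end. A minimal sketch:

    #include <stdio.h>

    struct panel {
        int error;               /* 0, or first negative error code */
    };

    static void panel_write(struct panel *p, int simulated_ret)
    {
        if (p->error < 0)
            return;                   /* earlier failure: skip the write */
        if (simulated_ret < 0)
            p->error = simulated_ret; /* latch the first error only */
    }

    static int panel_clear_error(struct panel *p)
    {
        int ret = p->error;

        p->error = 0;
        return ret;
    }

    int main(void)
    {
        struct panel p = { 0 };

        panel_write(&p, 0);
        panel_write(&p, -5);     /* -EIO-style failure is latched */
        panel_write(&p, 0);      /* silently skipped */
        printf("sequence result: %d\n", panel_clear_error(&p));
        return 0;
    }
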
/linux-4.1.27/drivers/misc/cxl/
context.c
37 int cxl_context_init(struct cxl_context *ctx, struct cxl_afu *afu, bool master, in cxl_context_init() argument
42 spin_lock_init(&ctx->sste_lock); in cxl_context_init()
43 ctx->afu = afu; in cxl_context_init()
44 ctx->master = master; in cxl_context_init()
45 ctx->pid = NULL; /* Set in start work ioctl */ in cxl_context_init()
46 mutex_init(&ctx->mapping_lock); in cxl_context_init()
47 ctx->mapping = mapping; in cxl_context_init()
56 i = cxl_alloc_sst(ctx); in cxl_context_init()
60 INIT_WORK(&ctx->fault_work, cxl_handle_fault); in cxl_context_init()
62 init_waitqueue_head(&ctx->wq); in cxl_context_init()
[all …]
fault.c
35 static struct cxl_sste* find_free_sste(struct cxl_context *ctx, in find_free_sste() argument
39 unsigned int mask = (ctx->sst_size >> 7) - 1; /* SSTP0[SegTableSize] */ in find_free_sste()
48 primary = ctx->sstp + (hash << 3); in find_free_sste()
60 ret = primary + ctx->sst_lru; in find_free_sste()
61 ctx->sst_lru = (ctx->sst_lru + 1) & 0x7; in find_free_sste()
66 static void cxl_load_segment(struct cxl_context *ctx, struct copro_slb *slb) in cxl_load_segment() argument
72 spin_lock_irqsave(&ctx->sste_lock, flags); in cxl_load_segment()
73 sste = find_free_sste(ctx, slb); in cxl_load_segment()
78 sste - ctx->sstp, slb->vsid, slb->esid); in cxl_load_segment()
79 trace_cxl_ste_write(ctx, sste - ctx->sstp, slb->esid, slb->vsid); in cxl_load_segment()
[all …]
native.c
269 static void slb_invalid(struct cxl_context *ctx) in slb_invalid() argument
271 struct cxl *adapter = ctx->afu->adapter; in slb_invalid()
274 WARN_ON(!mutex_is_locked(&ctx->afu->spa_mutex)); in slb_invalid()
277 ((u64)be32_to_cpu(ctx->elem->common.pid) << 32) | in slb_invalid()
278 be32_to_cpu(ctx->elem->lpid)); in slb_invalid()
289 static int do_process_element_cmd(struct cxl_context *ctx, in do_process_element_cmd() argument
296 trace_cxl_llcmd(ctx, cmd); in do_process_element_cmd()
298 WARN_ON(!ctx->afu->enabled); in do_process_element_cmd()
300 ctx->elem->software_state = cpu_to_be32(pe_state); in do_process_element_cmd()
302 *(ctx->afu->sw_command_status) = cpu_to_be64(cmd | 0 | ctx->pe); in do_process_element_cmd()
[all …]
irq.c
23 static irqreturn_t handle_psl_slice_error(struct cxl_context *ctx, u64 dsisr, u64 errstat) in handle_psl_slice_error() argument
27 fir1 = cxl_p1_read(ctx->afu->adapter, CXL_PSL_FIR1); in handle_psl_slice_error()
28 fir2 = cxl_p1_read(ctx->afu->adapter, CXL_PSL_FIR2); in handle_psl_slice_error()
29 fir_slice = cxl_p1n_read(ctx->afu, CXL_PSL_FIR_SLICE_An); in handle_psl_slice_error()
30 serr = cxl_p1n_read(ctx->afu, CXL_PSL_SERR_An); in handle_psl_slice_error()
31 afu_debug = cxl_p1n_read(ctx->afu, CXL_AFU_DEBUG_An); in handle_psl_slice_error()
33 dev_crit(&ctx->afu->dev, "PSL ERROR STATUS: 0x%.16llx\n", errstat); in handle_psl_slice_error()
34 dev_crit(&ctx->afu->dev, "PSL_FIR1: 0x%.16llx\n", fir1); in handle_psl_slice_error()
35 dev_crit(&ctx->afu->dev, "PSL_FIR2: 0x%.16llx\n", fir2); in handle_psl_slice_error()
36 dev_crit(&ctx->afu->dev, "PSL_SERR_An: 0x%.16llx\n", serr); in handle_psl_slice_error()
[all …]
file.c
52 struct cxl_context *ctx; in __afu_open() local
76 if (!(ctx = cxl_context_alloc())) { in __afu_open()
81 if ((rc = cxl_context_init(ctx, afu, master, inode->i_mapping))) in __afu_open()
84 pr_devel("afu_open pe: %i\n", ctx->pe); in __afu_open()
85 file->private_data = ctx; in __afu_open()
111 struct cxl_context *ctx = file->private_data; in afu_release() local
114 __func__, ctx->pe); in afu_release()
115 cxl_context_detach(ctx); in afu_release()
117 mutex_lock(&ctx->mapping_lock); in afu_release()
118 ctx->mapping = NULL; in afu_release()
[all …]
trace.h
59 TP_PROTO(struct cxl_context *ctx),
61 TP_ARGS(ctx),
70 __entry->card = ctx->afu->adapter->adapter_num;
71 __entry->afu = ctx->afu->slice;
72 __entry->pe = ctx->pe;
84 TP_PROTO(struct cxl_context *ctx, u64 wed, s16 num_interrupts, u64 amr),
86 TP_ARGS(ctx, wed, num_interrupts, amr),
99 __entry->card = ctx->afu->adapter->adapter_num;
100 __entry->afu = ctx->afu->slice;
101 __entry->pe = ctx->pe;
[all …]
main.c
35 static inline void _cxl_slbia(struct cxl_context *ctx, struct mm_struct *mm) in _cxl_slbia() argument
39 if (!(task = get_pid_task(ctx->pid, PIDTYPE_PID))) { in _cxl_slbia()
41 __func__, pid_nr(ctx->pid)); in _cxl_slbia()
49 ctx->afu->adapter->adapter_num, ctx->afu->slice, ctx->pe); in _cxl_slbia()
51 spin_lock_irqsave(&ctx->sste_lock, flags); in _cxl_slbia()
52 trace_cxl_slbia(ctx); in _cxl_slbia()
53 memset(ctx->sstp, 0, ctx->sst_size); in _cxl_slbia()
54 spin_unlock_irqrestore(&ctx->sste_lock, flags); in _cxl_slbia()
56 cxl_afu_slbia(ctx->afu); in _cxl_slbia()
65 struct cxl_context *ctx; in cxl_slbia_core() local
[all …]
/linux-4.1.27/fs/
timerfd.c
49 static inline bool isalarm(struct timerfd_ctx *ctx) in isalarm() argument
51 return ctx->clockid == CLOCK_REALTIME_ALARM || in isalarm()
52 ctx->clockid == CLOCK_BOOTTIME_ALARM; in isalarm()
60 static void timerfd_triggered(struct timerfd_ctx *ctx) in timerfd_triggered() argument
64 spin_lock_irqsave(&ctx->wqh.lock, flags); in timerfd_triggered()
65 ctx->expired = 1; in timerfd_triggered()
66 ctx->ticks++; in timerfd_triggered()
67 wake_up_locked(&ctx->wqh); in timerfd_triggered()
68 spin_unlock_irqrestore(&ctx->wqh.lock, flags); in timerfd_triggered()
73 struct timerfd_ctx *ctx = container_of(htmr, struct timerfd_ctx, in timerfd_tmrproc() local
[all …]
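
timerfd_ctx above backs the timerfd file descriptor, and the ticks counter bumped in timerfd_triggered() is what a read() returns. Driving the same machinery from userspace through the standard timerfd_create(2)/timerfd_settime(2) API:

    #include <stdint.h>
    #include <stdio.h>
    #include <sys/timerfd.h>
    #include <unistd.h>

    int main(void)
    {
        struct itimerspec its = {
            .it_value    = { .tv_sec = 1 },  /* first expiry after 1s */
            .it_interval = { .tv_sec = 1 },  /* then every second */
        };
        uint64_t ticks;
        int fd = timerfd_create(CLOCK_MONOTONIC, 0);

        timerfd_settime(fd, 0, &its, NULL);
        read(fd, &ticks, sizeof(ticks));     /* blocks until expiry */
        printf("expirations since last read: %llu\n",
               (unsigned long long)ticks);
        close(fd);
        return 0;
    }
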
eventfd.c
54 __u64 eventfd_signal(struct eventfd_ctx *ctx, __u64 n) in eventfd_signal() argument
58 spin_lock_irqsave(&ctx->wqh.lock, flags); in eventfd_signal()
59 if (ULLONG_MAX - ctx->count < n) in eventfd_signal()
60 n = ULLONG_MAX - ctx->count; in eventfd_signal()
61 ctx->count += n; in eventfd_signal()
62 if (waitqueue_active(&ctx->wqh)) in eventfd_signal()
63 wake_up_locked_poll(&ctx->wqh, POLLIN); in eventfd_signal()
64 spin_unlock_irqrestore(&ctx->wqh.lock, flags); in eventfd_signal()
70 static void eventfd_free_ctx(struct eventfd_ctx *ctx) in eventfd_free_ctx() argument
72 kfree(ctx); in eventfd_free_ctx()
[all …]
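
eventfd_signal() above adds to a 64-bit counter, saturating just below ULLONG_MAX, and wakes POLLIN waiters. The userspace view of the same object through eventfd(2): writes add to the counter, a read returns the total and resets it.

    #include <stdint.h>
    #include <stdio.h>
    #include <sys/eventfd.h>
    #include <unistd.h>

    int main(void)
    {
        int fd = eventfd(0, 0);          /* counter starts at 0 */
        uint64_t val = 3, out;

        write(fd, &val, sizeof(val));    /* counter += 3 */
        write(fd, &val, sizeof(val));    /* counter += 3 */
        read(fd, &out, sizeof(out));     /* returns 6, resets to 0 */
        printf("counter read: %llu\n", (unsigned long long)out);
        close(fd);
        return 0;
    }
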
aio.c
205 static struct file *aio_private_file(struct kioctx *ctx, loff_t nr_pages) in aio_private_file() argument
215 inode->i_mapping->private_data = ctx; in aio_private_file()
269 static void put_aio_ring_file(struct kioctx *ctx) in put_aio_ring_file() argument
271 struct file *aio_ring_file = ctx->aio_ring_file; in put_aio_ring_file()
278 ctx->aio_ring_file = NULL; in put_aio_ring_file()
285 static void aio_free_ring(struct kioctx *ctx) in aio_free_ring() argument
292 put_aio_ring_file(ctx); in aio_free_ring()
294 for (i = 0; i < ctx->nr_pages; i++) { in aio_free_ring()
297 page_count(ctx->ring_pages[i])); in aio_free_ring()
298 page = ctx->ring_pages[i]; in aio_free_ring()
[all …]
/linux-4.1.27/sound/soc/intel/atom/sst/
sst.c
200 struct intel_sst_drv *ctx = container_of(work, in sst_process_pending_msg() local
203 ctx->ops->post_message(ctx, NULL, false); in sst_process_pending_msg()
206 static int sst_workqueue_init(struct intel_sst_drv *ctx) in sst_workqueue_init() argument
208 INIT_LIST_HEAD(&ctx->memcpy_list); in sst_workqueue_init()
209 INIT_LIST_HEAD(&ctx->rx_list); in sst_workqueue_init()
210 INIT_LIST_HEAD(&ctx->ipc_dispatch_list); in sst_workqueue_init()
211 INIT_LIST_HEAD(&ctx->block_list); in sst_workqueue_init()
212 INIT_WORK(&ctx->ipc_post_msg_wq, sst_process_pending_msg); in sst_workqueue_init()
213 init_waitqueue_head(&ctx->wait_queue); in sst_workqueue_init()
215 ctx->post_msg_wq = in sst_workqueue_init()
[all …]
sst_acpi.c
143 static int sst_platform_get_resources(struct intel_sst_drv *ctx) in sst_platform_get_resources() argument
146 struct platform_device *pdev = to_platform_device(ctx->dev); in sst_platform_get_resources()
151 ctx->pdata->res_info->acpi_lpe_res_index); in sst_platform_get_resources()
153 dev_err(ctx->dev, "Invalid SHIM base from IFWI"); in sst_platform_get_resources()
156 dev_info(ctx->dev, "LPE base: %#x size:%#x", (unsigned int) rsrc->start, in sst_platform_get_resources()
159 ctx->iram_base = rsrc->start + ctx->pdata->res_info->iram_offset; in sst_platform_get_resources()
160 ctx->iram_end = ctx->iram_base + ctx->pdata->res_info->iram_size - 1; in sst_platform_get_resources()
161 dev_info(ctx->dev, "IRAM base: %#x", ctx->iram_base); in sst_platform_get_resources()
162 ctx->iram = devm_ioremap_nocache(ctx->dev, ctx->iram_base, in sst_platform_get_resources()
163 ctx->pdata->res_info->iram_size); in sst_platform_get_resources()
[all …]
sst_drv_interface.c
51 int free_stream_context(struct intel_sst_drv *ctx, unsigned int str_id) in free_stream_context() argument
56 stream = get_stream_info(ctx, str_id); in free_stream_context()
59 ret = sst_free_stream(ctx, str_id); in free_stream_context()
61 sst_clean_stream(&ctx->streams[str_id]); in free_stream_context()
64 dev_err(ctx->dev, "we tried to free stream context %d which was freed!!!\n", str_id); in free_stream_context()
69 int sst_get_stream_allocated(struct intel_sst_drv *ctx, in sst_get_stream_allocated() argument
75 retval = ctx->ops->alloc_stream(ctx, str_param); in sst_get_stream_allocated()
77 dev_dbg(ctx->dev, "Stream allocated %d\n", retval); in sst_get_stream_allocated()
125 int sst_get_stream(struct intel_sst_drv *ctx, in sst_get_stream() argument
132 retval = ctx->ops->alloc_stream(ctx, str_param); in sst_get_stream()
[all …]
sst_pci.c
33 static int sst_platform_get_resources(struct intel_sst_drv *ctx) in sst_platform_get_resources() argument
36 struct pci_dev *pci = ctx->pci; in sst_platform_get_resources()
44 if (ctx->dev_id == SST_MRFLD_PCI_ID) { in sst_platform_get_resources()
45 ctx->ddr_base = pci_resource_start(pci, 0); in sst_platform_get_resources()
47 ddr_base = relocate_imr_addr_mrfld(ctx->ddr_base); in sst_platform_get_resources()
48 if (!ctx->pdata->lib_info) { in sst_platform_get_resources()
49 dev_err(ctx->dev, "lib_info pointer NULL\n"); in sst_platform_get_resources()
53 if (ddr_base != ctx->pdata->lib_info->mod_base) { in sst_platform_get_resources()
54 dev_err(ctx->dev, in sst_platform_get_resources()
59 ctx->ddr_end = pci_resource_end(pci, 0); in sst_platform_get_resources()
[all …]
/linux-4.1.27/crypto/asymmetric_keys/
x509_cert_parser.c
71 struct x509_parse_context *ctx; in x509_cert_parse() local
82 ctx = kzalloc(sizeof(struct x509_parse_context), GFP_KERNEL); in x509_cert_parse()
83 if (!ctx) in x509_cert_parse()
86 ctx->cert = cert; in x509_cert_parse()
87 ctx->data = (unsigned long)data; in x509_cert_parse()
90 ret = asn1_ber_decoder(&x509_decoder, ctx, data, datalen); in x509_cert_parse()
95 if (ctx->raw_akid) { in x509_cert_parse()
97 ctx->raw_akid_size, ctx->raw_akid_size, ctx->raw_akid); in x509_cert_parse()
98 ret = asn1_ber_decoder(&x509_akid_decoder, ctx, in x509_cert_parse()
99 ctx->raw_akid, ctx->raw_akid_size); in x509_cert_parse()
[all …]
pkcs7_parser.c
88 struct pkcs7_parse_context *ctx; in pkcs7_parse_message() local
92 ctx = kzalloc(sizeof(struct pkcs7_parse_context), GFP_KERNEL); in pkcs7_parse_message()
93 if (!ctx) in pkcs7_parse_message()
95 ctx->msg = kzalloc(sizeof(struct pkcs7_message), GFP_KERNEL); in pkcs7_parse_message()
96 if (!ctx->msg) in pkcs7_parse_message()
98 ctx->sinfo = kzalloc(sizeof(struct pkcs7_signed_info), GFP_KERNEL); in pkcs7_parse_message()
99 if (!ctx->sinfo) in pkcs7_parse_message()
102 ctx->data = (unsigned long)data; in pkcs7_parse_message()
103 ctx->ppcerts = &ctx->certs; in pkcs7_parse_message()
104 ctx->ppsinfo = &ctx->msg->signed_infos; in pkcs7_parse_message()
[all …]
verify_pefile.c
27 struct pefile_context *ctx) in pefile_parse_binary() argument
64 ctx->image_checksum_offset = in pefile_parse_binary()
66 ctx->header_size = pe32->header_size; in pefile_parse_binary()
68 ctx->n_data_dirents = pe32->data_dirs; in pefile_parse_binary()
73 ctx->image_checksum_offset = in pefile_parse_binary()
75 ctx->header_size = pe64->header_size; in pefile_parse_binary()
77 ctx->n_data_dirents = pe64->data_dirs; in pefile_parse_binary()
85 pr_debug("checksum @ %x\n", ctx->image_checksum_offset); in pefile_parse_binary()
86 pr_debug("header size = %x\n", ctx->header_size); in pefile_parse_binary()
88 if (cursor >= ctx->header_size || ctx->header_size >= datalen) in pefile_parse_binary()
[all …]
mscode_parser.c
24 int mscode_parse(struct pefile_context *ctx) in mscode_parse() argument
30 ret = pkcs7_get_content_data(ctx->pkcs7, &content_data, &data_len, 1); in mscode_parse()
40 return asn1_ber_decoder(&mscode_decoder, ctx, content_data, data_len); in mscode_parse()
82 struct pefile_context *ctx = context; in mscode_note_digest_algo() local
89 ctx->digest_algo = HASH_ALGO_MD4; in mscode_note_digest_algo()
92 ctx->digest_algo = HASH_ALGO_MD5; in mscode_note_digest_algo()
95 ctx->digest_algo = HASH_ALGO_SHA1; in mscode_note_digest_algo()
98 ctx->digest_algo = HASH_ALGO_SHA256; in mscode_note_digest_algo()
121 struct pefile_context *ctx = context; in mscode_note_digest() local
123 ctx->digest = value; in mscode_note_digest()
[all …]
/linux-4.1.27/drivers/gpu/drm/exynos/
exynos7_drm_decon.c
75 struct decon_context *ctx = crtc->ctx; in decon_wait_for_vblank() local
77 if (ctx->suspended) in decon_wait_for_vblank()
80 atomic_set(&ctx->wait_vsync_event, 1); in decon_wait_for_vblank()
86 if (!wait_event_timeout(ctx->wait_vsync_queue, in decon_wait_for_vblank()
87 !atomic_read(&ctx->wait_vsync_event), in decon_wait_for_vblank()
92 static void decon_clear_channel(struct decon_context *ctx) in decon_clear_channel() argument
100 u32 val = readl(ctx->regs + WINCON(win)); in decon_clear_channel()
104 writel(val, ctx->regs + WINCON(win)); in decon_clear_channel()
111 unsigned int state = ctx->suspended; in decon_clear_channel()
113 ctx->suspended = 0; in decon_clear_channel()
[all …]
exynos_drm_fimd.c
201 struct fimd_context *ctx = crtc->ctx; in fimd_wait_for_vblank() local
203 if (ctx->suspended) in fimd_wait_for_vblank()
206 atomic_set(&ctx->wait_vsync_event, 1); in fimd_wait_for_vblank()
212 if (!wait_event_timeout(ctx->wait_vsync_queue, in fimd_wait_for_vblank()
213 !atomic_read(&ctx->wait_vsync_event), in fimd_wait_for_vblank()
218 static void fimd_enable_video_output(struct fimd_context *ctx, unsigned int win, in fimd_enable_video_output() argument
221 u32 val = readl(ctx->regs + WINCON(win)); in fimd_enable_video_output()
228 writel(val, ctx->regs + WINCON(win)); in fimd_enable_video_output()
231 static void fimd_enable_shadow_channel_path(struct fimd_context *ctx, in fimd_enable_shadow_channel_path() argument
235 u32 val = readl(ctx->regs + SHADOWCON); in fimd_enable_shadow_channel_path()
[all …]
exynos_drm_vidi.c
89 struct vidi_context *ctx = crtc->ctx; in vidi_enable_vblank() local
91 if (ctx->suspended) in vidi_enable_vblank()
94 if (!test_and_set_bit(0, &ctx->irq_flags)) in vidi_enable_vblank()
95 ctx->vblank_on = true; in vidi_enable_vblank()
97 ctx->direct_vblank = true; in vidi_enable_vblank()
104 schedule_work(&ctx->work); in vidi_enable_vblank()
111 struct vidi_context *ctx = crtc->ctx; in vidi_disable_vblank() local
113 if (ctx->suspended) in vidi_disable_vblank()
116 if (test_and_clear_bit(0, &ctx->irq_flags)) in vidi_disable_vblank()
117 ctx->vblank_on = false; in vidi_disable_vblank()
[all …]
exynos_drm_fimc.c
172 static u32 fimc_read(struct fimc_context *ctx, u32 reg) in fimc_read() argument
174 return readl(ctx->regs + reg); in fimc_read()
177 static void fimc_write(struct fimc_context *ctx, u32 val, u32 reg) in fimc_write() argument
179 writel(val, ctx->regs + reg); in fimc_write()
182 static void fimc_set_bits(struct fimc_context *ctx, u32 reg, u32 bits) in fimc_set_bits() argument
184 void __iomem *r = ctx->regs + reg; in fimc_set_bits()
189 static void fimc_clear_bits(struct fimc_context *ctx, u32 reg, u32 bits) in fimc_clear_bits() argument
191 void __iomem *r = ctx->regs + reg; in fimc_clear_bits()
196 static void fimc_sw_reset(struct fimc_context *ctx) in fimc_sw_reset() argument
201 cfg = fimc_read(ctx, EXYNOS_CISTATUS); in fimc_sw_reset()
[all …]
exynos_drm_dpi.c
47 struct exynos_dpi *ctx = connector_to_dpi(connector); in exynos_dpi_detect() local
49 if (ctx->panel && !ctx->panel->connector) in exynos_dpi_detect()
50 drm_panel_attach(ctx->panel, &ctx->connector); in exynos_dpi_detect()
70 struct exynos_dpi *ctx = connector_to_dpi(connector); in exynos_dpi_get_modes() local
73 if (ctx->vm) { in exynos_dpi_get_modes()
81 drm_display_mode_from_videomode(ctx->vm, mode); in exynos_dpi_get_modes()
87 if (ctx->panel) in exynos_dpi_get_modes()
88 return ctx->panel->funcs->get_modes(ctx->panel); in exynos_dpi_get_modes()
96 struct exynos_dpi *ctx = connector_to_dpi(connector); in exynos_dpi_best_encoder() local
98 return ctx->encoder; in exynos_dpi_best_encoder()
[all …]
exynos_mixer.c
167 static void mixer_regs_dump(struct mixer_context *ctx) in mixer_regs_dump() argument
172 (u32)readl(ctx->mixer_res.mixer_regs + reg_id)); \ in mixer_regs_dump()
199 static void vp_regs_dump(struct mixer_context *ctx) in vp_regs_dump() argument
204 (u32) readl(ctx->mixer_res.vp_regs + reg_id)); \ in vp_regs_dump()
256 static void mixer_vsync_set_update(struct mixer_context *ctx, bool enable) in mixer_vsync_set_update() argument
258 struct mixer_resources *res = &ctx->mixer_res; in mixer_vsync_set_update()
264 if (ctx->vp_enabled) in mixer_vsync_set_update()
269 static void mixer_cfg_scan(struct mixer_context *ctx, unsigned int height) in mixer_cfg_scan() argument
271 struct mixer_resources *res = &ctx->mixer_res; in mixer_cfg_scan()
275 val = (ctx->interlace ? MXR_CFG_SCAN_INTERLACE : in mixer_cfg_scan()
[all …]
exynos_drm_gsc.c
83 #define gsc_read(offset) readl(ctx->regs + (offset))
84 #define gsc_write(cfg, offset) writel(cfg, ctx->regs + (offset))
398 static int gsc_sw_reset(struct gsc_context *ctx) in gsc_sw_reset() argument
438 static void gsc_set_gscblk_fimd_wb(struct gsc_context *ctx, bool enable) in gsc_set_gscblk_fimd_wb() argument
445 gscblk_cfg |= GSC_BLK_DISP1WB_DEST(ctx->id) | in gsc_set_gscblk_fimd_wb()
446 GSC_BLK_GSCL_WB_IN_SRC_SEL(ctx->id) | in gsc_set_gscblk_fimd_wb()
447 GSC_BLK_SW_RESET_WB_DEST(ctx->id); in gsc_set_gscblk_fimd_wb()
449 gscblk_cfg |= GSC_BLK_PXLASYNC_LO_MASK_WB(ctx->id); in gsc_set_gscblk_fimd_wb()
454 static void gsc_handle_irq(struct gsc_context *ctx, bool enable, in gsc_handle_irq() argument
486 struct gsc_context *ctx = get_gsc_context(dev); in gsc_src_set_fmt() local
[all …]
/linux-4.1.27/drivers/media/platform/coda/
coda-bit.c
43 static void coda_free_bitstream_buffer(struct coda_ctx *ctx);
66 static void coda_command_async(struct coda_ctx *ctx, int cmd) in coda_command_async() argument
68 struct coda_dev *dev = ctx->dev; in coda_command_async()
73 coda_write(dev, ctx->bit_stream_param, in coda_command_async()
75 coda_write(dev, ctx->frm_dis_flg, in coda_command_async()
76 CODA_REG_BIT_FRM_DIS_FLG(ctx->reg_idx)); in coda_command_async()
77 coda_write(dev, ctx->frame_mem_ctrl, in coda_command_async()
79 coda_write(dev, ctx->workbuf.paddr, CODA_REG_BIT_WORK_BUF_ADDR); in coda_command_async()
89 coda_write(dev, ctx->idx, CODA_REG_BIT_RUN_INDEX); in coda_command_async()
90 coda_write(dev, ctx->params.codec_mode, CODA_REG_BIT_RUN_COD_STD); in coda_command_async()
[all …]
trace.h
15 TP_PROTO(struct coda_ctx *ctx, int cmd),
17 TP_ARGS(ctx, cmd),
21 __field(int, ctx)
26 __entry->minor = ctx->fh.vdev->minor;
27 __entry->ctx = ctx->idx;
32 __entry->minor, __entry->ctx, __entry->cmd)
36 TP_PROTO(struct coda_ctx *ctx),
38 TP_ARGS(ctx),
42 __field(int, ctx)
46 __entry->minor = ctx->fh.vdev->minor;
[all …]
coda-common.c
86 void coda_write_base(struct coda_ctx *ctx, struct coda_q_data *q_data, in coda_write_base() argument
109 coda_write(ctx->dev, base_y, reg_y); in coda_write_base()
110 coda_write(ctx->dev, base_cb, reg_y + 4); in coda_write_base()
111 coda_write(ctx->dev, base_cr, reg_y + 8); in coda_write_base()
381 struct coda_ctx *ctx = fh_to_ctx(priv); in coda_querycap() local
384 strlcpy(cap->card, coda_product_name(ctx->dev->devtype->product), in coda_querycap()
424 struct coda_ctx *ctx = fh_to_ctx(priv); in coda_g_fmt() local
426 q_data = get_q_data(ctx, f->type); in coda_g_fmt()
440 f->fmt.pix.colorspace = ctx->colorspace; in coda_g_fmt()
445 static int coda_try_pixelformat(struct coda_ctx *ctx, struct v4l2_format *f) in coda_try_pixelformat() argument
[all …]
coda-jpeg.c
151 int coda_jpeg_write_tables(struct coda_ctx *ctx) in coda_jpeg_write_tables() argument
165 { 512, ctx->params.jpeg_qmat_tab[0], 64 }, in coda_jpeg_write_tables()
166 { 576, ctx->params.jpeg_qmat_tab[1], 64 }, in coda_jpeg_write_tables()
167 { 640, ctx->params.jpeg_qmat_tab[1], 64 }, in coda_jpeg_write_tables()
172 coda_memcpy_parabuf(ctx->parabuf.vaddr, huff + i); in coda_jpeg_write_tables()
176 coda_memcpy_parabuf(ctx->parabuf.vaddr, qmat + i); in coda_jpeg_write_tables()
181 bool coda_jpeg_check_buffer(struct coda_ctx *ctx, struct vb2_buffer *vb) in coda_jpeg_check_buffer() argument
210 void coda_set_jpeg_compression_quality(struct coda_ctx *ctx, int quality) in coda_set_jpeg_compression_quality() argument
214 ctx->params.jpeg_quality = quality; in coda_set_jpeg_compression_quality()
231 if (ctx->params.jpeg_qmat_tab[0]) { in coda_set_jpeg_compression_quality()
[all …]
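
coda_set_jpeg_compression_quality() above regenerates the quantization tables for a requested quality. The exact formula is not among the hits; the conventional IJG-style mapping used by many JPEG encoders, shown for illustration only, scales each base table entry like this:

    #include <stdio.h>

    /* quality is assumed to be clamped to 1..100 by the caller */
    static unsigned char scale_qmat_entry(unsigned char base, int quality)
    {
        /* IJG convention: below 50 scales up, above 50 scales down */
        int scale = (quality < 50) ? 5000 / quality : 200 - 2 * quality;
        int v = (base * scale + 50) / 100;

        if (v < 1)
            v = 1;
        if (v > 255)
            v = 255;
        return (unsigned char)v;
    }

    int main(void)
    {
        /* base value 16 (a typical first luma entry) at 3 qualities */
        printf("q25 -> %u, q50 -> %u, q90 -> %u\n",
               scale_qmat_entry(16, 25), scale_qmat_entry(16, 50),
               scale_qmat_entry(16, 90));
        return 0;
    }
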
/linux-4.1.27/drivers/media/platform/s5p-mfc/
s5p_mfc_opr_v5.c
37 static int s5p_mfc_alloc_dec_temp_buffers_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_dec_temp_buffers_v5() argument
39 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_alloc_dec_temp_buffers_v5()
43 ctx->dsc.size = buf_size->dsc; in s5p_mfc_alloc_dec_temp_buffers_v5()
44 ret = s5p_mfc_alloc_priv_buf(dev->mem_dev_l, &ctx->dsc); in s5p_mfc_alloc_dec_temp_buffers_v5()
50 BUG_ON(ctx->dsc.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1)); in s5p_mfc_alloc_dec_temp_buffers_v5()
51 memset(ctx->dsc.virt, 0, ctx->dsc.size); in s5p_mfc_alloc_dec_temp_buffers_v5()
58 static void s5p_mfc_release_dec_desc_buffer_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_release_dec_desc_buffer_v5() argument
60 s5p_mfc_release_priv_buf(ctx->dev->mem_dev_l, &ctx->dsc); in s5p_mfc_release_dec_desc_buffer_v5()
64 static int s5p_mfc_alloc_codec_buffers_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_codec_buffers_v5() argument
66 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_alloc_codec_buffers_v5()
[all …]
s5p_mfc_dec.c
224 static int s5p_mfc_ctx_ready(struct s5p_mfc_ctx *ctx) in s5p_mfc_ctx_ready() argument
227 if (ctx->src_queue_cnt >= 1 && ctx->state == MFCINST_GOT_INST) in s5p_mfc_ctx_ready()
230 if (ctx->src_queue_cnt >= 1 && in s5p_mfc_ctx_ready()
231 ctx->state == MFCINST_RUNNING && in s5p_mfc_ctx_ready()
232 ctx->dst_queue_cnt >= ctx->pb_count) in s5p_mfc_ctx_ready()
235 if (ctx->state == MFCINST_FINISHING && in s5p_mfc_ctx_ready()
236 ctx->dst_queue_cnt >= ctx->pb_count) in s5p_mfc_ctx_ready()
239 if (ctx->src_queue_cnt >= 1 && in s5p_mfc_ctx_ready()
240 ctx->state == MFCINST_HEAD_PARSED && in s5p_mfc_ctx_ready()
241 ctx->capture_state == QUEUE_BUFS_MMAPED) in s5p_mfc_ctx_ready()
[all …]
s5p_mfc_opr_v6.c
49 static int s5p_mfc_alloc_dec_temp_buffers_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_dec_temp_buffers_v6() argument
57 static void s5p_mfc_release_dec_desc_buffer_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_release_dec_desc_buffer_v6() argument
63 static int s5p_mfc_alloc_codec_buffers_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_alloc_codec_buffers_v6() argument
65 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_alloc_codec_buffers_v6()
69 mb_width = MB_WIDTH(ctx->img_width); in s5p_mfc_alloc_codec_buffers_v6()
70 mb_height = MB_HEIGHT(ctx->img_height); in s5p_mfc_alloc_codec_buffers_v6()
72 if (ctx->type == MFCINST_DECODER) { in s5p_mfc_alloc_codec_buffers_v6()
74 ctx->luma_size, ctx->chroma_size, ctx->mv_size); in s5p_mfc_alloc_codec_buffers_v6()
75 mfc_debug(2, "Totals bufs: %d\n", ctx->total_dpb_count); in s5p_mfc_alloc_codec_buffers_v6()
76 } else if (ctx->type == MFCINST_ENCODER) { in s5p_mfc_alloc_codec_buffers_v6()
[all …]
s5p_mfc.c
47 void clear_work_bit(struct s5p_mfc_ctx *ctx) in clear_work_bit() argument
49 struct s5p_mfc_dev *dev = ctx->dev; in clear_work_bit()
52 __clear_bit(ctx->num, &dev->ctx_work_bits); in clear_work_bit()
57 void set_work_bit(struct s5p_mfc_ctx *ctx) in set_work_bit() argument
59 struct s5p_mfc_dev *dev = ctx->dev; in set_work_bit()
62 __set_bit(ctx->num, &dev->ctx_work_bits); in set_work_bit()
67 void clear_work_bit_irqsave(struct s5p_mfc_ctx *ctx) in clear_work_bit_irqsave() argument
69 struct s5p_mfc_dev *dev = ctx->dev; in clear_work_bit_irqsave()
73 __clear_bit(ctx->num, &dev->ctx_work_bits); in clear_work_bit_irqsave()
78 void set_work_bit_irqsave(struct s5p_mfc_ctx *ctx) in set_work_bit_irqsave() argument
[all …]
s5p_mfc_enc.c
727 static int s5p_mfc_ctx_ready(struct s5p_mfc_ctx *ctx) in s5p_mfc_ctx_ready() argument
730 ctx->src_queue_cnt, ctx->dst_queue_cnt, ctx->state); in s5p_mfc_ctx_ready()
732 if (ctx->state == MFCINST_GOT_INST && ctx->dst_queue_cnt >= 1) in s5p_mfc_ctx_ready()
735 if ((ctx->state == MFCINST_RUNNING || in s5p_mfc_ctx_ready()
736 ctx->state == MFCINST_HEAD_PRODUCED) && in s5p_mfc_ctx_ready()
737 ctx->src_queue_cnt >= 1 && ctx->dst_queue_cnt >= 1) in s5p_mfc_ctx_ready()
740 if (ctx->state == MFCINST_FINISHING && in s5p_mfc_ctx_ready()
741 ctx->dst_queue_cnt >= 1) in s5p_mfc_ctx_ready()
747 static void cleanup_ref_queue(struct s5p_mfc_ctx *ctx) in cleanup_ref_queue() argument
752 while (!list_empty(&ctx->ref_queue)) { in cleanup_ref_queue()
[all …]
s5p_mfc_intr.c
54 int s5p_mfc_wait_for_done_ctx(struct s5p_mfc_ctx *ctx, in s5p_mfc_wait_for_done_ctx() argument
60 ret = wait_event_interruptible_timeout(ctx->queue, in s5p_mfc_wait_for_done_ctx()
61 (ctx->int_cond && (ctx->int_type == command in s5p_mfc_wait_for_done_ctx()
62 || ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)), in s5p_mfc_wait_for_done_ctx()
65 ret = wait_event_timeout(ctx->queue, in s5p_mfc_wait_for_done_ctx()
66 (ctx->int_cond && (ctx->int_type == command in s5p_mfc_wait_for_done_ctx()
67 || ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)), in s5p_mfc_wait_for_done_ctx()
72 ctx->int_type, command); in s5p_mfc_wait_for_done_ctx()
79 ctx->int_type, command); in s5p_mfc_wait_for_done_ctx()
80 if (ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET) in s5p_mfc_wait_for_done_ctx()
[all …]
s5p_mfc_cmd_v5.c
75 static int s5p_mfc_open_inst_cmd_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_open_inst_cmd_v5() argument
77 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_open_inst_cmd_v5()
82 mfc_debug(2, "Getting instance number (codec: %d)\n", ctx->codec_mode); in s5p_mfc_open_inst_cmd_v5()
83 dev->curr_ctx = ctx->num; in s5p_mfc_open_inst_cmd_v5()
85 switch (ctx->codec_mode) { in s5p_mfc_open_inst_cmd_v5()
117 h2r_args.arg[2] = ctx->ctx.ofs; in s5p_mfc_open_inst_cmd_v5()
118 h2r_args.arg[3] = ctx->ctx.size; in s5p_mfc_open_inst_cmd_v5()
123 ctx->state = MFCINST_ERROR; in s5p_mfc_open_inst_cmd_v5()
128 static int s5p_mfc_close_inst_cmd_v5(struct s5p_mfc_ctx *ctx) in s5p_mfc_close_inst_cmd_v5() argument
130 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_close_inst_cmd_v5()
[all …]
s5p_mfc_cmd_v6.c
67 static int s5p_mfc_open_inst_cmd_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_open_inst_cmd_v6() argument
69 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_open_inst_cmd_v6()
73 mfc_debug(2, "Requested codec mode: %d\n", ctx->codec_mode); in s5p_mfc_open_inst_cmd_v6()
74 dev->curr_ctx = ctx->num; in s5p_mfc_open_inst_cmd_v6()
75 switch (ctx->codec_mode) { in s5p_mfc_open_inst_cmd_v6()
119 mfc_write(dev, ctx->ctx.dma, S5P_FIMV_CONTEXT_MEM_ADDR_V6); in s5p_mfc_open_inst_cmd_v6()
120 mfc_write(dev, ctx->ctx.size, S5P_FIMV_CONTEXT_MEM_SIZE_V6); in s5p_mfc_open_inst_cmd_v6()
128 static int s5p_mfc_close_inst_cmd_v6(struct s5p_mfc_ctx *ctx) in s5p_mfc_close_inst_cmd_v6() argument
130 struct s5p_mfc_dev *dev = ctx->dev; in s5p_mfc_close_inst_cmd_v6()
134 dev->curr_ctx = ctx->num; in s5p_mfc_close_inst_cmd_v6()
[all …]
/linux-4.1.27/drivers/hwmon/
pwm-fan.c
41 static int __set_pwm(struct pwm_fan_ctx *ctx, unsigned long pwm) in __set_pwm() argument
46 mutex_lock(&ctx->lock); in __set_pwm()
47 if (ctx->pwm_value == pwm) in __set_pwm()
50 duty = DIV_ROUND_UP(pwm * (ctx->pwm->period - 1), MAX_PWM); in __set_pwm()
51 ret = pwm_config(ctx->pwm, duty, ctx->pwm->period); in __set_pwm()
56 pwm_disable(ctx->pwm); in __set_pwm()
58 if (ctx->pwm_value == 0) { in __set_pwm()
59 ret = pwm_enable(ctx->pwm); in __set_pwm()
64 ctx->pwm_value = pwm; in __set_pwm()
66 mutex_unlock(&ctx->lock); in __set_pwm()
[all …]
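
__set_pwm() above maps an 8-bit fan speed (0..MAX_PWM) onto the PWM period with DIV_ROUND_UP, so any nonzero request yields a nonzero duty cycle. The arithmetic in isolation:

    #include <stdio.h>

    #define MAX_PWM 255
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    static unsigned long pwm_to_duty(unsigned long pwm,
                                     unsigned long period)
    {
        return DIV_ROUND_UP(pwm * (period - 1), MAX_PWM);
    }

    int main(void)
    {
        /* e.g. a 40000 ns PWM period */
        printf("pwm 1   -> %lu ns\n", pwm_to_duty(1, 40000));
        printf("pwm 128 -> %lu ns\n", pwm_to_duty(128, 40000));
        printf("pwm 255 -> %lu ns\n", pwm_to_duty(255, 40000));
        return 0;
    }
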
/linux-4.1.27/drivers/char/hw_random/
xgene-rng.c
105 struct xgene_rng_dev *ctx = (struct xgene_rng_dev *) arg; in xgene_rng_expired_timer() local
108 disable_irq(ctx->irq); in xgene_rng_expired_timer()
109 ctx->failure_cnt = 0; in xgene_rng_expired_timer()
110 del_timer(&ctx->failure_timer); in xgene_rng_expired_timer()
111 enable_irq(ctx->irq); in xgene_rng_expired_timer()
114 static void xgene_rng_start_timer(struct xgene_rng_dev *ctx) in xgene_rng_start_timer() argument
116 ctx->failure_timer.data = (unsigned long) ctx; in xgene_rng_start_timer()
117 ctx->failure_timer.function = xgene_rng_expired_timer; in xgene_rng_start_timer()
118 ctx->failure_timer.expires = jiffies + 120 * HZ; in xgene_rng_start_timer()
119 add_timer(&ctx->failure_timer); in xgene_rng_start_timer()
[all …]
/linux-4.1.27/drivers/gpu/drm/
drm_modeset_lock.c
74 struct drm_modeset_acquire_ctx *ctx; in __drm_modeset_lock_all() local
77 ctx = kzalloc(sizeof(*ctx), in __drm_modeset_lock_all()
79 if (!ctx) in __drm_modeset_lock_all()
89 drm_modeset_acquire_init(ctx, 0); in __drm_modeset_lock_all()
90 ctx->trylock_only = trylock; in __drm_modeset_lock_all()
93 ret = drm_modeset_lock(&config->connection_mutex, ctx); in __drm_modeset_lock_all()
96 ret = drm_modeset_lock_all_crtcs(dev, ctx); in __drm_modeset_lock_all()
105 config->acquire_ctx = ctx; in __drm_modeset_lock_all()
113 drm_modeset_backoff(ctx); in __drm_modeset_lock_all()
144 struct drm_modeset_acquire_ctx *ctx = config->acquire_ctx; in drm_modeset_unlock_all() local
[all …]
drm_context.c
305 struct drm_ctx ctx; in drm_legacy_resctx() local
309 memset(&ctx, 0, sizeof(ctx)); in drm_legacy_resctx()
311 ctx.handle = i; in drm_legacy_resctx()
312 if (copy_to_user(&res->contexts[i], &ctx, sizeof(ctx))) in drm_legacy_resctx()
336 struct drm_ctx *ctx = data; in drm_legacy_addctx() local
338 ctx->handle = drm_legacy_ctxbitmap_next(dev); in drm_legacy_addctx()
339 if (ctx->handle == DRM_KERNEL_CONTEXT) { in drm_legacy_addctx()
341 ctx->handle = drm_legacy_ctxbitmap_next(dev); in drm_legacy_addctx()
343 DRM_DEBUG("%d\n", ctx->handle); in drm_legacy_addctx()
344 if (ctx->handle == -1) { in drm_legacy_addctx()
[all …]
/linux-4.1.27/drivers/base/regmap/
regmap-mmio.c
92 struct regmap_mmio_context *ctx = context; in regmap_mmio_gather_write() local
98 if (!IS_ERR(ctx->clk)) { in regmap_mmio_gather_write()
99 ret = clk_enable(ctx->clk); in regmap_mmio_gather_write()
107 switch (ctx->val_bytes) { in regmap_mmio_gather_write()
109 writeb(*(u8 *)val, ctx->regs + offset); in regmap_mmio_gather_write()
112 writew(*(u16 *)val, ctx->regs + offset); in regmap_mmio_gather_write()
115 writel(*(u32 *)val, ctx->regs + offset); in regmap_mmio_gather_write()
119 writeq(*(u64 *)val, ctx->regs + offset); in regmap_mmio_gather_write()
126 val_size -= ctx->val_bytes; in regmap_mmio_gather_write()
127 val += ctx->val_bytes; in regmap_mmio_gather_write()
[all …]
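
regmap_mmio_gather_write() above dispatches on the configured value width and steps through the source buffer val_bytes at a time. A userspace sketch of the same dispatch against plain memory — the kernel uses writeb/writew/writel/writeq on ioremapped registers, and whether the register offset also advances is not visible in the hits, so dense, suitably aligned registers are assumed here:

    #include <stdint.h>
    #include <stdio.h>

    static int mmio_gather_write(volatile uint8_t *regs, unsigned offset,
                                 const uint8_t *val, size_t val_size,
                                 size_t val_bytes)
    {
        while (val_size) {
            switch (val_bytes) {
            case 1:
                *(volatile uint8_t *)(regs + offset) = *val;
                break;
            case 2:
                *(volatile uint16_t *)(regs + offset) =
                        *(const uint16_t *)(const void *)val;
                break;
            case 4:
                *(volatile uint32_t *)(regs + offset) =
                        *(const uint32_t *)(const void *)val;
                break;
            default:
                return -1;           /* unsupported width */
            }
            val_size -= val_bytes;   /* as in the hits above */
            val      += val_bytes;
            offset   += val_bytes;   /* assumption: dense registers */
        }
        return 0;
    }

    int main(void)
    {
        uint8_t regs[16] = { 0 };
        uint32_t v = 0x12345678;

        mmio_gather_write(regs, 4, (const uint8_t *)&v, sizeof(v), 4);
        printf("regs[4..7]: %02x %02x %02x %02x\n",
               regs[4], regs[5], regs[6], regs[7]);
        return 0;
    }
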
/linux-4.1.27/drivers/crypto/
atmel-sha.c
158 static size_t atmel_sha_append_sg(struct atmel_sha_reqctx *ctx) in atmel_sha_append_sg() argument
162 while ((ctx->bufcnt < ctx->buflen) && ctx->total) { in atmel_sha_append_sg()
163 count = min(ctx->sg->length - ctx->offset, ctx->total); in atmel_sha_append_sg()
164 count = min(count, ctx->buflen - ctx->bufcnt); in atmel_sha_append_sg()
173 if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) { in atmel_sha_append_sg()
174 ctx->sg = sg_next(ctx->sg); in atmel_sha_append_sg()
181 scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg, in atmel_sha_append_sg()
182 ctx->offset, count, 0); in atmel_sha_append_sg()
184 ctx->bufcnt += count; in atmel_sha_append_sg()
185 ctx->offset += count; in atmel_sha_append_sg()
[all …]
bfin_crc.c
152 struct bfin_crypto_crc_reqctx *ctx = ahash_request_ctx(req); in bfin_crypto_crc_init() local
155 dev_dbg(ctx->crc->dev, "crc_init\n"); in bfin_crypto_crc_init()
164 dev_dbg(ctx->crc->dev, "init: requested sg list is too big > %d\n", in bfin_crypto_crc_init()
169 ctx->crc = crc; in bfin_crypto_crc_init()
170 ctx->bufnext_len = 0; in bfin_crypto_crc_init()
171 ctx->buflast_len = 0; in bfin_crypto_crc_init()
172 ctx->sg_buflen = 0; in bfin_crypto_crc_init()
173 ctx->total = 0; in bfin_crypto_crc_init()
174 ctx->flag = 0; in bfin_crypto_crc_init()
179 dev_dbg(ctx->crc->dev, "init: digest size: %d\n", in bfin_crypto_crc_init()
[all …]
img-hash.c
165 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_start() local
168 if (ctx->flags & DRIVER_FLAGS_MD5) in img_hash_start()
170 else if (ctx->flags & DRIVER_FLAGS_SHA1) in img_hash_start()
172 else if (ctx->flags & DRIVER_FLAGS_SHA224) in img_hash_start()
174 else if (ctx->flags & DRIVER_FLAGS_SHA256) in img_hash_start()
211 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_dma_callback() local
213 if (ctx->bufcnt) { in img_hash_dma_callback()
214 img_hash_xmit_cpu(hdev, ctx->buffer, ctx->bufcnt, 0); in img_hash_dma_callback()
215 ctx->bufcnt = 0; in img_hash_dma_callback()
217 if (ctx->sg) in img_hash_dma_callback()
[all …]
omap-sham.c
281 struct omap_sham_reqctx *ctx = ahash_request_ctx(req); in omap_sham_copy_hash_omap2() local
282 struct omap_sham_dev *dd = ctx->dd; in omap_sham_copy_hash_omap2()
283 u32 *hash = (u32 *)ctx->digest; in omap_sham_copy_hash_omap2()
296 struct omap_sham_reqctx *ctx = ahash_request_ctx(req); in omap_sham_copy_hash_omap4() local
297 struct omap_sham_dev *dd = ctx->dd; in omap_sham_copy_hash_omap4()
300 if (ctx->flags & BIT(FLAGS_HMAC)) { in omap_sham_copy_hash_omap4()
321 struct omap_sham_reqctx *ctx = ahash_request_ctx(req); in omap_sham_copy_ready_hash() local
322 u32 *in = (u32 *)ctx->digest; in omap_sham_copy_ready_hash()
329 switch (ctx->flags & FLAGS_MODE_MASK) { in omap_sham_copy_ready_hash()
335 if (test_bit(FLAGS_BE32_SHA1, &ctx->dd->flags)) in omap_sham_copy_ready_hash()
[all …]
padlock-aes.c
84 static inline struct aes_ctx *aes_ctx_common(void *ctx) in aes_ctx_common() argument
86 unsigned long addr = (unsigned long)ctx; in aes_ctx_common()
107 struct aes_ctx *ctx = aes_ctx(tfm); in aes_set_key() local
123 ctx->D = ctx->E; in aes_set_key()
125 ctx->E[0] = le32_to_cpu(key[0]); in aes_set_key()
126 ctx->E[1] = le32_to_cpu(key[1]); in aes_set_key()
127 ctx->E[2] = le32_to_cpu(key[2]); in aes_set_key()
128 ctx->E[3] = le32_to_cpu(key[3]); in aes_set_key()
131 memset(&ctx->cword, 0, sizeof(ctx->cword)); in aes_set_key()
133 ctx->cword.decrypt.encdec = 1; in aes_set_key()
[all …]
mv_cesa.c
169 static void compute_aes_dec_key(struct mv_ctx *ctx) in compute_aes_dec_key() argument
174 if (!ctx->need_calc_aes_dkey) in compute_aes_dec_key()
177 crypto_aes_expand_key(&gen_aes_key, ctx->aes_enc_key, ctx->key_len); in compute_aes_dec_key()
179 key_pos = ctx->key_len + 24; in compute_aes_dec_key()
180 memcpy(ctx->aes_dec_key, &gen_aes_key.key_enc[key_pos], 4 * 4); in compute_aes_dec_key()
181 switch (ctx->key_len) { in compute_aes_dec_key()
187 memcpy(&ctx->aes_dec_key[4], &gen_aes_key.key_enc[key_pos], in compute_aes_dec_key()
191 ctx->need_calc_aes_dkey = 0; in compute_aes_dec_key()
198 struct mv_ctx *ctx = crypto_tfm_ctx(tfm); in mv_setkey_aes() local
209 ctx->key_len = len; in mv_setkey_aes()
[all …]
/linux-4.1.27/arch/arm64/net/
bpf_jit_comp.c
68 static inline void emit(const u32 insn, struct jit_ctx *ctx) in emit() argument
70 if (ctx->image != NULL) in emit()
71 ctx->image[ctx->idx] = cpu_to_le32(insn); in emit()
73 ctx->idx++; in emit()
77 struct jit_ctx *ctx) in emit_a64_mov_i64() argument
82 emit(A64_MOVZ(1, reg, tmp & 0xffff, shift), ctx); in emit_a64_mov_i64()
87 emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx); in emit_a64_mov_i64()
94 const s32 val, struct jit_ctx *ctx) in emit_a64_mov_i() argument
101 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx); in emit_a64_mov_i()
103 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx); in emit_a64_mov_i()
[all …]
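
emit_a64_mov_i64() above synthesises a 64-bit immediate as one MOVZ followed by a MOVK per remaining nonzero 16-bit chunk. The chunking logic on its own, with the instruction encoding replaced by printf for demonstration:

    #include <stdint.h>
    #include <stdio.h>

    static void mov_imm64(uint64_t val)
    {
        uint64_t tmp = val;
        int shift = 0;

        printf("movz x0, #0x%llx, lsl #%d\n",
               (unsigned long long)(tmp & 0xffff), shift);
        while (tmp >>= 16) {
            shift += 16;
            if (tmp & 0xffff)        /* all-zero chunks are skipped */
                printf("movk x0, #0x%llx, lsl #%d\n",
                       (unsigned long long)(tmp & 0xffff), shift);
        }
    }

    int main(void)
    {
        /* movz + one movk; the zero middle chunk needs no movk */
        mov_imm64(0x12340000abcdULL);
        return 0;
    }
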
/linux-4.1.27/drivers/net/wireless/iwlwifi/dvm/
rxon.c
38 struct iwl_rxon_context *ctx) in iwl_connection_init_rx_config() argument
40 memset(&ctx->staging, 0, sizeof(ctx->staging)); in iwl_connection_init_rx_config()
42 if (!ctx->vif) { in iwl_connection_init_rx_config()
43 ctx->staging.dev_type = ctx->unused_devtype; in iwl_connection_init_rx_config()
45 switch (ctx->vif->type) { in iwl_connection_init_rx_config()
47 ctx->staging.dev_type = ctx->ap_devtype; in iwl_connection_init_rx_config()
51 ctx->staging.dev_type = ctx->station_devtype; in iwl_connection_init_rx_config()
52 ctx->staging.filter_flags = RXON_FILTER_ACCEPT_GRP_MSK; in iwl_connection_init_rx_config()
56 ctx->staging.dev_type = ctx->ibss_devtype; in iwl_connection_init_rx_config()
57 ctx->staging.flags = RXON_FLG_SHORT_PREAMBLE_MSK; in iwl_connection_init_rx_config()
[all …]
mac80211.c
102 struct iwl_rxon_context *ctx; in iwlagn_mac_setup_register() local
143 for_each_context(priv, ctx) { in iwlagn_mac_setup_register()
144 hw->wiphy->interface_modes |= ctx->interface_modes; in iwlagn_mac_setup_register()
145 hw->wiphy->interface_modes |= ctx->exclusive_interface_modes; in iwlagn_mac_setup_register()
234 struct iwl_rxon_context *ctx; in __iwl_up() local
244 for_each_context(priv, ctx) { in __iwl_up()
245 ret = iwlagn_alloc_bcast_station(priv, ctx); in __iwl_up()
359 struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS]; in iwlagn_mac_suspend() local
369 if (!ctx->vif || ctx->vif->type != NL80211_IFTYPE_STATION || in iwlagn_mac_suspend()
370 !iwl_is_associated_ctx(ctx)) { in iwlagn_mac_suspend()
[all …]
/linux-4.1.27/drivers/media/platform/exynos-gsc/
gsc-m2m.c
31 static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx) in gsc_m2m_ctx_stop_req() argument
34 struct gsc_dev *gsc = ctx->gsc_dev; in gsc_m2m_ctx_stop_req()
38 if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx)) in gsc_m2m_ctx_stop_req()
41 gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx); in gsc_m2m_ctx_stop_req()
43 !gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx), in gsc_m2m_ctx_stop_req()
49 static void __gsc_m2m_job_abort(struct gsc_ctx *ctx) in __gsc_m2m_job_abort() argument
53 ret = gsc_m2m_ctx_stop_req(ctx); in __gsc_m2m_job_abort()
54 if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) { in __gsc_m2m_job_abort()
55 gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx); in __gsc_m2m_job_abort()
56 gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR); in __gsc_m2m_job_abort()
[all …]
gsc-core.h
221 struct gsc_ctx *ctx; member
388 void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state);
394 int gsc_try_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f);
396 int gsc_g_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f);
398 int gsc_g_crop(struct gsc_ctx *ctx, struct v4l2_crop *cr);
399 int gsc_try_crop(struct gsc_ctx *ctx, struct v4l2_crop *cr);
408 int gsc_set_scaler_info(struct gsc_ctx *ctx);
409 int gsc_ctrls_create(struct gsc_ctx *ctx);
410 void gsc_ctrls_delete(struct gsc_ctx *ctx);
411 int gsc_prepare_addr(struct gsc_ctx *ctx, struct vb2_buffer *vb,
[all …]
gsc-regs.c
111 void gsc_hw_set_input_path(struct gsc_ctx *ctx) in gsc_hw_set_input_path() argument
113 struct gsc_dev *dev = ctx->gsc_dev; in gsc_hw_set_input_path()
118 if (ctx->in_path == GSC_DMA) in gsc_hw_set_input_path()
124 void gsc_hw_set_in_size(struct gsc_ctx *ctx) in gsc_hw_set_in_size() argument
126 struct gsc_dev *dev = ctx->gsc_dev; in gsc_hw_set_in_size()
127 struct gsc_frame *frame = &ctx->s_frame; in gsc_hw_set_in_size()
146 void gsc_hw_set_in_image_rgb(struct gsc_ctx *ctx) in gsc_hw_set_in_image_rgb() argument
148 struct gsc_dev *dev = ctx->gsc_dev; in gsc_hw_set_in_image_rgb()
149 struct gsc_frame *frame = &ctx->s_frame; in gsc_hw_set_in_image_rgb()
166 void gsc_hw_set_in_image_format(struct gsc_ctx *ctx) in gsc_hw_set_in_image_format() argument
[all …]
/linux-4.1.27/drivers/gpu/drm/radeon/
Datom.c56 struct atom_context *ctx; member
66 static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params);
67 int atom_execute_table(struct atom_context *ctx, int index, uint32_t * params);
102 static uint32_t atom_iio_execute(struct atom_context *ctx, int base, in atom_iio_execute() argument
105 struct radeon_device *rdev = ctx->card->dev->dev_private; in atom_iio_execute()
114 temp = ctx->card->ioreg_read(ctx->card, CU16(base + 1)); in atom_iio_execute()
119 (void)ctx->card->ioreg_read(ctx->card, CU16(base + 1)); in atom_iio_execute()
120 ctx->card->ioreg_write(ctx->card, CU16(base + 1), temp); in atom_iio_execute()
160 ((ctx-> in atom_iio_execute()
177 static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr, in atom_get_src_int() argument
[all …]
atom-bits.h
32 #define U8(ptr) get_u8(ctx->ctx->bios, (ptr))
33 #define CU8(ptr) get_u8(ctx->bios, (ptr))
38 #define U16(ptr) get_u16(ctx->ctx->bios, (ptr))
39 #define CU16(ptr) get_u16(ctx->bios, (ptr))
44 #define U32(ptr) get_u32(ctx->ctx->bios, (ptr))
45 #define CU32(ptr) get_u32(ctx->bios, (ptr))
46 #define CSTR(ptr) (((char *)(ctx->bios))+(ptr))
/linux-4.1.27/net/sunrpc/auth_gss/
gss_krb5_mech.c
220 struct krb5_ctx *ctx, struct crypto_blkcipher **res) in get_key() argument
248 *res = crypto_alloc_blkcipher(ctx->gk5e->encrypt_name, 0, in get_key()
252 "crypto algorithm %s\n", ctx->gk5e->encrypt_name); in get_key()
258 "crypto algorithm %s\n", ctx->gk5e->encrypt_name); in get_key()
275 gss_import_v1_context(const void *p, const void *end, struct krb5_ctx *ctx) in gss_import_v1_context() argument
279 p = simple_get_bytes(p, end, &ctx->initiate, sizeof(ctx->initiate)); in gss_import_v1_context()
284 ctx->enctype = ENCTYPE_DES_CBC_RAW; in gss_import_v1_context()
286 ctx->gk5e = get_gss_krb5_enctype(ctx->enctype); in gss_import_v1_context()
287 if (ctx->gk5e == NULL) { in gss_import_v1_context()
315 p = simple_get_bytes(p, end, &ctx->endtime, sizeof(ctx->endtime)); in gss_import_v1_context()
[all …]
gss_krb5_seal.c
74 setup_token(struct krb5_ctx *ctx, struct xdr_netobj *token) in setup_token() argument
78 int body_size = GSS_KRB5_TOK_HDR_LEN + ctx->gk5e->cksumlength; in setup_token()
80 token->len = g_token_size(&ctx->mech_used, body_size); in setup_token()
83 g_make_token_header(&ctx->mech_used, body_size, (unsigned char **)&ptr); in setup_token()
92 *ptr++ = (__force u16)cpu_to_le16(ctx->gk5e->signalg); in setup_token()
100 setup_token_v2(struct krb5_ctx *ctx, struct xdr_netobj *token) in setup_token_v2() argument
106 if ((ctx->flags & KRB5_CTX_FLAG_INITIATOR) == 0) in setup_token_v2()
108 if (ctx->flags & KRB5_CTX_FLAG_ACCEPTOR_SUBKEY) in setup_token_v2()
123 token->len = GSS_KRB5_TOK_HDR_LEN + ctx->gk5e->cksumlength; in setup_token_v2()
128 gss_get_mic_v1(struct krb5_ctx *ctx, struct xdr_buf *text, in gss_get_mic_v1() argument
[all …]
gss_krb5_unseal.c
74 gss_verify_mic_v1(struct krb5_ctx *ctx, in gss_verify_mic_v1() argument
91 if (g_verify_token_header(&ctx->mech_used, &bodysize, &ptr, in gss_verify_mic_v1()
102 if (signalg != ctx->gk5e->signalg) in gss_verify_mic_v1()
112 if (ctx->gk5e->keyed_cksum) in gss_verify_mic_v1()
113 cksumkey = ctx->cksum; in gss_verify_mic_v1()
117 if (make_checksum(ctx, ptr, 8, message_buffer, 0, in gss_verify_mic_v1()
122 ctx->gk5e->cksumlength)) in gss_verify_mic_v1()
129 if (now > ctx->endtime) in gss_verify_mic_v1()
134 if (krb5_get_seq_num(ctx, ptr + GSS_KRB5_TOK_HDR_LEN, ptr + 8, in gss_verify_mic_v1()
138 if ((ctx->initiate && direction != 0xff) || in gss_verify_mic_v1()
[all …]
/linux-4.1.27/arch/mips/pci/
Dpci-alchemy.c91 static void alchemy_pci_wired_entry(struct alchemy_pci_context *ctx) in alchemy_pci_wired_entry() argument
93 ctx->wired_entry = read_c0_wired(); in alchemy_pci_wired_entry()
94 add_wired_entry(0, 0, (unsigned long)ctx->pci_cfg_vm->addr, PM_4K); in alchemy_pci_wired_entry()
95 ctx->last_elo0 = ctx->last_elo1 = ~0; in alchemy_pci_wired_entry()
101 struct alchemy_pci_context *ctx = bus->sysdata; in config_access() local
113 r = __raw_readl(ctx->regs + PCI_REG_STATCMD) & 0x0000ffff; in config_access()
115 __raw_writel(r, ctx->regs + PCI_REG_STATCMD); in config_access()
121 if (ctx->board_pci_idsel(device, 1) == 0) { in config_access()
146 if ((entryLo0 != ctx->last_elo0) || (entryLo1 != ctx->last_elo1)) { in config_access()
147 mod_wired_entry(ctx->wired_entry, entryLo0, entryLo1, in config_access()
[all …]
/linux-4.1.27/drivers/net/usb/
Dcdc_ncm.c65 static void cdc_ncm_tx_timeout_start(struct cdc_ncm_ctx *ctx);
107 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_get_ethtool_stats() local
112 p = (char *)ctx + cdc_ncm_gstrings_stats[i].stat_offset; in cdc_ncm_get_ethtool_stats()
149 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_check_rx_max() local
154 max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_RX, le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize)); in cdc_ncm_check_rx_max()
159 le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize), min); in cdc_ncm_check_rx_max()
172 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_check_tx_max() local
176 min = ctx->max_datagram_size + ctx->max_ndp_size + sizeof(struct usb_cdc_ncm_nth16); in cdc_ncm_check_tx_max()
177 max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_TX, le32_to_cpu(ctx->ncm_parm.dwNtbOutMaxSize)); in cdc_ncm_check_tx_max()
192 struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; in cdc_ncm_show_min_tx_pkt() local
[all …]
/linux-4.1.27/crypto/
Dansi_cprng.c88 static int _get_more_prng_bytes(struct prng_context *ctx, int cont_test) in _get_more_prng_bytes() argument
96 ctx); in _get_more_prng_bytes()
98 hexdump("Input DT: ", ctx->DT, DEFAULT_BLK_SZ); in _get_more_prng_bytes()
99 hexdump("Input I: ", ctx->I, DEFAULT_BLK_SZ); in _get_more_prng_bytes()
100 hexdump("Input V: ", ctx->V, DEFAULT_BLK_SZ); in _get_more_prng_bytes()
113 memcpy(tmp, ctx->DT, DEFAULT_BLK_SZ); in _get_more_prng_bytes()
114 output = ctx->I; in _get_more_prng_bytes()
124 xor_vectors(ctx->I, ctx->V, tmp, DEFAULT_BLK_SZ); in _get_more_prng_bytes()
126 output = ctx->rand_data; in _get_more_prng_bytes()
133 if (!memcmp(ctx->rand_data, ctx->last_rand_data, in _get_more_prng_bytes()
[all …]
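The DT/I/V hexdumps and the memcmp() against last_rand_data above are the visible parts of an ANSI X9.31-style generator: each step encrypts the timestamp vector DT to get I, encrypts I ^ V to produce the output block, and encrypts output ^ I to form the next seed V, with the continuous test rejecting any repeated output block. A compact sketch of one step, assuming E() is any 16-byte block-cipher encryption under the generator's fixed key (here just a stand-in function pointer):

#include <stdint.h>
#include <string.h>

#define BLK 16

typedef void (*blkcipher_fn)(uint8_t block[BLK]);       /* encrypt in place */

static void x931_step(blkcipher_fn E, uint8_t DT[BLK], uint8_t V[BLK],
                      uint8_t R[BLK])
{
        uint8_t I[BLK];
        int i;

        memcpy(I, DT, BLK);
        E(I);                           /* I = E(DT) */
        for (i = 0; i < BLK; i++)
                R[i] = I[i] ^ V[i];
        E(R);                           /* R = E(I ^ V), the output block */
        for (i = 0; i < BLK; i++)
                V[i] = R[i] ^ I[i];
        E(V);                           /* V = E(R ^ I), seed for next call */
        /* caller increments DT and runs the continuous test on R */
}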
Dalgif_aead.c59 struct aead_ctx *ctx = ask->private; in aead_sndbuf() local
62 ctx->used, 0); in aead_sndbuf()
70 static inline bool aead_sufficient_data(struct aead_ctx *ctx) in aead_sufficient_data() argument
72 unsigned as = crypto_aead_authsize(crypto_aead_reqtfm(&ctx->aead_req)); in aead_sufficient_data()
74 return (ctx->used >= (ctx->aead_assoclen + (ctx->enc ? 0 : as))); in aead_sufficient_data()
80 struct aead_ctx *ctx = ask->private; in aead_put_sgl() local
81 struct aead_sg_list *sgl = &ctx->tsgl; in aead_put_sgl()
93 ctx->used = 0; in aead_put_sgl()
94 ctx->more = 0; in aead_put_sgl()
95 ctx->merge = 0; in aead_put_sgl()
[all …]
Daes_generic.c1150 t ^= ctx->key_enc[4 * i]; \
1151 ctx->key_enc[4 * i + 4] = t; \
1152 t ^= ctx->key_enc[4 * i + 1]; \
1153 ctx->key_enc[4 * i + 5] = t; \
1154 t ^= ctx->key_enc[4 * i + 2]; \
1155 ctx->key_enc[4 * i + 6] = t; \
1156 t ^= ctx->key_enc[4 * i + 3]; \
1157 ctx->key_enc[4 * i + 7] = t; \
1163 t ^= ctx->key_enc[6 * i]; \
1164 ctx->key_enc[6 * i + 6] = t; \
[all …]
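The backslash-continued lines above are one unrolled step of the AES key schedule: after the rotate/S-box/rcon transform produces t, every new round-key word is the previous schedule word folded into the running value. The same step written as a loop for the AES-128 case (the 6 * i indexing visible further down is the AES-192 variant of the identical chain):

#include <stdint.h>

static void key_expand_step(uint32_t *key_enc, int i, uint32_t t)
{
        int j;

        for (j = 0; j < 4; j++) {
                t ^= key_enc[4 * i + j];
                key_enc[4 * i + 4 + j] = t;
        }
}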
Dchainiv.c51 struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); in chainiv_givencrypt() local
64 spin_lock_bh(&ctx->lock); in chainiv_givencrypt()
68 memcpy(req->giv, ctx->iv, ivsize); in chainiv_givencrypt()
69 memcpy(subreq->info, ctx->iv, ivsize); in chainiv_givencrypt()
75 memcpy(ctx->iv, subreq->info, ivsize); in chainiv_givencrypt()
78 spin_unlock_bh(&ctx->lock); in chainiv_givencrypt()
86 struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); in chainiv_givencrypt_first() local
89 spin_lock_bh(&ctx->lock); in chainiv_givencrypt_first()
95 err = crypto_rng_get_bytes(crypto_default_rng, ctx->iv, in chainiv_givencrypt_first()
99 spin_unlock_bh(&ctx->lock); in chainiv_givencrypt_first()
[all …]
Dalgif_hash.c48 struct hash_ctx *ctx = ask->private; in hash_sendmsg() local
56 if (!ctx->more) { in hash_sendmsg()
57 err = af_alg_wait_for_completion(crypto_ahash_init(&ctx->req), in hash_sendmsg()
58 &ctx->completion); in hash_sendmsg()
63 ctx->more = 0; in hash_sendmsg()
71 len = af_alg_make_sg(&ctx->sgl, &msg->msg_iter, len); in hash_sendmsg()
77 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, len); in hash_sendmsg()
79 err = af_alg_wait_for_completion(crypto_ahash_update(&ctx->req), in hash_sendmsg()
80 &ctx->completion); in hash_sendmsg()
81 af_alg_free_sg(&ctx->sgl); in hash_sendmsg()
[all …]
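hash_sendmsg() above is the kernel half of the AF_ALG hash interface: the first sendmsg() on a fresh operation triggers crypto_ahash_init(), each chunk becomes an ahash update, and a read() on the operation socket finalizes and returns the digest. A hedged userspace sketch of driving it (error checking elided; "sha1" and its 20-byte digest are just the example choice):

#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

int main(void)
{
        struct sockaddr_alg sa = {
                .salg_family = AF_ALG,
                .salg_type   = "hash",
                .salg_name   = "sha1",
        };
        unsigned char digest[20];
        int tfm, op, i;

        tfm = socket(AF_ALG, SOCK_SEQPACKET, 0);
        bind(tfm, (struct sockaddr *)&sa, sizeof(sa));
        op = accept(tfm, NULL, 0);              /* one op socket per hash */

        send(op, "abc", 3, 0);                  /* lands in hash_sendmsg() */
        read(op, digest, sizeof(digest));       /* finalizes, returns digest */

        for (i = 0; i < (int)sizeof(digest); i++)
                printf("%02x", digest[i]);
        printf("\n");
        close(op);
        close(tfm);
        return 0;
}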
Dxcbc.c42 u8 ctx[]; member
58 u8 ctx[]; member
65 struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent); in crypto_xcbc_digest_setkey() local
67 u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); in crypto_xcbc_digest_setkey()
71 if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen))) in crypto_xcbc_digest_setkey()
74 crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs); in crypto_xcbc_digest_setkey()
75 crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2); in crypto_xcbc_digest_setkey()
76 crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks); in crypto_xcbc_digest_setkey()
78 return crypto_cipher_setkey(ctx->child, key1, bs); in crypto_xcbc_digest_setkey()
85 struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc); in crypto_xcbc_digest_init() local
[all …]
Dalgif_skcipher.c71 #define GET_SREQ(areq, ctx) (struct skcipher_async_req *)((char *)areq + \ argument
72 crypto_ablkcipher_reqsize(crypto_ablkcipher_reqtfm(&ctx->req)))
74 #define GET_REQ_SIZE(ctx) \ argument
75 crypto_ablkcipher_reqsize(crypto_ablkcipher_reqtfm(&ctx->req))
77 #define GET_IV_SIZE(ctx) \ argument
78 crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(&ctx->req))
107 struct skcipher_ctx *ctx = ask->private; in skcipher_async_cb() local
108 struct skcipher_async_req *sreq = GET_SREQ(req, ctx); in skcipher_async_cb()
111 atomic_dec(&ctx->inflight); in skcipher_async_cb()
120 struct skcipher_ctx *ctx = ask->private; in skcipher_sndbuf() local
[all …]
Dcmac.c35 u8 ctx[]; member
51 u8 ctx[]; member
58 struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent); in crypto_cmac_digest_setkey() local
60 __be64 *consts = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); in crypto_cmac_digest_setkey()
65 err = crypto_cipher_setkey(ctx->child, inkey, keylen); in crypto_cmac_digest_setkey()
71 crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts); in crypto_cmac_digest_setkey()
111 struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc); in crypto_cmac_digest_init() local
113 u8 *prev = PTR_ALIGN((void *)ctx->ctx, alignmask + 1) + bs; in crypto_cmac_digest_init()
115 ctx->len = 0; in crypto_cmac_digest_init()
127 struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc); in crypto_cmac_digest_update() local
[all …]
Dsalsa20_generic.c107 static void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k, u32 kbytes) in salsa20_keysetup() argument
111 ctx->input[1] = U8TO32_LITTLE(k + 0); in salsa20_keysetup()
112 ctx->input[2] = U8TO32_LITTLE(k + 4); in salsa20_keysetup()
113 ctx->input[3] = U8TO32_LITTLE(k + 8); in salsa20_keysetup()
114 ctx->input[4] = U8TO32_LITTLE(k + 12); in salsa20_keysetup()
121 ctx->input[11] = U8TO32_LITTLE(k + 0); in salsa20_keysetup()
122 ctx->input[12] = U8TO32_LITTLE(k + 4); in salsa20_keysetup()
123 ctx->input[13] = U8TO32_LITTLE(k + 8); in salsa20_keysetup()
124 ctx->input[14] = U8TO32_LITTLE(k + 12); in salsa20_keysetup()
125 ctx->input[0] = U8TO32_LITTLE(constants + 0); in salsa20_keysetup()
[all …]
Dvmac.c321 static void vhash_abort(struct vmac_ctx *ctx) in vhash_abort() argument
323 ctx->polytmp[0] = ctx->polykey[0] ; in vhash_abort()
324 ctx->polytmp[1] = ctx->polykey[1] ; in vhash_abort()
325 ctx->first_block_processed = 0; in vhash_abort()
369 struct vmac_ctx *ctx) in vhash_update() argument
372 const u64 *kptr = (u64 *)ctx->nhkey; in vhash_update()
375 u64 pkh = ctx->polykey[0]; in vhash_update()
376 u64 pkl = ctx->polykey[1]; in vhash_update()
386 ch = ctx->polytmp[0]; in vhash_update()
387 cl = ctx->polytmp[1]; in vhash_update()
[all …]
Dtea.c50 struct tea_ctx *ctx = crypto_tfm_ctx(tfm); in tea_setkey() local
53 ctx->KEY[0] = le32_to_cpu(key[0]); in tea_setkey()
54 ctx->KEY[1] = le32_to_cpu(key[1]); in tea_setkey()
55 ctx->KEY[2] = le32_to_cpu(key[2]); in tea_setkey()
56 ctx->KEY[3] = le32_to_cpu(key[3]); in tea_setkey()
66 struct tea_ctx *ctx = crypto_tfm_ctx(tfm); in tea_encrypt() local
73 k0 = ctx->KEY[0]; in tea_encrypt()
74 k1 = ctx->KEY[1]; in tea_encrypt()
75 k2 = ctx->KEY[2]; in tea_encrypt()
76 k3 = ctx->KEY[3]; in tea_encrypt()
[all …]
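tea_setkey() above only byte-swaps the four key words into ctx->KEY; the cipher itself is small enough to show whole. A reference sketch of one TEA block encryption, the standard 32-round, delta = 0x9e3779b9 variant that the key loads above feed:

#include <stdint.h>

static void tea_encrypt_block(uint32_t v[2], const uint32_t k[4])
{
        uint32_t v0 = v[0], v1 = v[1], sum = 0;
        int i;

        for (i = 0; i < 32; i++) {
                sum += 0x9e3779b9;      /* delta, derived from the golden ratio */
                v0 += ((v1 << 4) + k[0]) ^ (v1 + sum) ^ ((v1 >> 5) + k[1]);
                v1 += ((v0 << 4) + k[2]) ^ (v0 + sum) ^ ((v0 >> 5) + k[3]);
        }
        v[0] = v0;
        v[1] = v1;
}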
Dghash-generic.c47 struct ghash_ctx *ctx = crypto_shash_ctx(tfm); in ghash_setkey() local
54 if (ctx->gf128) in ghash_setkey()
55 gf128mul_free_4k(ctx->gf128); in ghash_setkey()
56 ctx->gf128 = gf128mul_init_4k_lle((be128 *)key); in ghash_setkey()
57 if (!ctx->gf128) in ghash_setkey()
67 struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm); in ghash_update() local
70 if (!ctx->gf128) in ghash_update()
84 gf128mul_4k_lle((be128 *)dst, ctx->gf128); in ghash_update()
89 gf128mul_4k_lle((be128 *)dst, ctx->gf128); in ghash_update()
103 static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx) in ghash_flush() argument
[all …]
Dlrw.c48 int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak) in lrw_init_table() argument
53 if (ctx->table) in lrw_init_table()
54 gf128mul_free_64k(ctx->table); in lrw_init_table()
57 ctx->table = gf128mul_init_64k_bbe((be128 *)tweak); in lrw_init_table()
58 if (!ctx->table) in lrw_init_table()
64 ctx->mulinc[i] = tmp; in lrw_init_table()
65 gf128mul_64k_bbe(&ctx->mulinc[i], ctx->table); in lrw_init_table()
72 void lrw_free_table(struct lrw_table_ctx *ctx) in lrw_free_table() argument
74 if (ctx->table) in lrw_free_table()
75 gf128mul_free_64k(ctx->table); in lrw_free_table()
[all …]
Darc4.c32 struct arc4_ctx *ctx = crypto_tfm_ctx(tfm); in arc4_set_key() local
35 ctx->x = 1; in arc4_set_key()
36 ctx->y = 0; in arc4_set_key()
39 ctx->S[i] = i; in arc4_set_key()
42 u32 a = ctx->S[i]; in arc4_set_key()
44 ctx->S[i] = ctx->S[j]; in arc4_set_key()
45 ctx->S[j] = a; in arc4_set_key()
53 static void arc4_crypt(struct arc4_ctx *ctx, u8 *out, const u8 *in, in arc4_crypt() argument
56 u32 *const S = ctx->S; in arc4_crypt()
63 x = ctx->x; in arc4_crypt()
[all …]
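The arc4_set_key() hits above are the RC4 key-scheduling algorithm, and arc4_crypt() is the PRGA. A self-contained userspace sketch of the classic formulation; note the kernel version initializes ctx->x to 1 because its crypt loop pre-increments, but the keystream comes out the same:

#include <stddef.h>
#include <stdint.h>

struct rc4_ctx {
        uint8_t S[256];
        uint8_t x, y;
};

static void rc4_set_key(struct rc4_ctx *ctx, const uint8_t *key, size_t len)
{
        unsigned int i, j = 0, k = 0;

        ctx->x = ctx->y = 0;
        for (i = 0; i < 256; i++)
                ctx->S[i] = i;
        for (i = 0; i < 256; i++) {     /* KSA: key-dependent shuffle */
                uint8_t a = ctx->S[i];

                j = (uint8_t)(j + a + key[k]);
                ctx->S[i] = ctx->S[j];
                ctx->S[j] = a;
                if (++k >= len)
                        k = 0;
        }
}

static void rc4_crypt(struct rc4_ctx *ctx, uint8_t *out, const uint8_t *in,
                      size_t len)
{
        uint8_t x = ctx->x, y = ctx->y;

        while (len--) {                 /* PRGA: one keystream byte per input */
                uint8_t a, b;

                x = (uint8_t)(x + 1);
                a = ctx->S[x];
                y = (uint8_t)(y + a);
                b = ctx->S[y];
                ctx->S[x] = b;
                ctx->S[y] = a;
                *out++ = *in++ ^ ctx->S[(uint8_t)(a + b)];
        }
        ctx->x = x;
        ctx->y = y;
}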
Ddeflate.c45 static int deflate_comp_init(struct deflate_ctx *ctx) in deflate_comp_init() argument
48 struct z_stream_s *stream = &ctx->comp_stream; in deflate_comp_init()
70 static int deflate_decomp_init(struct deflate_ctx *ctx) in deflate_decomp_init() argument
73 struct z_stream_s *stream = &ctx->decomp_stream; in deflate_decomp_init()
92 static void deflate_comp_exit(struct deflate_ctx *ctx) in deflate_comp_exit() argument
94 zlib_deflateEnd(&ctx->comp_stream); in deflate_comp_exit()
95 vfree(ctx->comp_stream.workspace); in deflate_comp_exit()
98 static void deflate_decomp_exit(struct deflate_ctx *ctx) in deflate_decomp_exit() argument
100 zlib_inflateEnd(&ctx->decomp_stream); in deflate_decomp_exit()
101 vfree(ctx->decomp_stream.workspace); in deflate_decomp_exit()
[all …]
Dfcrypt.c238 const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm); in fcrypt_encrypt() local
245 F_ENCRYPT(X.r, X.l, ctx->sched[0x0]); in fcrypt_encrypt()
246 F_ENCRYPT(X.l, X.r, ctx->sched[0x1]); in fcrypt_encrypt()
247 F_ENCRYPT(X.r, X.l, ctx->sched[0x2]); in fcrypt_encrypt()
248 F_ENCRYPT(X.l, X.r, ctx->sched[0x3]); in fcrypt_encrypt()
249 F_ENCRYPT(X.r, X.l, ctx->sched[0x4]); in fcrypt_encrypt()
250 F_ENCRYPT(X.l, X.r, ctx->sched[0x5]); in fcrypt_encrypt()
251 F_ENCRYPT(X.r, X.l, ctx->sched[0x6]); in fcrypt_encrypt()
252 F_ENCRYPT(X.l, X.r, ctx->sched[0x7]); in fcrypt_encrypt()
253 F_ENCRYPT(X.r, X.l, ctx->sched[0x8]); in fcrypt_encrypt()
[all …]
/linux-4.1.27/drivers/crypto/amcc/
Dcrypto4xx_alg.c75 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); in crypto4xx_encrypt() local
77 ctx->direction = DIR_OUTBOUND; in crypto4xx_encrypt()
78 ctx->hash_final = 0; in crypto4xx_encrypt()
79 ctx->is_hash = 0; in crypto4xx_encrypt()
80 ctx->pd_ctl = 0x1; in crypto4xx_encrypt()
82 return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, in crypto4xx_encrypt()
84 get_dynamic_sa_iv_size(ctx)); in crypto4xx_encrypt()
89 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); in crypto4xx_decrypt() local
91 ctx->direction = DIR_INBOUND; in crypto4xx_decrypt()
92 ctx->hash_final = 0; in crypto4xx_decrypt()
[all …]
Dcrypto4xx_sa.c37 u32 get_dynamic_sa_offset_state_ptr_field(struct crypto4xx_ctx *ctx) in get_dynamic_sa_offset_state_ptr_field() argument
42 if (ctx->direction == DIR_INBOUND) in get_dynamic_sa_offset_state_ptr_field()
43 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_in)->sa_contents; in get_dynamic_sa_offset_state_ptr_field()
45 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_out)->sa_contents; in get_dynamic_sa_offset_state_ptr_field()
64 u32 get_dynamic_sa_iv_size(struct crypto4xx_ctx *ctx) in get_dynamic_sa_iv_size() argument
68 if (ctx->direction == DIR_INBOUND) in get_dynamic_sa_iv_size()
69 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_in)->sa_contents; in get_dynamic_sa_iv_size()
71 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_out)->sa_contents; in get_dynamic_sa_iv_size()
75 u32 get_dynamic_sa_offset_key_field(struct crypto4xx_ctx *ctx) in get_dynamic_sa_offset_key_field() argument
79 if (ctx->direction == DIR_INBOUND) in get_dynamic_sa_offset_key_field()
[all …]
/linux-4.1.27/fs/ext4/
Dcrypto.c67 void ext4_release_crypto_ctx(struct ext4_crypto_ctx *ctx) in ext4_release_crypto_ctx() argument
71 if (ctx->bounce_page) { in ext4_release_crypto_ctx()
72 if (ctx->flags & EXT4_BOUNCE_PAGE_REQUIRES_FREE_ENCRYPT_FL) in ext4_release_crypto_ctx()
73 __free_page(ctx->bounce_page); in ext4_release_crypto_ctx()
75 mempool_free(ctx->bounce_page, ext4_bounce_page_pool); in ext4_release_crypto_ctx()
76 ctx->bounce_page = NULL; in ext4_release_crypto_ctx()
78 ctx->control_page = NULL; in ext4_release_crypto_ctx()
79 if (ctx->flags & EXT4_CTX_REQUIRES_FREE_ENCRYPT_FL) { in ext4_release_crypto_ctx()
80 if (ctx->tfm) in ext4_release_crypto_ctx()
81 crypto_free_tfm(ctx->tfm); in ext4_release_crypto_ctx()
[all …]
Dcrypto_fname.c58 static int ext4_fname_encrypt(struct ext4_fname_crypto_ctx *ctx, in ext4_fname_encrypt() argument
65 struct crypto_ablkcipher *tfm = ctx->ctfm; in ext4_fname_encrypt()
69 int padding = 4 << (ctx->flags & EXT4_POLICY_FLAGS_PAD_MASK); in ext4_fname_encrypt()
72 if (iname->len <= 0 || iname->len > ctx->lim) in ext4_fname_encrypt()
78 ciphertext_len = (ciphertext_len > ctx->lim) in ext4_fname_encrypt()
79 ? ctx->lim : ciphertext_len; in ext4_fname_encrypt()
93 workbuf = kmap(ctx->workpage); in ext4_fname_encrypt()
105 sg_set_page(sg, ctx->workpage, PAGE_SIZE, 0); in ext4_fname_encrypt()
118 kunmap(ctx->workpage); in ext4_fname_encrypt()
135 static int ext4_fname_decrypt(struct ext4_fname_crypto_ctx *ctx, in ext4_fname_decrypt() argument
[all …]
Dcrypto_policy.c32 struct ext4_encryption_context ctx; in ext4_is_encryption_context_consistent_with_policy() local
34 EXT4_XATTR_NAME_ENCRYPTION_CONTEXT, &ctx, in ext4_is_encryption_context_consistent_with_policy()
35 sizeof(ctx)); in ext4_is_encryption_context_consistent_with_policy()
36 if (res != sizeof(ctx)) in ext4_is_encryption_context_consistent_with_policy()
38 return (memcmp(ctx.master_key_descriptor, policy->master_key_descriptor, in ext4_is_encryption_context_consistent_with_policy()
40 (ctx.flags == in ext4_is_encryption_context_consistent_with_policy()
42 (ctx.contents_encryption_mode == in ext4_is_encryption_context_consistent_with_policy()
44 (ctx.filenames_encryption_mode == in ext4_is_encryption_context_consistent_with_policy()
51 struct ext4_encryption_context ctx; in ext4_create_encryption_context_from_policy() local
54 ctx.format = EXT4_ENCRYPTION_CONTEXT_FORMAT_V1; in ext4_create_encryption_context_from_policy()
[all …]
/linux-4.1.27/drivers/media/platform/ti-vpe/
Dvpe.c409 static struct vpe_q_data *get_q_data(struct vpe_ctx *ctx, in get_q_data() argument
415 return &ctx->q_data[Q_DATA_SRC]; in get_q_data()
418 return &ctx->q_data[Q_DATA_DST]; in get_q_data()
494 #define GET_OFFSET_TOP(ctx, obj, reg) \ argument
495 ((obj)->res->start - ctx->dev->res->start + reg)
497 #define VPE_SET_MMR_ADB_HDR(ctx, hdr, regs, offset_a) \ argument
498 VPDMA_SET_MMR_ADB_HDR(ctx->mmr_adb, vpe_mmr_adb, hdr, regs, offset_a)
502 static void init_adb_hdrs(struct vpe_ctx *ctx) in init_adb_hdrs() argument
504 VPE_SET_MMR_ADB_HDR(ctx, out_fmt_hdr, out_fmt_reg, VPE_CLK_FORMAT_SELECT); in init_adb_hdrs()
505 VPE_SET_MMR_ADB_HDR(ctx, us1_hdr, us1_regs, VPE_US1_R0); in init_adb_hdrs()
[all …]
/linux-4.1.27/drivers/mmc/core/
Dslot-gpio.c47 struct mmc_gpio *ctx = devm_kzalloc(host->parent, in mmc_gpio_alloc() local
48 sizeof(*ctx) + 2 * len, GFP_KERNEL); in mmc_gpio_alloc()
50 if (ctx) { in mmc_gpio_alloc()
51 ctx->ro_label = ctx->cd_label + len; in mmc_gpio_alloc()
52 snprintf(ctx->cd_label, len, "%s cd", dev_name(host->parent)); in mmc_gpio_alloc()
53 snprintf(ctx->ro_label, len, "%s ro", dev_name(host->parent)); in mmc_gpio_alloc()
54 host->slot.handler_priv = ctx; in mmc_gpio_alloc()
58 return ctx ? 0 : -ENOMEM; in mmc_gpio_alloc()
63 struct mmc_gpio *ctx = host->slot.handler_priv; in mmc_gpio_get_ro() local
65 if (!ctx || !ctx->ro_gpio) in mmc_gpio_get_ro()
[all …]
/linux-4.1.27/arch/ia64/kernel/
Dperfmon.c99 #define PMC_OVFL_NOTIFY(ctx, i) ((ctx)->ctx_pmds[i].flags & PFM_REGFL_OVFL_NOTIFY) argument
126 #define CTX_USED_PMD(ctx, mask) (ctx)->ctx_used_pmds[0] |= (mask) argument
127 #define CTX_IS_USED_PMD(ctx, c) (((ctx)->ctx_used_pmds[0] & (1UL << (c))) != 0UL) argument
129 #define CTX_USED_MONITOR(ctx, mask) (ctx)->ctx_used_monitors[0] |= (mask) argument
131 #define CTX_USED_IBR(ctx,n) (ctx)->ctx_used_ibrs[(n)>>6] |= 1UL<< ((n) % 64) argument
132 #define CTX_USED_DBR(ctx,n) (ctx)->ctx_used_dbrs[(n)>>6] |= 1UL<< ((n) % 64) argument
133 #define CTX_USES_DBREGS(ctx) (((pfm_context_t *)(ctx))->ctx_fl_using_dbreg==1) argument
348 #define SET_LAST_CPU(ctx, v) (ctx)->ctx_last_cpu = (v) argument
349 #define GET_LAST_CPU(ctx) (ctx)->ctx_last_cpu argument
351 #define SET_LAST_CPU(ctx, v) do {} while(0) argument
[all …]
/linux-4.1.27/drivers/infiniband/core/
Ducma.c95 struct ucma_context *ctx; member
105 struct ucma_context *ctx; member
119 struct ucma_context *ctx; in _ucma_find_context() local
121 ctx = idr_find(&ctx_idr, id); in _ucma_find_context()
122 if (!ctx) in _ucma_find_context()
123 ctx = ERR_PTR(-ENOENT); in _ucma_find_context()
124 else if (ctx->file != file) in _ucma_find_context()
125 ctx = ERR_PTR(-EINVAL); in _ucma_find_context()
126 return ctx; in _ucma_find_context()
131 struct ucma_context *ctx; in ucma_get_ctx() local
[all …]
Ducm.c92 struct ib_ucm_context *ctx; member
127 struct ib_ucm_context *ctx; in ib_ucm_ctx_get() local
130 ctx = idr_find(&ctx_id_table, id); in ib_ucm_ctx_get()
131 if (!ctx) in ib_ucm_ctx_get()
132 ctx = ERR_PTR(-ENOENT); in ib_ucm_ctx_get()
133 else if (ctx->file != file) in ib_ucm_ctx_get()
134 ctx = ERR_PTR(-EINVAL); in ib_ucm_ctx_get()
136 atomic_inc(&ctx->ref); in ib_ucm_ctx_get()
139 return ctx; in ib_ucm_ctx_get()
142 static void ib_ucm_ctx_put(struct ib_ucm_context *ctx) in ib_ucm_ctx_put() argument
[all …]
/linux-4.1.27/include/linux/
Dww_mutex.h47 struct ww_acquire_ctx *ctx; member
89 lock->ctx = NULL; in ww_mutex_init()
119 static inline void ww_acquire_init(struct ww_acquire_ctx *ctx, in ww_acquire_init() argument
122 ctx->task = current; in ww_acquire_init()
123 ctx->stamp = atomic_long_inc_return(&ww_class->stamp); in ww_acquire_init()
124 ctx->acquired = 0; in ww_acquire_init()
126 ctx->ww_class = ww_class; in ww_acquire_init()
127 ctx->done_acquire = 0; in ww_acquire_init()
128 ctx->contending_lock = NULL; in ww_acquire_init()
131 debug_check_no_locks_freed((void *)ctx, sizeof(*ctx)); in ww_acquire_init()
[all …]
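ww_acquire_init() above stamps the context with a class-wide ticket; that stamp is what lets the wound/wait machinery decide which of two contending transactions backs off. A deliberately simplified kernel-side sketch of the acquire/backoff dance for two mutexes (demo_class and lock_two are made-up names, and a real multi-lock path needs the fuller retry handling described in Documentation/locking/ww-mutex-design.txt):

static DEFINE_WW_CLASS(demo_class);

static void lock_two(struct ww_mutex *a, struct ww_mutex *b)
{
	struct ww_acquire_ctx ctx;
	struct ww_mutex *first = a, *second = b;

	ww_acquire_init(&ctx, &demo_class);
	ww_mutex_lock(first, &ctx);	/* holding nothing yet: no -EDEADLK */
	while (ww_mutex_lock(second, &ctx) == -EDEADLK) {
		/* we lost against an older context: back off ... */
		ww_mutex_unlock(first);
		swap(first, second);
		/* ... then sleep until the contended lock is ours */
		ww_mutex_lock_slow(first, &ctx);
	}
	ww_acquire_done(&ctx);

	/* ... both objects may be touched here ... */

	ww_mutex_unlock(a);
	ww_mutex_unlock(b);
	ww_acquire_fini(&ctx);
}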
/linux-4.1.27/sound/soc/au1x/
Dac97c.c71 static inline unsigned long RD(struct au1xpsc_audio_data *ctx, int reg) in RD() argument
73 return __raw_readl(ctx->mmio + reg); in RD()
76 static inline void WR(struct au1xpsc_audio_data *ctx, int reg, unsigned long v) in WR() argument
78 __raw_writel(v, ctx->mmio + reg); in WR()
85 struct au1xpsc_audio_data *ctx = ac97_to_ctx(ac97); in au1xac97c_ac97_read() local
92 mutex_lock(&ctx->lock); in au1xac97c_ac97_read()
95 while ((RD(ctx, AC97_STATUS) & STAT_CP) && tmo--) in au1xac97c_ac97_read()
102 WR(ctx, AC97_CMDRESP, CMD_IDX(r) | CMD_READ); in au1xac97c_ac97_read()
108 while ((RD(ctx, AC97_STATUS) & STAT_CP) && tmo--) in au1xac97c_ac97_read()
110 data = RD(ctx, AC97_CMDRESP); in au1xac97c_ac97_read()
[all …]
Di2sc.c69 static inline unsigned long RD(struct au1xpsc_audio_data *ctx, int reg) in RD() argument
71 return __raw_readl(ctx->mmio + reg); in RD()
74 static inline void WR(struct au1xpsc_audio_data *ctx, int reg, unsigned long v) in WR() argument
76 __raw_writel(v, ctx->mmio + reg); in WR()
82 struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(cpu_dai); in au1xi2s_set_fmt() local
87 c = ctx->cfg; in au1xi2s_set_fmt()
130 ctx->cfg = c; in au1xi2s_set_fmt()
138 struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(dai); in au1xi2s_trigger() local
145 WR(ctx, I2S_ENABLE, EN_D | EN_CE); in au1xi2s_trigger()
146 WR(ctx, I2S_ENABLE, EN_CE); in au1xi2s_trigger()
[all …]
/linux-4.1.27/drivers/media/platform/exynos4-is/
Dfimc-m2m.c43 void fimc_m2m_job_finish(struct fimc_ctx *ctx, int vb_state) in fimc_m2m_job_finish() argument
47 if (!ctx || !ctx->fh.m2m_ctx) in fimc_m2m_job_finish()
50 src_vb = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx); in fimc_m2m_job_finish()
51 dst_vb = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx); in fimc_m2m_job_finish()
56 v4l2_m2m_job_finish(ctx->fimc_dev->m2m.m2m_dev, in fimc_m2m_job_finish()
57 ctx->fh.m2m_ctx); in fimc_m2m_job_finish()
62 static int fimc_m2m_shutdown(struct fimc_ctx *ctx) in fimc_m2m_shutdown() argument
64 struct fimc_dev *fimc = ctx->fimc_dev; in fimc_m2m_shutdown()
70 fimc_ctx_state_set(FIMC_CTX_SHUT, ctx); in fimc_m2m_shutdown()
73 !fimc_ctx_state_is_set(FIMC_CTX_SHUT, ctx), in fimc_m2m_shutdown()
[all …]
Dfimc-reg.c44 static u32 fimc_hw_get_in_flip(struct fimc_ctx *ctx) in fimc_hw_get_in_flip() argument
48 if (ctx->hflip) in fimc_hw_get_in_flip()
50 if (ctx->vflip) in fimc_hw_get_in_flip()
53 if (ctx->rotation <= 90) in fimc_hw_get_in_flip()
59 static u32 fimc_hw_get_target_flip(struct fimc_ctx *ctx) in fimc_hw_get_target_flip() argument
63 if (ctx->hflip) in fimc_hw_get_target_flip()
65 if (ctx->vflip) in fimc_hw_get_target_flip()
68 if (ctx->rotation <= 90) in fimc_hw_get_target_flip()
74 void fimc_hw_set_rotation(struct fimc_ctx *ctx) in fimc_hw_set_rotation() argument
77 struct fimc_dev *dev = ctx->fimc_dev; in fimc_hw_set_rotation()
[all …]
Dfimc-capture.c38 struct fimc_ctx *ctx = fimc->vid_cap.ctx; in fimc_capture_hw_init() local
42 if (ctx == NULL || ctx->s_frame.fmt == NULL) in fimc_capture_hw_init()
52 fimc_prepare_dma_offset(ctx, &ctx->d_frame); in fimc_capture_hw_init()
53 fimc_set_yuv_order(ctx); in fimc_capture_hw_init()
58 fimc_hw_set_camera_offset(fimc, &ctx->s_frame); in fimc_capture_hw_init()
60 ret = fimc_set_scaler_info(ctx); in fimc_capture_hw_init()
62 fimc_hw_set_input_path(ctx); in fimc_capture_hw_init()
63 fimc_hw_set_prescaler(ctx); in fimc_capture_hw_init()
64 fimc_hw_set_mainscaler(ctx); in fimc_capture_hw_init()
65 fimc_hw_set_target_format(ctx); in fimc_capture_hw_init()
[all …]
Dfimc-core.c216 int fimc_check_scaler_ratio(struct fimc_ctx *ctx, int sw, int sh, in fimc_check_scaler_ratio() argument
222 if (!ctx->scaler.enabled) in fimc_check_scaler_ratio()
249 int fimc_set_scaler_info(struct fimc_ctx *ctx) in fimc_set_scaler_info() argument
251 const struct fimc_variant *variant = ctx->fimc_dev->variant; in fimc_set_scaler_info()
252 struct device *dev = &ctx->fimc_dev->pdev->dev; in fimc_set_scaler_info()
253 struct fimc_scaler *sc = &ctx->scaler; in fimc_set_scaler_info()
254 struct fimc_frame *s_frame = &ctx->s_frame; in fimc_set_scaler_info()
255 struct fimc_frame *d_frame = &ctx->d_frame; in fimc_set_scaler_info()
259 if (ctx->rotation == 90 || ctx->rotation == 270) { in fimc_set_scaler_info()
317 struct fimc_ctx *ctx; in fimc_irq_handler() local
[all …]
/linux-4.1.27/fs/cifs/
Dasn1.c109 asn1_open(struct asn1_ctx *ctx, unsigned char *buf, unsigned int len) in asn1_open() argument
111 ctx->begin = buf; in asn1_open()
112 ctx->end = buf + len; in asn1_open()
113 ctx->pointer = buf; in asn1_open()
114 ctx->error = ASN1_ERR_NOERROR; in asn1_open()
118 asn1_octet_decode(struct asn1_ctx *ctx, unsigned char *ch) in asn1_octet_decode() argument
120 if (ctx->pointer >= ctx->end) { in asn1_octet_decode()
121 ctx->error = ASN1_ERR_DEC_EMPTY; in asn1_octet_decode()
124 *ch = *(ctx->pointer)++; in asn1_octet_decode()
130 asn1_enum_decode(struct asn1_ctx *ctx, __le32 *val)
[all …]
/linux-4.1.27/drivers/media/platform/
Dm2m-deinterlace.c158 struct deinterlace_ctx *ctx = priv; in deinterlace_job_ready() local
159 struct deinterlace_dev *pcdev = ctx->dev; in deinterlace_job_ready()
161 if ((v4l2_m2m_num_src_bufs_ready(ctx->m2m_ctx) > 0) in deinterlace_job_ready()
162 && (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) > 0) in deinterlace_job_ready()
163 && (atomic_read(&ctx->dev->busy) == 0)) { in deinterlace_job_ready()
175 struct deinterlace_ctx *ctx = priv; in deinterlace_job_abort() local
176 struct deinterlace_dev *pcdev = ctx->dev; in deinterlace_job_abort()
178 ctx->aborting = 1; in deinterlace_job_abort()
182 v4l2_m2m_job_finish(pcdev->m2m_dev, ctx->m2m_ctx); in deinterlace_job_abort()
187 struct deinterlace_ctx *ctx = priv; in deinterlace_lock() local
[all …]
Dvim2m.c187 static struct vim2m_q_data *get_q_data(struct vim2m_ctx *ctx, in get_q_data() argument
192 return &ctx->q_data[V4L2_M2M_SRC]; in get_q_data()
194 return &ctx->q_data[V4L2_M2M_DST]; in get_q_data()
202 static int device_process(struct vim2m_ctx *ctx, in device_process() argument
206 struct vim2m_dev *dev = ctx->dev; in device_process()
213 q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT); in device_process()
237 out_vb->v4l2_buf.sequence = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE)->sequence++; in device_process()
253 switch (ctx->mode) { in device_process()
351 struct vim2m_ctx *ctx = priv; in job_ready() local
353 if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) < ctx->translen in job_ready()
[all …]
Dmx2_emmaprp.c225 static struct emmaprp_q_data *get_q_data(struct emmaprp_ctx *ctx, in get_q_data() argument
230 return &(ctx->q_data[V4L2_M2M_SRC]); in get_q_data()
232 return &(ctx->q_data[V4L2_M2M_DST]); in get_q_data()
244 struct emmaprp_ctx *ctx = priv; in emmaprp_job_abort() local
245 struct emmaprp_dev *pcdev = ctx->dev; in emmaprp_job_abort()
247 ctx->aborting = 1; in emmaprp_job_abort()
251 v4l2_m2m_job_finish(pcdev->m2m_dev, ctx->m2m_ctx); in emmaprp_job_abort()
256 struct emmaprp_ctx *ctx = priv; in emmaprp_lock() local
257 struct emmaprp_dev *pcdev = ctx->dev; in emmaprp_lock()
263 struct emmaprp_ctx *ctx = priv; in emmaprp_unlock() local
[all …]
/linux-4.1.27/drivers/staging/unisys/visorchipset/
Dparser.c49 struct parser_context *ctx = NULL; in parser_init_guts() local
67 ctx = kzalloc(allocbytes, GFP_KERNEL|__GFP_NORETRY); in parser_init_guts()
68 if (!ctx) { in parser_init_guts()
75 ctx->allocbytes = allocbytes; in parser_init_guts()
76 ctx->param_bytes = bytes; in parser_init_guts()
77 ctx->curr = NULL; in parser_init_guts()
78 ctx->bytes_remaining = 0; in parser_init_guts()
79 ctx->byte_stream = FALSE; in parser_init_guts()
88 memcpy(ctx->data, p, bytes); in parser_init_guts()
95 if (visor_memregion_read(rgn, 0, ctx->data, bytes) < 0) { in parser_init_guts()
[all …]
/linux-4.1.27/drivers/phy/
Dphy-xgene.c605 static void cmu_wr(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_wr() argument
608 void __iomem *sds_base = ctx->sds_base; in cmu_wr()
622 static void cmu_rd(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_rd() argument
625 void __iomem *sds_base = ctx->sds_base; in cmu_rd()
636 static void cmu_toggle1to0(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_toggle1to0() argument
641 cmu_rd(ctx, cmu_type, reg, &val); in cmu_toggle1to0()
643 cmu_wr(ctx, cmu_type, reg, val); in cmu_toggle1to0()
644 cmu_rd(ctx, cmu_type, reg, &val); in cmu_toggle1to0()
646 cmu_wr(ctx, cmu_type, reg, val); in cmu_toggle1to0()
649 static void cmu_clrbits(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type, in cmu_clrbits() argument
[all …]
/linux-4.1.27/drivers/crypto/caam/
Dcaamrng.c88 static inline void rng_unmap_ctx(struct caam_rng_ctx *ctx) in rng_unmap_ctx() argument
90 struct device *jrdev = ctx->jrdev; in rng_unmap_ctx()
92 if (ctx->sh_desc_dma) in rng_unmap_ctx()
93 dma_unmap_single(jrdev, ctx->sh_desc_dma, in rng_unmap_ctx()
94 desc_bytes(ctx->sh_desc), DMA_TO_DEVICE); in rng_unmap_ctx()
95 rng_unmap_buf(jrdev, &ctx->bufs[0]); in rng_unmap_ctx()
96 rng_unmap_buf(jrdev, &ctx->bufs[1]); in rng_unmap_ctx()
117 static inline int submit_job(struct caam_rng_ctx *ctx, int to_current) in submit_job() argument
119 struct buf_data *bd = &ctx->bufs[!(to_current ^ ctx->current_buf)]; in submit_job()
120 struct device *jrdev = ctx->jrdev; in submit_job()
[all …]
Dcaamalg.c200 static void append_key_aead(u32 *desc, struct caam_ctx *ctx, in append_key_aead() argument
204 unsigned int enckeylen = ctx->enckeylen; in append_key_aead()
215 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in append_key_aead()
216 ctx->split_key_len, CLASS_2 | in append_key_aead()
218 append_key_as_imm(desc, (void *)ctx->key + in append_key_aead()
219 ctx->split_key_pad_len, enckeylen, in append_key_aead()
222 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 | in append_key_aead()
224 append_key(desc, ctx->key_dma + ctx->split_key_pad_len, in append_key_aead()
230 nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len + in append_key_aead()
242 static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx, in init_sh_desc_key_aead() argument
[all …]
Dcaamhash.c227 static inline void append_key_ahash(u32 *desc, struct caam_hash_ctx *ctx) in append_key_ahash() argument
229 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len, in append_key_ahash()
230 ctx->split_key_len, CLASS_2 | in append_key_ahash()
235 static inline void init_sh_desc_key_ahash(u32 *desc, struct caam_hash_ctx *ctx) in init_sh_desc_key_ahash() argument
241 if (ctx->split_key_len) { in init_sh_desc_key_ahash()
246 append_key_ahash(desc, ctx); in init_sh_desc_key_ahash()
279 struct caam_hash_ctx *ctx) in ahash_ctx_data_to_out() argument
281 init_sh_desc_key_ahash(desc, ctx); in ahash_ctx_data_to_out()
285 LDST_CLASS_2_CCB | ctx->ctx_len); in ahash_ctx_data_to_out()
298 int digestsize, struct caam_hash_ctx *ctx) in ahash_data_to_out() argument
[all …]
/linux-4.1.27/net/mac80211/
Dchan.c13 struct ieee80211_chanctx *ctx) in ieee80211_chanctx_num_assigned() argument
20 list_for_each_entry(sdata, &ctx->assigned_vifs, assigned_chanctx_list) in ieee80211_chanctx_num_assigned()
27 struct ieee80211_chanctx *ctx) in ieee80211_chanctx_num_reserved() argument
34 list_for_each_entry(sdata, &ctx->reserved_vifs, reserved_chanctx_list) in ieee80211_chanctx_num_reserved()
41 struct ieee80211_chanctx *ctx) in ieee80211_chanctx_refcount() argument
43 return ieee80211_chanctx_num_assigned(local, ctx) + in ieee80211_chanctx_refcount()
44 ieee80211_chanctx_num_reserved(local, ctx); in ieee80211_chanctx_refcount()
49 struct ieee80211_chanctx *ctx; in ieee80211_num_chanctx() local
54 list_for_each_entry(ctx, &local->chanctx_list, list) in ieee80211_num_chanctx()
82 struct ieee80211_chanctx *ctx, in ieee80211_chanctx_reserved_chandef() argument
[all …]
/linux-4.1.27/security/selinux/
Dxfrm.c61 static inline int selinux_authorizable_ctx(struct xfrm_sec_ctx *ctx) in selinux_authorizable_ctx() argument
63 return (ctx && in selinux_authorizable_ctx()
64 (ctx->ctx_doi == XFRM_SC_DOI_LSM) && in selinux_authorizable_ctx()
65 (ctx->ctx_alg == XFRM_SC_ALG_SELINUX)); in selinux_authorizable_ctx()
86 struct xfrm_sec_ctx *ctx = NULL; in selinux_xfrm_alloc_user() local
98 ctx = kmalloc(sizeof(*ctx) + str_len + 1, gfp); in selinux_xfrm_alloc_user()
99 if (!ctx) in selinux_xfrm_alloc_user()
102 ctx->ctx_doi = XFRM_SC_DOI_LSM; in selinux_xfrm_alloc_user()
103 ctx->ctx_alg = XFRM_SC_ALG_SELINUX; in selinux_xfrm_alloc_user()
104 ctx->ctx_len = str_len; in selinux_xfrm_alloc_user()
[all …]
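The kmalloc(sizeof(*ctx) + str_len + 1, gfp) above is the classic header-plus-payload single allocation: the context string is stored inline after the fixed struct so one kfree() releases both. A userspace sketch of the same pattern with a flexible array member (sec_ctx_alloc is a made-up name):

#include <stdlib.h>
#include <string.h>

struct sec_ctx {
        size_t len;
        char str[];             /* payload lives right after the header */
};

static struct sec_ctx *sec_ctx_alloc(const char *s)
{
        size_t len = strlen(s);
        struct sec_ctx *ctx = malloc(sizeof(*ctx) + len + 1);

        if (!ctx)
                return NULL;
        ctx->len = len;
        memcpy(ctx->str, s, len + 1);   /* includes the NUL */
        return ctx;                     /* one free(ctx) tears it all down */
}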
/linux-4.1.27/drivers/net/wireless/orinoco/
Dorinoco_usb.c287 static void ezusb_ctx_complete(struct request_context *ctx);
299 static void ezusb_request_context_put(struct request_context *ctx) in ezusb_request_context_put() argument
301 if (!atomic_dec_and_test(&ctx->refcount)) in ezusb_request_context_put()
304 WARN_ON(!ctx->done.done); in ezusb_request_context_put()
305 BUG_ON(ctx->outurb->status == -EINPROGRESS); in ezusb_request_context_put()
306 BUG_ON(timer_pending(&ctx->timer)); in ezusb_request_context_put()
307 usb_free_urb(ctx->outurb); in ezusb_request_context_put()
308 kfree(ctx->buf); in ezusb_request_context_put()
309 kfree(ctx); in ezusb_request_context_put()
323 struct request_context *ctx = (void *) _ctx; in ezusb_request_timerfn() local
[all …]
/linux-4.1.27/drivers/media/platform/s5p-jpeg/
Djpeg-core.c552 struct s5p_jpeg_ctx *ctx) in s5p_jpeg_adjust_fourcc_to_subsampling() argument
556 if (ctx->subsampling != V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY) { in s5p_jpeg_adjust_fourcc_to_subsampling()
563 switch (ctx->subsampling) { in s5p_jpeg_adjust_fourcc_to_subsampling()
614 static int s5p_jpeg_to_user_subsampling(struct s5p_jpeg_ctx *ctx) in s5p_jpeg_to_user_subsampling() argument
616 WARN_ON(ctx->subsampling > 3); in s5p_jpeg_to_user_subsampling()
618 switch (ctx->jpeg->variant->version) { in s5p_jpeg_to_user_subsampling()
620 if (ctx->subsampling > 2) in s5p_jpeg_to_user_subsampling()
622 return ctx->subsampling; in s5p_jpeg_to_user_subsampling()
625 if (ctx->subsampling > 3) in s5p_jpeg_to_user_subsampling()
627 return exynos3250_decoded_subsampling[ctx->subsampling]; in s5p_jpeg_to_user_subsampling()
[all …]
/linux-4.1.27/drivers/crypto/qat/qat_common/
Dqat_algs.c154 struct qat_alg_aead_ctx *ctx, in qat_alg_do_precomputes() argument
158 SHASH_DESC_ON_STACK(shash, ctx->hash_tfm); in qat_alg_do_precomputes()
162 int block_size = crypto_shash_blocksize(ctx->hash_tfm); in qat_alg_do_precomputes()
163 int digest_size = crypto_shash_digestsize(ctx->hash_tfm); in qat_alg_do_precomputes()
172 shash->tfm = ctx->hash_tfm; in qat_alg_do_precomputes()
203 switch (ctx->qat_hash_alg) { in qat_alg_do_precomputes()
232 offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8); in qat_alg_do_precomputes()
236 switch (ctx->qat_hash_alg) { in qat_alg_do_precomputes()
281 static int qat_alg_aead_init_enc_session(struct qat_alg_aead_ctx *ctx, in qat_alg_aead_init_enc_session() argument
285 struct crypto_aead *aead_tfm = __crypto_aead_cast(ctx->tfm); in qat_alg_aead_init_enc_session()
[all …]
Dqat_hal.c151 unsigned char ae, unsigned char ctx, in qat_hal_get_wakeup_event() argument
157 qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx); in qat_hal_get_wakeup_event()
328 unsigned int ctx, cur_ctx; in qat_hal_wr_indr_csr() local
332 for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) { in qat_hal_wr_indr_csr()
333 if (!(ctx_mask & (1 << ctx))) in qat_hal_wr_indr_csr()
335 qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx); in qat_hal_wr_indr_csr()
343 unsigned char ae, unsigned char ctx, in qat_hal_rd_indr_csr() argument
349 qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx); in qat_hal_rd_indr_csr()
358 unsigned int ctx, cur_ctx; in qat_hal_put_sig_event() local
361 for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) { in qat_hal_put_sig_event()
[all …]
/linux-4.1.27/arch/s390/crypto/
Dsha_common.c23 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in s390_sha_update() local
29 index = ctx->count & (bsize - 1); in s390_sha_update()
30 ctx->count += len; in s390_sha_update()
37 memcpy(ctx->buf + index, data, bsize - index); in s390_sha_update()
38 ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize); in s390_sha_update()
48 ret = crypt_s390_kimd(ctx->func, ctx->state, data, in s390_sha_update()
57 memcpy(ctx->buf + index , data, len); in s390_sha_update()
65 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in s390_sha_final() local
75 index = ctx->count & (bsize - 1); in s390_sha_final()
79 ctx->buf[index] = 0x80; in s390_sha_final()
[all …]
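The index = ctx->count & (bsize - 1) arithmetic above is the standard partial-block bookkeeping every hash update path needs: top up a carried tail first, stream whole blocks straight from the caller's buffer, then stash whatever is left. A generic sketch of that control flow, where process() stands in for the hardware kimd call or a software compression function:

#include <stdint.h>
#include <string.h>

#define BSIZE 64                /* power-of-two block size, e.g. SHA-256 */

struct hash_ctx {
        uint64_t count;         /* total bytes absorbed so far */
        uint8_t buf[BSIZE];     /* carried partial block */
};

static void hash_update(struct hash_ctx *ctx, const uint8_t *data, size_t len,
                        void (*process)(struct hash_ctx *, const uint8_t *,
                                        size_t))
{
        size_t index = ctx->count & (BSIZE - 1);

        ctx->count += len;

        if (index) {                            /* finish the saved block */
                size_t fill = BSIZE - index;

                if (len < fill) {
                        memcpy(ctx->buf + index, data, len);
                        return;
                }
                memcpy(ctx->buf + index, data, fill);
                process(ctx, ctx->buf, BSIZE);
                data += fill;
                len -= fill;
        }
        if (len >= BSIZE) {                     /* bulk full blocks */
                size_t n = len & ~(size_t)(BSIZE - 1);

                process(ctx, data, n);
                data += n;
                len -= n;
        }
        if (len)
                memcpy(ctx->buf, data, len);    /* keep the new tail */
}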
Dsha512_s390.c27 struct s390_sha_ctx *ctx = shash_desc_ctx(desc); in sha512_init() local
29 *(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL; in sha512_init()
30 *(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL; in sha512_init()
31 *(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL; in sha512_init()
32 *(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL; in sha512_init()
33 *(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL; in sha512_init()
34 *(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL; in sha512_init()
35 *(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL; in sha512_init()
36 *(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL; in sha512_init()
37 ctx->count = 0; in sha512_init()
[all …]
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/mxm/
Dnv50.c43 struct context *ctx = info; in mxm_match_tmds_partner() local
48 desc.dig_conn == ctx->desc.dig_conn) in mxm_match_tmds_partner()
57 struct context *ctx = info; in mxm_match_dcb() local
60 mxms_output_device(mxm, data, &ctx->desc); in mxm_match_dcb()
63 if ((ctx->outp[0] & 0x0000000f) != ctx->desc.outp_type) in mxm_match_dcb()
72 u8 link = mxm_sor_map(bios, ctx->desc.dig_conn); in mxm_match_dcb()
73 if ((ctx->outp[0] & 0x0f000000) != (link & 0x0f) << 24) in mxm_match_dcb()
78 if ((link & ((ctx->outp[1] & 0x00000030) >> 4)) != link) in mxm_match_dcb()
88 if (ctx->desc.outp_type == 6 && ctx->desc.conn_type == 6 && in mxm_match_dcb()
89 mxms_foreach(mxm, 0x01, mxm_match_tmds_partner, ctx)) { in mxm_match_dcb()
[all …]
/linux-4.1.27/drivers/crypto/ux500/cryp/
Dcryp_core.c178 static void add_session_id(struct cryp_ctx *ctx) in add_session_id() argument
187 ctx->session_id = atomic_read(&session_id); in add_session_id()
192 struct cryp_ctx *ctx; in cryp_interrupt_handler() local
204 ctx = device_data->current_ctx; in cryp_interrupt_handler()
206 if (ctx == NULL) { in cryp_interrupt_handler()
207 BUG_ON(!ctx); in cryp_interrupt_handler()
211 dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen, in cryp_interrupt_handler()
217 if (ctx->outlen / ctx->blocksize > 0) { in cryp_interrupt_handler()
218 count = ctx->blocksize / 4; in cryp_interrupt_handler()
220 readsl(&device_data->base->dout, ctx->outdata, count); in cryp_interrupt_handler()
[all …]
Dcryp.c290 struct cryp_device_context *ctx, in cryp_save_device_context() argument
309 ctx->din = readl_relaxed(&src_reg->din); in cryp_save_device_context()
311 ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK; in cryp_save_device_context()
315 ctx->key_4_l = readl_relaxed(&src_reg->key_4_l); in cryp_save_device_context()
316 ctx->key_4_r = readl_relaxed(&src_reg->key_4_r); in cryp_save_device_context()
319 ctx->key_3_l = readl_relaxed(&src_reg->key_3_l); in cryp_save_device_context()
320 ctx->key_3_r = readl_relaxed(&src_reg->key_3_r); in cryp_save_device_context()
323 ctx->key_2_l = readl_relaxed(&src_reg->key_2_l); in cryp_save_device_context()
324 ctx->key_2_r = readl_relaxed(&src_reg->key_2_r); in cryp_save_device_context()
327 ctx->key_1_l = readl_relaxed(&src_reg->key_1_l); in cryp_save_device_context()
[all …]
/linux-4.1.27/net/ipv4/netfilter/
Dnf_nat_snmp_basic.c152 static void asn1_open(struct asn1_ctx *ctx, in asn1_open() argument
156 ctx->begin = buf; in asn1_open()
157 ctx->end = buf + len; in asn1_open()
158 ctx->pointer = buf; in asn1_open()
159 ctx->error = ASN1_ERR_NOERROR; in asn1_open()
162 static unsigned char asn1_octet_decode(struct asn1_ctx *ctx, unsigned char *ch) in asn1_octet_decode() argument
164 if (ctx->pointer >= ctx->end) { in asn1_octet_decode()
165 ctx->error = ASN1_ERR_DEC_EMPTY; in asn1_octet_decode()
168 *ch = *(ctx->pointer)++; in asn1_octet_decode()
172 static unsigned char asn1_tag_decode(struct asn1_ctx *ctx, unsigned int *tag) in asn1_tag_decode() argument
[all …]
/linux-4.1.27/arch/x86/crypto/sha-mb/
Dsha1_mb.c91 static inline struct ahash_request *cast_mcryptd_ctx_to_req(struct mcryptd_hash_request_ctx *ctx) in cast_mcryptd_ctx_to_req() argument
93 return container_of((void *) ctx, struct ahash_request, __ctx); in cast_mcryptd_ctx_to_req()
137 …ic struct sha1_hash_ctx *sha1_ctx_mgr_resubmit(struct sha1_ctx_mgr *mgr, struct sha1_hash_ctx *ctx) in sha1_ctx_mgr_resubmit() argument
139 while (ctx) { in sha1_ctx_mgr_resubmit()
140 if (ctx->status & HASH_CTX_STS_COMPLETE) { in sha1_ctx_mgr_resubmit()
142 ctx->status = HASH_CTX_STS_COMPLETE; in sha1_ctx_mgr_resubmit()
143 return ctx; in sha1_ctx_mgr_resubmit()
150 if (ctx->partial_block_buffer_length == 0 && in sha1_ctx_mgr_resubmit()
151 ctx->incoming_buffer_length) { in sha1_ctx_mgr_resubmit()
153 const void *buffer = ctx->incoming_buffer; in sha1_ctx_mgr_resubmit()
[all …]
Dsha_mb_ctx.h83 #define hash_ctx_user_data(ctx) ((ctx)->user_data) argument
84 #define hash_ctx_digest(ctx) ((ctx)->job.result_digest) argument
85 #define hash_ctx_processing(ctx) ((ctx)->status & HASH_CTX_STS_PROCESSING) argument
86 #define hash_ctx_complete(ctx) ((ctx)->status == HASH_CTX_STS_COMPLETE) argument
87 #define hash_ctx_status(ctx) ((ctx)->status) argument
88 #define hash_ctx_error(ctx) ((ctx)->error) argument
89 #define hash_ctx_init(ctx) \ argument
91 (ctx)->error = HASH_CTX_ERROR_NONE; \
92 (ctx)->status = HASH_CTX_STS_COMPLETE; \
/linux-4.1.27/lib/mpi/
Dmpih-mul.c337 struct karatsuba_ctx *ctx) in mpihelp_mul_karatsuba_case() argument
341 if (!ctx->tspace || ctx->tspace_size < vsize) { in mpihelp_mul_karatsuba_case()
342 if (ctx->tspace) in mpihelp_mul_karatsuba_case()
343 mpi_free_limb_space(ctx->tspace); in mpihelp_mul_karatsuba_case()
344 ctx->tspace = mpi_alloc_limb_space(2 * vsize); in mpihelp_mul_karatsuba_case()
345 if (!ctx->tspace) in mpihelp_mul_karatsuba_case()
347 ctx->tspace_size = vsize; in mpihelp_mul_karatsuba_case()
350 MPN_MUL_N_RECURSE(prodp, up, vp, vsize, ctx->tspace); in mpihelp_mul_karatsuba_case()
356 if (!ctx->tp || ctx->tp_size < vsize) { in mpihelp_mul_karatsuba_case()
357 if (ctx->tp) in mpihelp_mul_karatsuba_case()
[all …]
/linux-4.1.27/drivers/gpu/drm/i915/
Di915_gem_context.c138 struct intel_context *ctx = container_of(ctx_ref, in i915_gem_context_free() local
139 typeof(*ctx), ref); in i915_gem_context_free()
141 trace_i915_context_free(ctx); in i915_gem_context_free()
144 intel_lr_context_free(ctx); in i915_gem_context_free()
146 i915_ppgtt_put(ctx->ppgtt); in i915_gem_context_free()
148 if (ctx->legacy_hw_ctx.rcs_state) in i915_gem_context_free()
149 drm_gem_object_unreference(&ctx->legacy_hw_ctx.rcs_state->base); in i915_gem_context_free()
150 list_del(&ctx->link); in i915_gem_context_free()
151 kfree(ctx); in i915_gem_context_free()
189 struct intel_context *ctx; in __create_hw_context() local
[all …]
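i915_gem_context_free() above is a textbook kref release: the refcount is embedded in the context, and container_of() walks back from the kref to the enclosing object before tearing it down. A userspace sketch of just that recovery step (struct demo_context and kref_like are made-up stand-ins for the kernel types):

#include <stddef.h>
#include <stdlib.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct kref_like {
        int refcount;           /* stand-in for the kernel's struct kref */
};

struct demo_context {
        int id;
        struct kref_like ref;
};

static void demo_context_free(struct kref_like *ref)
{
        struct demo_context *ctx =
                container_of(ref, struct demo_context, ref);

        /* release resources owned by ctx, then the object itself */
        free(ctx);
}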
/linux-4.1.27/drivers/ata/
Dahci_xgene.c98 static int xgene_ahci_init_memram(struct xgene_ahci_context *ctx) in xgene_ahci_init_memram() argument
100 dev_dbg(ctx->dev, "Release memory from shutdown\n"); in xgene_ahci_init_memram()
101 writel(0x0, ctx->csr_diag + CFG_MEM_RAM_SHUTDOWN); in xgene_ahci_init_memram()
102 readl(ctx->csr_diag + CFG_MEM_RAM_SHUTDOWN); /* Force a barrier */ in xgene_ahci_init_memram()
104 if (readl(ctx->csr_diag + BLOCK_MEM_RDY) != 0xFFFFFFFF) { in xgene_ahci_init_memram()
105 dev_err(ctx->dev, "failed to release memory from shutdown\n"); in xgene_ahci_init_memram()
200 struct xgene_ahci_context *ctx = hpriv->plat_data; in xgene_ahci_qc_issue() local
209 if (ctx->class[ap->port_no] == ATA_DEV_PMP) { in xgene_ahci_qc_issue()
216 if (unlikely((ctx->last_cmd[ap->port_no] == ATA_CMD_ID_ATA) || in xgene_ahci_qc_issue()
217 (ctx->last_cmd[ap->port_no] == ATA_CMD_PACKET) || in xgene_ahci_qc_issue()
[all …]
/linux-4.1.27/drivers/media/platform/s5p-g2d/
Dg2d.c91 static struct g2d_frame *get_frame(struct g2d_ctx *ctx, in get_frame() argument
96 return &ctx->in; in get_frame()
98 return &ctx->out; in get_frame()
108 struct g2d_ctx *ctx = vb2_get_drv_priv(vq); in g2d_queue_setup() local
109 struct g2d_frame *f = get_frame(ctx, vq->type); in g2d_queue_setup()
116 alloc_ctxs[0] = ctx->dev->alloc_ctx; in g2d_queue_setup()
126 struct g2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue); in g2d_buf_prepare() local
127 struct g2d_frame *f = get_frame(ctx, vb->vb2_queue->type); in g2d_buf_prepare()
137 struct g2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue); in g2d_buf_queue() local
138 v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vb); in g2d_buf_queue()
[all …]
/linux-4.1.27/drivers/clk/samsung/
Dclk.c58 struct samsung_clk_provider *ctx; in samsung_clk_init() local
62 ctx = kzalloc(sizeof(struct samsung_clk_provider), GFP_KERNEL); in samsung_clk_init()
63 if (!ctx) in samsung_clk_init()
73 ctx->reg_base = base; in samsung_clk_init()
74 ctx->clk_data.clks = clk_table; in samsung_clk_init()
75 ctx->clk_data.clk_num = nr_clks; in samsung_clk_init()
76 spin_lock_init(&ctx->lock); in samsung_clk_init()
78 return ctx; in samsung_clk_init()
82 struct samsung_clk_provider *ctx) in samsung_clk_of_add_provider() argument
86 &ctx->clk_data)) in samsung_clk_of_add_provider()
[all …]
/linux-4.1.27/arch/x86/include/asm/crypto/
Dcamellia.h40 asmlinkage void __camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst,
42 asmlinkage void camellia_dec_blk(struct camellia_ctx *ctx, u8 *dst,
46 asmlinkage void __camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst,
48 asmlinkage void camellia_dec_blk_2way(struct camellia_ctx *ctx, u8 *dst,
52 asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
54 asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
57 asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
59 asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
62 asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
64 asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
[all …]
Dserpent-sse2.h11 asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
13 asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
16 static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, in serpent_enc_blk_xway() argument
19 __serpent_enc_blk_4way(ctx, dst, src, false); in serpent_enc_blk_xway()
22 static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, in serpent_enc_blk_xway_xor() argument
25 __serpent_enc_blk_4way(ctx, dst, src, true); in serpent_enc_blk_xway_xor()
28 static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, in serpent_dec_blk_xway() argument
31 serpent_dec_blk_4way(ctx, dst, src); in serpent_dec_blk_xway()
38 asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
40 asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst,
[all …]
/linux-4.1.27/drivers/iommu/
Dmsm_iommu.c126 static void __reset_context(void __iomem *base, int ctx) in __reset_context() argument
128 SET_BPRCOSH(base, ctx, 0); in __reset_context()
129 SET_BPRCISH(base, ctx, 0); in __reset_context()
130 SET_BPRCNSH(base, ctx, 0); in __reset_context()
131 SET_BPSHCFG(base, ctx, 0); in __reset_context()
132 SET_BPMTCFG(base, ctx, 0); in __reset_context()
133 SET_ACTLR(base, ctx, 0); in __reset_context()
134 SET_SCTLR(base, ctx, 0); in __reset_context()
135 SET_FSRRESTORE(base, ctx, 0); in __reset_context()
136 SET_TTBR0(base, ctx, 0); in __reset_context()
[all …]
/linux-4.1.27/drivers/power/reset/
Dsyscon-reboot.c37 struct syscon_reboot_context *ctx = in syscon_restart_handle() local
42 regmap_write(ctx->map, ctx->offset, ctx->mask); in syscon_restart_handle()
52 struct syscon_reboot_context *ctx; in syscon_reboot_probe() local
56 ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL); in syscon_reboot_probe()
57 if (!ctx) in syscon_reboot_probe()
60 ctx->map = syscon_regmap_lookup_by_phandle(dev->of_node, "regmap"); in syscon_reboot_probe()
61 if (IS_ERR(ctx->map)) in syscon_reboot_probe()
62 return PTR_ERR(ctx->map); in syscon_reboot_probe()
64 if (of_property_read_u32(pdev->dev.of_node, "offset", &ctx->offset)) in syscon_reboot_probe()
67 if (of_property_read_u32(pdev->dev.of_node, "mask", &ctx->mask)) in syscon_reboot_probe()
[all …]
Dxgene-reboot.c47 struct xgene_reboot_context *ctx = in xgene_restart_handler() local
52 writel(ctx->mask, ctx->csr); in xgene_restart_handler()
56 dev_emerg(ctx->dev, "Unable to restart system\n"); in xgene_restart_handler()
63 struct xgene_reboot_context *ctx; in xgene_reboot_probe() local
67 ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL); in xgene_reboot_probe()
68 if (!ctx) in xgene_reboot_probe()
71 ctx->csr = of_iomap(dev->of_node, 0); in xgene_reboot_probe()
72 if (!ctx->csr) { in xgene_reboot_probe()
77 if (of_property_read_u32(dev->of_node, "mask", &ctx->mask)) in xgene_reboot_probe()
78 ctx->mask = 0xFFFFFFFF; in xgene_reboot_probe()
[all …]
/linux-4.1.27/drivers/acpi/apei/
Derst.c122 static int erst_exec_load_var1(struct apei_exec_context *ctx, in erst_exec_load_var1() argument
125 return __apei_exec_read_register(entry, &ctx->var1); in erst_exec_load_var1()
128 static int erst_exec_load_var2(struct apei_exec_context *ctx, in erst_exec_load_var2() argument
131 return __apei_exec_read_register(entry, &ctx->var2); in erst_exec_load_var2()
134 static int erst_exec_store_var1(struct apei_exec_context *ctx, in erst_exec_store_var1() argument
137 return __apei_exec_write_register(entry, ctx->var1); in erst_exec_store_var1()
140 static int erst_exec_add(struct apei_exec_context *ctx, in erst_exec_add() argument
143 ctx->var1 += ctx->var2; in erst_exec_add()
147 static int erst_exec_subtract(struct apei_exec_context *ctx, in erst_exec_subtract() argument
150 ctx->var1 -= ctx->var2; in erst_exec_subtract()
[all …]
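The erst_exec_* handlers above are the opcode set of a tiny interpreter: the firmware-provided action table serializes instructions, and each handler reads or combines the two accumulators carried in the execution context. A stripped-down sketch of that dispatch shape (the opcodes, table and run() here are illustrative, not the actual APEI layout):

#include <stddef.h>
#include <stdint.h>

struct exec_ctx {
        uint64_t var1, var2;    /* the two accumulators the handlers share */
};

typedef int (*ins_func)(struct exec_ctx *ctx);

static int ins_add(struct exec_ctx *ctx) { ctx->var1 += ctx->var2; return 0; }
static int ins_sub(struct exec_ctx *ctx) { ctx->var1 -= ctx->var2; return 0; }

static const ins_func ins_table[] = { ins_add, ins_sub };

static int run(struct exec_ctx *ctx, const uint8_t *prog, size_t n)
{
        size_t i;

        for (i = 0; i < n; i++) {
                int rc = ins_table[prog[i]](ctx);

                if (rc)
                        return rc;      /* handlers can abort the program */
        }
        return 0;
}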
Dapei-internal.h14 typedef int (*apei_exec_ins_func_t)(struct apei_exec_context *ctx,
37 void apei_exec_ctx_init(struct apei_exec_context *ctx,
43 static inline void apei_exec_ctx_set_input(struct apei_exec_context *ctx, in apei_exec_ctx_set_input() argument
46 ctx->value = input; in apei_exec_ctx_set_input()
49 static inline u64 apei_exec_ctx_get_output(struct apei_exec_context *ctx) in apei_exec_ctx_get_output() argument
51 return ctx->value; in apei_exec_ctx_get_output()
54 int __apei_exec_run(struct apei_exec_context *ctx, u8 action, bool optional);
56 static inline int apei_exec_run(struct apei_exec_context *ctx, u8 action) in apei_exec_run() argument
58 return __apei_exec_run(ctx, action, 0); in apei_exec_run()
62 static inline int apei_exec_run_optional(struct apei_exec_context *ctx, u8 action) in apei_exec_run_optional() argument
[all …]
Dapei-base.c56 void apei_exec_ctx_init(struct apei_exec_context *ctx, in apei_exec_ctx_init() argument
62 ctx->ins_table = ins_table; in apei_exec_ctx_init()
63 ctx->instructions = instructions; in apei_exec_ctx_init()
64 ctx->action_table = action_table; in apei_exec_ctx_init()
65 ctx->entries = entries; in apei_exec_ctx_init()
82 int apei_exec_read_register(struct apei_exec_context *ctx, in apei_exec_read_register() argument
91 ctx->value = val; in apei_exec_read_register()
97 int apei_exec_read_register_value(struct apei_exec_context *ctx, in apei_exec_read_register_value() argument
102 rc = apei_exec_read_register(ctx, entry); in apei_exec_read_register_value()
105 ctx->value = (ctx->value == entry->value); in apei_exec_read_register_value()
[all …]
/linux-4.1.27/drivers/vfio/pci/
Dvfio_pci_intrs.c36 eventfd_signal(vdev->ctx[0].trigger, 1); in vfio_send_intx_eventfd()
55 } else if (!vdev->ctx[0].masked) { in vfio_pci_intx_mask()
65 vdev->ctx[0].masked = true; in vfio_pci_intx_mask()
93 } else if (vdev->ctx[0].masked && !vdev->virq_disabled) { in vfio_pci_intx_unmask_handler()
105 vdev->ctx[0].masked = (ret > 0); in vfio_pci_intx_unmask_handler()
129 vdev->ctx[0].masked = true; in vfio_intx_handler()
131 } else if (!vdev->ctx[0].masked && /* may be shared */ in vfio_intx_handler()
133 vdev->ctx[0].masked = true; in vfio_intx_handler()
153 vdev->ctx = kzalloc(sizeof(struct vfio_pci_irq_ctx), GFP_KERNEL); in vfio_intx_enable()
154 if (!vdev->ctx) in vfio_intx_enable()
[all …]
/linux-4.1.27/drivers/firewire/
Dohci.c114 typedef int (*descriptor_callback_t)(struct context *ctx,
665 static inline dma_addr_t ar_buffer_bus(struct ar_context *ctx, unsigned int i) in ar_buffer_bus() argument
667 return page_private(ctx->pages[i]); in ar_buffer_bus()
670 static void ar_context_link_page(struct ar_context *ctx, unsigned int index) in ar_context_link_page() argument
674 d = &ctx->descriptors[index]; in ar_context_link_page()
680 d = &ctx->descriptors[ctx->last_buffer_index]; in ar_context_link_page()
683 ctx->last_buffer_index = index; in ar_context_link_page()
685 reg_write(ctx->ohci, CONTROL_SET(ctx->regs), CONTEXT_WAKE); in ar_context_link_page()
688 static void ar_context_release(struct ar_context *ctx) in ar_context_release() argument
692 vunmap(ctx->buffer); in ar_context_release()
[all …]
Dcore-iso.c167 struct fw_iso_context *ctx; in fw_iso_context_create() local
169 ctx = card->driver->allocate_iso_context(card, in fw_iso_context_create()
171 if (IS_ERR(ctx)) in fw_iso_context_create()
172 return ctx; in fw_iso_context_create()
174 ctx->card = card; in fw_iso_context_create()
175 ctx->type = type; in fw_iso_context_create()
176 ctx->channel = channel; in fw_iso_context_create()
177 ctx->speed = speed; in fw_iso_context_create()
178 ctx->header_size = header_size; in fw_iso_context_create()
179 ctx->callback.sc = callback; in fw_iso_context_create()
[all …]
/linux-4.1.27/drivers/video/fbdev/omap2/
Dvrfb.c83 static void omap2_sms_write_rot_control(u32 val, unsigned ctx) in omap2_sms_write_rot_control() argument
85 __raw_writel(val, vrfb_base + SMS_ROT_CONTROL(ctx)); in omap2_sms_write_rot_control()
88 static void omap2_sms_write_rot_size(u32 val, unsigned ctx) in omap2_sms_write_rot_size() argument
90 __raw_writel(val, vrfb_base + SMS_ROT_SIZE(ctx)); in omap2_sms_write_rot_size()
93 static void omap2_sms_write_rot_physical_ba(u32 val, unsigned ctx) in omap2_sms_write_rot_physical_ba() argument
95 __raw_writel(val, vrfb_base + SMS_ROT_PHYSICAL_BA(ctx)); in omap2_sms_write_rot_physical_ba()
98 static inline void restore_hw_context(int ctx) in restore_hw_context() argument
100 omap2_sms_write_rot_control(ctxs[ctx].control, ctx); in restore_hw_context()
101 omap2_sms_write_rot_size(ctxs[ctx].size, ctx); in restore_hw_context()
102 omap2_sms_write_rot_physical_ba(ctxs[ctx].physical_ba, ctx); in restore_hw_context()
[all …]
/linux-4.1.27/fs/xfs/
Dxfs_log_cil.c283 struct xfs_cil_ctx *ctx = cil->xc_ctx; in xlog_cil_insert_items() local
315 ctx->nvecs += diff_iovecs; in xlog_cil_insert_items()
319 list_splice_init(&tp->t_busy, &ctx->busy_extents); in xlog_cil_insert_items()
328 if (ctx->ticket->t_curr_res == 0) { in xlog_cil_insert_items()
329 ctx->ticket->t_curr_res = ctx->ticket->t_unit_res; in xlog_cil_insert_items()
330 tp->t_ticket->t_curr_res -= ctx->ticket->t_unit_res; in xlog_cil_insert_items()
335 if (len > 0 && (ctx->space_used / iclog_space != in xlog_cil_insert_items()
336 (ctx->space_used + len) / iclog_space)) { in xlog_cil_insert_items()
342 ctx->ticket->t_unit_res += hdrs; in xlog_cil_insert_items()
343 ctx->ticket->t_curr_res += hdrs; in xlog_cil_insert_items()
[all …]
/linux-4.1.27/drivers/video/fbdev/omap2/dss/
Ddpi.c158 struct dpi_clk_calc_ctx *ctx = data; in dpi_calc_dispc_cb() local
165 if (ctx->pck_min >= 100000000) { in dpi_calc_dispc_cb()
173 ctx->dispc_cinfo.lck_div = lckd; in dpi_calc_dispc_cb()
174 ctx->dispc_cinfo.pck_div = pckd; in dpi_calc_dispc_cb()
175 ctx->dispc_cinfo.lck = lck; in dpi_calc_dispc_cb()
176 ctx->dispc_cinfo.pck = pck; in dpi_calc_dispc_cb()
185 struct dpi_clk_calc_ctx *ctx = data; in dpi_calc_hsdiv_cb() local
192 if (m_dispc > 1 && m_dispc % 2 != 0 && ctx->pck_min >= 100000000) in dpi_calc_hsdiv_cb()
195 ctx->dsi_cinfo.mX[HSDIV_DISPC] = m_dispc; in dpi_calc_hsdiv_cb()
196 ctx->dsi_cinfo.clkout[HSDIV_DISPC] = dispc; in dpi_calc_hsdiv_cb()
[all …]
/linux-4.1.27/arch/frv/mm/
Dmmu-context.c44 static unsigned get_cxn(mm_context_t *ctx) in get_cxn() argument
50 if (!list_empty(&ctx->id_link)) { in get_cxn()
51 list_move_tail(&ctx->id_link, &cxn_owners_lru); in get_cxn()
78 ctx->id = cxn; in get_cxn()
79 list_add_tail(&ctx->id_link, &cxn_owners_lru); in get_cxn()
82 return ctx->id; in get_cxn()
90 void change_mm_context(mm_context_t *old, mm_context_t *ctx, pgd_t *pgd) in change_mm_context() argument
106 get_cxn(ctx); in change_mm_context()
107 ctx->id_busy = 1; in change_mm_context()
110 asm volatile("movgs %0,cxnr" : : "r"(ctx->id)); in change_mm_context()
[all …]
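get_cxn() keeps context numbers on an LRU list and bumps an already-owned id to the tail on reuse. A user-space sketch of that intrusive-list LRU bookkeeping, with kernel-style list helpers written out by hand:

#include <stddef.h>
#include <stdio.h>

/* Minimal intrusive doubly-linked list, kernel-style. */
struct list_head { struct list_head *prev, *next; };

static void list_init(struct list_head *h) { h->prev = h->next = h; }

static void list_add_tail(struct list_head *e, struct list_head *h)
{
	e->prev = h->prev;
	e->next = h;
	h->prev->next = e;
	h->prev = e;
}

static void list_move_tail(struct list_head *e, struct list_head *h)
{
	e->prev->next = e->next;        /* unlink */
	e->next->prev = e->prev;
	list_add_tail(e, h);            /* re-add as most recently used */
}

struct mm_ctx {
	unsigned int id;
	struct list_head id_link;
};

static struct list_head lru;        /* head->next is least recently used */

int main(void)
{
	struct mm_ctx a = { .id = 1 }, b = { .id = 2 };

	list_init(&lru);
	list_add_tail(&a.id_link, &lru);
	list_add_tail(&b.id_link, &lru);

	/* Re-using a's id just bumps it to the tail, like get_cxn() does. */
	list_move_tail(&a.id_link, &lru);

	struct mm_ctx *victim = (struct mm_ctx *)
		((char *)lru.next - offsetof(struct mm_ctx, id_link));
	printf("least recently used id: %u\n", victim->id);
	return 0;
}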
/linux-4.1.27/drivers/staging/ozwpan/
Dozcdev.c50 struct oz_serial_ctx *ctx; in oz_cdev_claim_ctx() local
53 ctx = (struct oz_serial_ctx *) pd->app_ctx[OZ_APPID_SERIAL]; in oz_cdev_claim_ctx()
54 if (ctx) in oz_cdev_claim_ctx()
55 atomic_inc(&ctx->ref_count); in oz_cdev_claim_ctx()
57 return ctx; in oz_cdev_claim_ctx()
63 static void oz_cdev_release_ctx(struct oz_serial_ctx *ctx) in oz_cdev_release_ctx() argument
65 if (atomic_dec_and_test(&ctx->ref_count)) { in oz_cdev_release_ctx()
67 kfree(ctx); in oz_cdev_release_ctx()
102 struct oz_serial_ctx *ctx; in oz_cdev_read() local
111 ctx = oz_cdev_claim_ctx(pd); in oz_cdev_read()
[all …]
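oz_cdev_claim_ctx()/oz_cdev_release_ctx() are a straightforward reference-count pair: the claim bumps the count, the last release frees. A minimal sketch with C11 atomics; the locking the kernel takes around the lookup is omitted here:

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct serial_ctx {
	atomic_int ref_count;
	/* ... per-connection state ... */
};

static struct serial_ctx *ctx_claim(struct serial_ctx *ctx)
{
	if (ctx)
		atomic_fetch_add(&ctx->ref_count, 1);
	return ctx;
}

static void ctx_release(struct serial_ctx *ctx)
{
	/* The thread that drops the last reference frees the object. */
	if (atomic_fetch_sub(&ctx->ref_count, 1) == 1)
		free(ctx);
}

int main(void)
{
	struct serial_ctx *ctx = calloc(1, sizeof(*ctx));

	atomic_init(&ctx->ref_count, 1);   /* creator holds one reference */
	ctx_claim(ctx);                    /* reader takes another */
	ctx_release(ctx);                  /* reader done */
	ctx_release(ctx);                  /* creator done: freed here */
	return 0;
}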
/linux-4.1.27/drivers/net/ieee802154/
Dat86rf230.c303 struct at86rf230_state_change *ctx,
469 struct at86rf230_state_change *ctx = context; in at86rf230_async_error_recover() local
470 struct at86rf230_local *lp = ctx->lp; in at86rf230_async_error_recover()
473 at86rf230_async_state_change(lp, ctx, STATE_RX_AACK_ON, NULL, false); in at86rf230_async_error_recover()
479 struct at86rf230_state_change *ctx, int rc) in at86rf230_async_error() argument
483 at86rf230_async_state_change(lp, ctx, STATE_FORCE_TRX_OFF, in at86rf230_async_error()
490 struct at86rf230_state_change *ctx, in at86rf230_async_read_reg() argument
496 u8 *tx_buf = ctx->buf; in at86rf230_async_read_reg()
499 ctx->msg.complete = complete; in at86rf230_async_read_reg()
500 ctx->irq_enable = irq_enable; in at86rf230_async_read_reg()
[all …]
/linux-4.1.27/arch/x86/crypto/
Daesni-intel_glue.c83 asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
85 asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
87 asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
89 asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
91 asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
93 asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
95 asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
106 static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
108 asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
111 asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
[all …]
Dcast6_avx_glue.c45 asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
47 asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
50 asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
52 asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
55 asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
57 asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
60 static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in cast6_xts_enc() argument
62 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in cast6_xts_enc()
66 static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) in cast6_xts_dec() argument
68 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in cast6_xts_dec()
[all …]
Dserpent_avx_glue.c45 asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
49 asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
53 asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
57 asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
61 asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
65 asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
69 void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) in __serpent_crypt_ctr() argument
76 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in __serpent_crypt_ctr()
81 void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in serpent_xts_enc() argument
83 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in serpent_xts_enc()
[all …]
Dtwofish_avx_glue.c51 asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst,
53 asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst,
56 asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst,
58 asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst,
61 asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst,
63 asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst,
66 static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_3way() argument
69 __twofish_enc_blk_3way(ctx, dst, src, false); in twofish_enc_blk_3way()
72 static void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in twofish_xts_enc() argument
74 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in twofish_xts_enc()
[all …]
Dtwofish_glue_3way.c39 static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_3way() argument
42 __twofish_enc_blk_3way(ctx, dst, src, false); in twofish_enc_blk_3way()
45 static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst, in twofish_enc_blk_xor_3way() argument
48 __twofish_enc_blk_3way(ctx, dst, src, true); in twofish_enc_blk_xor_3way()
51 void twofish_dec_blk_cbc_3way(void *ctx, u128 *dst, const u128 *src) in twofish_dec_blk_cbc_3way() argument
58 twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src); in twofish_dec_blk_cbc_3way()
65 void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) in twofish_enc_blk_ctr() argument
75 twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in twofish_enc_blk_ctr()
80 void twofish_enc_blk_ctr_3way(void *ctx, u128 *dst, const u128 *src, in twofish_enc_blk_ctr_3way() argument
98 twofish_enc_blk_xor_3way(ctx, (u8 *)dst, (u8 *)ctrblks); in twofish_enc_blk_ctr_3way()
[all …]
Dserpent_sse2_glue.c48 static void serpent_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src) in serpent_decrypt_cbc_xway() argument
56 serpent_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); in serpent_decrypt_cbc_xway()
62 static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) in serpent_crypt_ctr() argument
69 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in serpent_crypt_ctr()
73 static void serpent_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src, in serpent_crypt_ctr_xway() argument
87 serpent_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks); in serpent_crypt_ctr_xway()
186 struct serpent_ctx *ctx; member
193 struct crypt_priv *ctx = priv; in encrypt_callback() local
196 ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes); in encrypt_callback()
199 serpent_enc_blk_xway(ctx->ctx, srcdst, srcdst); in encrypt_callback()
[all …]
Dcamellia_aesni_avx2_glue.c31 asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
33 asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
36 asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
38 asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
41 asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
43 asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
206 struct camellia_ctx *ctx; member
213 struct crypt_priv *ctx = priv; in encrypt_callback() local
216 ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes); in encrypt_callback()
219 camellia_ecb_enc_32way(ctx->ctx, srcdst, srcdst); in encrypt_callback()
[all …]
Dcamellia_aesni_avx_glue.c30 asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
34 asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
38 asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
42 asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
46 asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
50 asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
54 void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) in camellia_xts_enc() argument
56 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in camellia_xts_enc()
61 void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) in camellia_xts_dec() argument
63 glue_xts_crypt_128bit_one(ctx, dst, src, iv, in camellia_xts_dec()
[all …]
Dserpent_avx2_glue.c31 asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst,
33 asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst,
35 asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src);
37 asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src,
39 asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst,
41 asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst,
184 struct serpent_ctx *ctx; member
191 struct crypt_priv *ctx = priv; in encrypt_callback() local
194 ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes); in encrypt_callback()
197 serpent_ecb_enc_16way(ctx->ctx, srcdst, srcdst); in encrypt_callback()
[all …]
Dcrct10dif-pclmul_glue.c50 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); in chksum_init() local
52 ctx->crc = 0; in chksum_init()
60 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); in chksum_update() local
64 ctx->crc = crc_t10dif_pcl(ctx->crc, data, length); in chksum_update()
67 ctx->crc = crc_t10dif_generic(ctx->crc, data, length); in chksum_update()
73 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); in chksum_final() local
75 *(__u16 *)out = ctx->crc; in chksum_final()
94 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); in chksum_finup() local
96 return __chksum_finup(&ctx->crc, data, len, out); in chksum_finup()
102 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc); in chksum_digest() local
[all …]
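The crct10dif glue keeps its running CRC in a small per-request ctx and exposes init/update/final steps. A user-space sketch of that incremental-digest shape, substituting a trivial additive checksum for the real CRC:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct chksum_desc_ctx {
	uint16_t crc;                /* running state between calls */
};

static void chksum_init(struct chksum_desc_ctx *ctx)
{
	ctx->crc = 0;
}

static void chksum_update(struct chksum_desc_ctx *ctx,
			  const uint8_t *data, size_t len)
{
	/* Placeholder for crc_t10dif(ctx->crc, data, len). */
	for (size_t i = 0; i < len; i++)
		ctx->crc += data[i];
}

static void chksum_final(struct chksum_desc_ctx *ctx, uint8_t *out)
{
	memcpy(out, &ctx->crc, sizeof(ctx->crc));
}

int main(void)
{
	struct chksum_desc_ctx ctx;
	uint8_t digest[2];

	chksum_init(&ctx);
	chksum_update(&ctx, (const uint8_t *)"abc", 3);   /* data may arrive */
	chksum_update(&ctx, (const uint8_t *)"def", 3);   /* in pieces */
	chksum_final(&ctx, digest);
	printf("%02x%02x\n", digest[0], digest[1]);
	return 0;
}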
/linux-4.1.27/security/keys/
Dkeyring.c426 struct keyring_read_iterator_context *ctx = data; in keyring_read_iterator() local
431 key->type->name, key->serial, ctx->count, ctx->qty); in keyring_read_iterator()
433 if (ctx->count >= ctx->qty) in keyring_read_iterator()
436 ret = put_user(key->serial, ctx->buffer); in keyring_read_iterator()
439 ctx->buffer++; in keyring_read_iterator()
440 ctx->count += sizeof(key->serial); in keyring_read_iterator()
454 struct keyring_read_iterator_context ctx; in keyring_read() local
468 ctx.qty = nr_keys * sizeof(key_serial_t); in keyring_read()
471 return ctx.qty; in keyring_read()
473 if (buflen > ctx.qty) in keyring_read()
[all …]
Dprocess_keys.c322 key_ref_t search_my_process_keyrings(struct keyring_search_context *ctx) in search_my_process_keyrings() argument
338 if (ctx->cred->thread_keyring) { in search_my_process_keyrings()
340 make_key_ref(ctx->cred->thread_keyring, 1), ctx); in search_my_process_keyrings()
356 if (ctx->cred->process_keyring) { in search_my_process_keyrings()
358 make_key_ref(ctx->cred->process_keyring, 1), ctx); in search_my_process_keyrings()
376 if (ctx->cred->session_keyring) { in search_my_process_keyrings()
379 make_key_ref(rcu_dereference(ctx->cred->session_keyring), 1), in search_my_process_keyrings()
380 ctx); in search_my_process_keyrings()
399 else if (ctx->cred->user->session_keyring) { in search_my_process_keyrings()
401 make_key_ref(ctx->cred->user->session_keyring, 1), in search_my_process_keyrings()
[all …]
/linux-4.1.27/tools/perf/util/
Dparse-options.c360 void parse_options_start(struct parse_opt_ctx_t *ctx, in parse_options_start() argument
363 memset(ctx, 0, sizeof(*ctx)); in parse_options_start()
364 ctx->argc = argc - 1; in parse_options_start()
365 ctx->argv = argv + 1; in parse_options_start()
366 ctx->out = argv; in parse_options_start()
367 ctx->cpidx = ((flags & PARSE_OPT_KEEP_ARGV0) != 0); in parse_options_start()
368 ctx->flags = flags; in parse_options_start()
377 int parse_options_step(struct parse_opt_ctx_t *ctx, in parse_options_step() argument
381 int internal_help = !(ctx->flags & PARSE_OPT_NO_INTERNAL_HELP); in parse_options_step()
386 ctx->opt = NULL; in parse_options_step()
[all …]
/linux-4.1.27/kernel/events/
Dcore.c317 static void update_context_time(struct perf_event_context *ctx);
338 __get_cpu_context(struct perf_event_context *ctx) in __get_cpu_context() argument
340 return this_cpu_ptr(ctx->pmu->pmu_cpu_context); in __get_cpu_context()
344 struct perf_event_context *ctx) in perf_ctx_lock() argument
346 raw_spin_lock(&cpuctx->ctx.lock); in perf_ctx_lock()
347 if (ctx) in perf_ctx_lock()
348 raw_spin_lock(&ctx->lock); in perf_ctx_lock()
352 struct perf_event_context *ctx) in perf_ctx_unlock() argument
354 if (ctx) in perf_ctx_unlock()
355 raw_spin_unlock(&ctx->lock); in perf_ctx_unlock()
[all …]
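perf_ctx_lock()/perf_ctx_unlock() wrap a fixed two-lock ordering in which the inner context lock is optional. A pthread sketch of the same discipline (struct names invented):

#include <pthread.h>
#include <stdio.h>

struct cpu_ctx  { pthread_mutex_t lock; };
struct task_ctx { pthread_mutex_t lock; };

/* Always take the cpu lock first, the task lock (if any) second. */
static void ctx_lock(struct cpu_ctx *cpuctx, struct task_ctx *ctx)
{
	pthread_mutex_lock(&cpuctx->lock);
	if (ctx)
		pthread_mutex_lock(&ctx->lock);
}

/* Release in strict reverse order. */
static void ctx_unlock(struct cpu_ctx *cpuctx, struct task_ctx *ctx)
{
	if (ctx)
		pthread_mutex_unlock(&ctx->lock);
	pthread_mutex_unlock(&cpuctx->lock);
}

int main(void)
{
	struct cpu_ctx cpuctx = { PTHREAD_MUTEX_INITIALIZER };
	struct task_ctx ctx = { PTHREAD_MUTEX_INITIALIZER };

	ctx_lock(&cpuctx, &ctx);
	puts("both held");
	ctx_unlock(&cpuctx, &ctx);

	ctx_lock(&cpuctx, NULL);   /* the inner lock is optional */
	ctx_unlock(&cpuctx, NULL);
	return 0;
}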
/linux-4.1.27/drivers/media/platform/s5p-tv/
Dsii9234_drv.c105 static int sii9234_reset(struct sii9234_context *ctx) in sii9234_reset() argument
107 struct i2c_client *client = ctx->client; in sii9234_reset()
111 gpio_direction_output(ctx->gpio_n_reset, 1); in sii9234_reset()
113 gpio_direction_output(ctx->gpio_n_reset, 0); in sii9234_reset()
115 gpio_direction_output(ctx->gpio_n_reset, 1); in sii9234_reset()
216 static int sii9234_set_internal(struct sii9234_context *ctx) in sii9234_set_internal() argument
218 struct i2c_client *client = ctx->client; in sii9234_set_internal()
233 struct sii9234_context *ctx = sd_to_context(sd); in sii9234_runtime_suspend() local
234 struct i2c_client *client = ctx->client; in sii9234_runtime_suspend()
239 regulator_disable(ctx->power); in sii9234_runtime_suspend()
[all …]
/linux-4.1.27/arch/metag/kernel/
Dprocess.c135 pr_info(" SaveMask = 0x%04hx\n", regs->ctx.SaveMask); in show_regs()
136 pr_info(" Flags = 0x%04hx (%c%c%c%c)\n", regs->ctx.Flags, in show_regs()
137 regs->ctx.Flags & FLAG_Z ? 'Z' : 'z', in show_regs()
138 regs->ctx.Flags & FLAG_N ? 'N' : 'n', in show_regs()
139 regs->ctx.Flags & FLAG_O ? 'O' : 'o', in show_regs()
140 regs->ctx.Flags & FLAG_C ? 'C' : 'c'); in show_regs()
141 pr_info(" TXRPT = 0x%08x\n", regs->ctx.CurrRPT); in show_regs()
142 pr_info(" PC = 0x%08x\n", regs->ctx.CurrPC); in show_regs()
148 regs->ctx.AX[i].U0); in show_regs()
151 regs->ctx.AX[i].U1); in show_regs()
[all …]
Dptrace.c37 regs->ctx.DX, 0, 4*16); in metag_gp_regs_copyout()
42 regs->ctx.AX, 4*16, 4*20); in metag_gp_regs_copyout()
46 if (regs->ctx.SaveMask & TBICTX_XEXT_BIT) in metag_gp_regs_copyout()
47 ptr = regs->ctx.Ext.Ctx.pExt; in metag_gp_regs_copyout()
49 ptr = &regs->ctx.Ext.AX2; in metag_gp_regs_copyout()
56 &regs->ctx.AX3, 4*22, 4*24); in metag_gp_regs_copyout()
61 &regs->ctx.CurrPC, 4*24, 4*25); in metag_gp_regs_copyout()
65 data = (unsigned long)regs->ctx.Flags; in metag_gp_regs_copyout()
66 if (regs->ctx.SaveMask & TBICTX_CBUF_BIT) in metag_gp_regs_copyout()
74 &regs->ctx.CurrRPT, 4*26, 4*29); in metag_gp_regs_copyout()
[all …]
/linux-4.1.27/fs/ntfs/
Dattrib.c84 int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn, ntfs_attr_search_ctx *ctx) in ntfs_map_runlist_nolock() argument
103 if (!ctx) { in ntfs_map_runlist_nolock()
108 ctx = ntfs_attr_get_search_ctx(base_ni, m); in ntfs_map_runlist_nolock()
109 if (unlikely(!ctx)) { in ntfs_map_runlist_nolock()
116 BUG_ON(IS_ERR(ctx->mrec)); in ntfs_map_runlist_nolock()
117 a = ctx->attr; in ntfs_map_runlist_nolock()
143 old_ctx = *ctx; in ntfs_map_runlist_nolock()
161 ntfs_attr_reinit_search_ctx(ctx); in ntfs_map_runlist_nolock()
167 CASE_SENSITIVE, vcn, NULL, 0, ctx); in ntfs_map_runlist_nolock()
173 BUG_ON(!ctx->attr->non_resident); in ntfs_map_runlist_nolock()
[all …]
/linux-4.1.27/drivers/crypto/ccp/
Dccp-crypto-aes-cmac.c62 struct ccp_ctx *ctx = crypto_ahash_ctx(tfm); in ccp_do_cmac_update() local
72 if (!ctx->u.aes.key_len) in ccp_do_cmac_update()
143 cmac_key_sg = (need_pad) ? &ctx->u.aes.k2_sg in ccp_do_cmac_update()
144 : &ctx->u.aes.k1_sg; in ccp_do_cmac_update()
149 rctx->cmd.u.aes.type = ctx->u.aes.type; in ccp_do_cmac_update()
150 rctx->cmd.u.aes.mode = ctx->u.aes.mode; in ccp_do_cmac_update()
152 rctx->cmd.u.aes.key = &ctx->u.aes.key_sg; in ccp_do_cmac_update()
153 rctx->cmd.u.aes.key_len = ctx->u.aes.key_len; in ccp_do_cmac_update()
160 rctx->cmd.u.aes.cmac_key_len = ctx->u.aes.kn_len; in ccp_do_cmac_update()
244 struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm)); in ccp_aes_cmac_setkey() local
[all …]
Dccp-crypto-aes.c28 struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm); in ccp_aes_complete() local
34 if (ctx->u.aes.mode != CCP_AES_MODE_ECB) in ccp_aes_complete()
43 struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm)); in ccp_aes_setkey() local
49 ctx->u.aes.type = CCP_AES_TYPE_128; in ccp_aes_setkey()
52 ctx->u.aes.type = CCP_AES_TYPE_192; in ccp_aes_setkey()
55 ctx->u.aes.type = CCP_AES_TYPE_256; in ccp_aes_setkey()
61 ctx->u.aes.mode = alg->mode; in ccp_aes_setkey()
62 ctx->u.aes.key_len = key_len; in ccp_aes_setkey()
64 memcpy(ctx->u.aes.key, key, key_len); in ccp_aes_setkey()
65 sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len); in ccp_aes_setkey()
[all …]
Dccp-crypto-sha.c49 memcpy(req->result, rctx->ctx, digest_size); in ccp_sha_complete()
61 struct ccp_ctx *ctx = crypto_ahash_ctx(tfm); in ccp_do_sha_update() local
94 sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx)); in ccp_do_sha_update()
128 rctx->cmd.u.sha.ctx = &rctx->ctx_sg; in ccp_do_sha_update()
129 rctx->cmd.u.sha.ctx_len = sizeof(rctx->ctx); in ccp_do_sha_update()
132 rctx->cmd.u.sha.opad = ctx->u.sha.key_len ? in ccp_do_sha_update()
133 &ctx->u.sha.opad_sg : NULL; in ccp_do_sha_update()
134 rctx->cmd.u.sha.opad_len = ctx->u.sha.key_len ? in ccp_do_sha_update()
135 ctx->u.sha.opad_count : 0; in ccp_do_sha_update()
150 struct ccp_ctx *ctx = crypto_ahash_ctx(tfm); in ccp_sha_init() local
[all …]
/linux-4.1.27/arch/powerpc/crypto/
Daes-spe-glue.c95 struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm); in ppc_aes_setkey() local
106 ctx->rounds = 4; in ppc_aes_setkey()
107 ppc_expand_key_128(ctx->key_enc, in_key); in ppc_aes_setkey()
110 ctx->rounds = 5; in ppc_aes_setkey()
111 ppc_expand_key_192(ctx->key_enc, in_key); in ppc_aes_setkey()
114 ctx->rounds = 6; in ppc_aes_setkey()
115 ppc_expand_key_256(ctx->key_enc, in_key); in ppc_aes_setkey()
119 ppc_generate_decrypt_key(ctx->key_dec, ctx->key_enc, key_len); in ppc_aes_setkey()
127 struct ppc_xts_ctx *ctx = crypto_tfm_ctx(tfm); in ppc_xts_setkey() local
140 ctx->rounds = 4; in ppc_xts_setkey()
[all …]
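ppc_aes_setkey() dispatches on the key length to choose a round count and expansion routine. A compilable sketch of that dispatch, using the standard AES round numbers rather than the SPE-specific ones, with the expansion calls elided:

#include <stdio.h>

enum { AES_KEYSIZE_128 = 16, AES_KEYSIZE_192 = 24, AES_KEYSIZE_256 = 32 };

struct aes_ctx {
	int rounds;
};

static int aes_setkey(struct aes_ctx *ctx, unsigned int key_len)
{
	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->rounds = 10;     /* expand_key_128(...) would run here */
		break;
	case AES_KEYSIZE_192:
		ctx->rounds = 12;
		break;
	case AES_KEYSIZE_256:
		ctx->rounds = 14;
		break;
	default:
		return -1;            /* reject unsupported lengths */
	}
	return 0;
}

int main(void)
{
	struct aes_ctx ctx;

	if (aes_setkey(&ctx, AES_KEYSIZE_192) == 0)
		printf("rounds: %d\n", ctx.rounds);
	return 0;
}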
/linux-4.1.27/arch/arm/crypto/
Dghash-ce-glue.c48 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_init() local
50 *ctx = (struct ghash_desc_ctx){}; in ghash_init()
57 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_update() local
58 unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; in ghash_update()
60 ctx->count += len; in ghash_update()
69 memcpy(ctx->buf + partial, src, p); in ghash_update()
78 pmull_ghash_update(blocks, ctx->digest, src, key, in ghash_update()
79 partial ? ctx->buf : NULL); in ghash_update()
85 memcpy(ctx->buf + partial, src, len); in ghash_update()
91 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_final() local
[all …]
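ghash_update() buffers a partial block in the ctx until a full block is available, then walks whole blocks straight from the caller's buffer. A standalone sketch of that buffering logic, with the block transform reduced to a print:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 16

struct hash_ctx {
	uint64_t count;              /* total bytes seen so far */
	uint8_t buf[BLOCK_SIZE];     /* pending partial block */
};

static void process_block(const uint8_t *block)
{
	printf("block: %.16s\n", (const char *)block);   /* the transform */
}

static void hash_update(struct hash_ctx *ctx, const uint8_t *src, size_t len)
{
	size_t partial = ctx->count % BLOCK_SIZE;

	ctx->count += len;

	/* Top up a pending partial block first. */
	if (partial) {
		size_t p = BLOCK_SIZE - partial;
		if (p > len)
			p = len;
		memcpy(ctx->buf + partial, src, p);
		src += p;
		len -= p;
		if (partial + p == BLOCK_SIZE)
			process_block(ctx->buf);
		else
			return;      /* still not a full block */
	}

	/* Consume whole blocks straight from the caller's buffer. */
	while (len >= BLOCK_SIZE) {
		process_block(src);
		src += BLOCK_SIZE;
		len -= BLOCK_SIZE;
	}

	/* Stash the tail for the next call. */
	memcpy(ctx->buf, src, len);
}

int main(void)
{
	struct hash_ctx ctx = { 0 };

	hash_update(&ctx, (const uint8_t *)"0123456789", 10);
	hash_update(&ctx, (const uint8_t *)"abcdefghijklmnop", 16);
	return 0;
}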
/linux-4.1.27/kernel/locking/
Dmutex.c119 DEBUG_LOCKS_WARN_ON(ww->ctx); in ww_mutex_lock_acquired()
158 struct ww_acquire_ctx *ctx) in ww_mutex_set_context_fastpath() argument
163 ww_mutex_lock_acquired(lock, ctx); in ww_mutex_set_context_fastpath()
165 lock->ctx = ctx; in ww_mutex_set_context_fastpath()
202 struct ww_acquire_ctx *ctx) in ww_mutex_set_context_slowpath() argument
206 ww_mutex_lock_acquired(lock, ctx); in ww_mutex_set_context_slowpath()
207 lock->ctx = ctx; in ww_mutex_set_context_slowpath()
337 if (READ_ONCE(ww->ctx)) in mutex_optimistic_spin()
459 if (lock->ctx) { in ww_mutex_unlock()
461 DEBUG_LOCKS_WARN_ON(!lock->ctx->acquired); in ww_mutex_unlock()
[all …]
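The ww_mutex code associates every lock with the acquire context that owns it so contenders can back off rather than deadlock. The sketch below is not the wound-wait algorithm itself, only a simplified user-space analogue of the back-off idea using trylock:

#include <pthread.h>
#include <stdio.h>

/* Acquire two mutexes without a global ordering: on contention, drop
 * everything, sleep on the lock that blocked us, then retry the other. */
static void lock_pair(pthread_mutex_t *a, pthread_mutex_t *b)
{
	pthread_mutex_lock(a);
	while (pthread_mutex_trylock(b) != 0) {
		pthread_mutex_unlock(a);   /* back off completely */
		pthread_mutex_lock(b);     /* wait on the contended lock */
		pthread_mutex_t *t = a;    /* swap roles and try again */
		a = b;
		b = t;
	}
}

static pthread_mutex_t m1 = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t m2 = PTHREAD_MUTEX_INITIALIZER;

static void *worker(void *arg)
{
	(void)arg;
	lock_pair(&m2, &m1);           /* opposite order to main() */
	puts("worker holds both");
	pthread_mutex_unlock(&m1);
	pthread_mutex_unlock(&m2);
	return NULL;
}

int main(void)
{
	pthread_t t;

	pthread_create(&t, NULL, worker, NULL);
	lock_pair(&m1, &m2);
	puts("main holds both");
	pthread_mutex_unlock(&m2);
	pthread_mutex_unlock(&m1);
	pthread_join(t, NULL);
	return 0;
}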
/linux-4.1.27/drivers/crypto/vmx/
Daes.c42 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_init() local
60 ctx->fallback = fallback; in p8_aes_init()
67 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_exit() local
69 if (ctx->fallback) { in p8_aes_exit()
70 crypto_free_cipher(ctx->fallback); in p8_aes_exit()
71 ctx->fallback = NULL; in p8_aes_exit()
79 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_setkey() local
84 ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key); in p8_aes_setkey()
85 ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key); in p8_aes_setkey()
88 ret += crypto_cipher_setkey(ctx->fallback, key, keylen); in p8_aes_setkey()
[all …]
Daes_cbc.c43 struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_cbc_init() local
61 ctx->fallback = fallback; in p8_aes_cbc_init()
68 struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_cbc_exit() local
70 if (ctx->fallback) { in p8_aes_cbc_exit()
71 crypto_free_blkcipher(ctx->fallback); in p8_aes_cbc_exit()
72 ctx->fallback = NULL; in p8_aes_cbc_exit()
80 struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm); in p8_aes_cbc_setkey() local
85 ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key); in p8_aes_cbc_setkey()
86 ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key); in p8_aes_cbc_setkey()
89 ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen); in p8_aes_cbc_setkey()
[all …]
/linux-4.1.27/drivers/crypto/ux500/hash/
Dhash_core.c148 struct hash_ctx *ctx = data; in hash_dma_callback() local
150 complete(&ctx->device->dma.complete); in hash_dma_callback()
153 static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg, in hash_set_dma_transfer() argument
161 dev_err(ctx->device->dev, "%s: Invalid DMA direction\n", in hash_set_dma_transfer()
168 channel = ctx->device->dma.chan_mem2hash; in hash_set_dma_transfer()
169 ctx->device->dma.sg = sg; in hash_set_dma_transfer()
170 ctx->device->dma.sg_len = dma_map_sg(channel->device->dev, in hash_set_dma_transfer()
171 ctx->device->dma.sg, ctx->device->dma.nents, in hash_set_dma_transfer()
174 if (!ctx->device->dma.sg_len) { in hash_set_dma_transfer()
175 dev_err(ctx->device->dev, "%s: Could not map the sg list (TO_DEVICE)\n", in hash_set_dma_transfer()
[all …]
/linux-4.1.27/drivers/staging/lustre/lustre/ptlrpc/
Dsec.c279 struct ptlrpc_cli_ctx *sptlrpc_cli_ctx_get(struct ptlrpc_cli_ctx *ctx) in sptlrpc_cli_ctx_get() argument
281 atomic_inc(&ctx->cc_refcount); in sptlrpc_cli_ctx_get()
282 return ctx; in sptlrpc_cli_ctx_get()
286 void sptlrpc_cli_ctx_put(struct ptlrpc_cli_ctx *ctx, int sync) in sptlrpc_cli_ctx_put() argument
288 struct ptlrpc_sec *sec = ctx->cc_sec; in sptlrpc_cli_ctx_put()
291 LASSERT_ATOMIC_POS(&ctx->cc_refcount); in sptlrpc_cli_ctx_put()
293 if (!atomic_dec_and_test(&ctx->cc_refcount)) in sptlrpc_cli_ctx_put()
296 sec->ps_policy->sp_cops->release_ctx(sec, ctx, sync); in sptlrpc_cli_ctx_put()
305 void sptlrpc_cli_ctx_expire(struct ptlrpc_cli_ctx *ctx) in sptlrpc_cli_ctx_expire() argument
307 LASSERT(ctx->cc_ops->force_die); in sptlrpc_cli_ctx_expire()
[all …]
/linux-4.1.27/drivers/mtd/nand/
Dau1550nd.c200 struct au1550nd_ctx *ctx = container_of(mtd, struct au1550nd_ctx, info); in au1550_hwcontrol() local
206 this->IO_ADDR_W = ctx->base + MEM_STNAND_CMD; in au1550_hwcontrol()
210 this->IO_ADDR_W = ctx->base + MEM_STNAND_DATA; in au1550_hwcontrol()
214 this->IO_ADDR_W = ctx->base + MEM_STNAND_ADDR; in au1550_hwcontrol()
218 this->IO_ADDR_W = ctx->base + MEM_STNAND_DATA; in au1550_hwcontrol()
226 alchemy_wrsmem((1 << (4 + ctx->cs)), AU1000_MEM_STNDCTL); in au1550_hwcontrol()
270 struct au1550nd_ctx *ctx = container_of(mtd, struct au1550nd_ctx, info); in au1550_command() local
294 ctx->write_byte(mtd, readcmd); in au1550_command()
296 ctx->write_byte(mtd, command); in au1550_command()
310 ctx->write_byte(mtd, column); in au1550_command()
[all …]
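au1550_hwcontrol() recovers the driver context from the embedded mtd_info with container_of(). A self-contained illustration of the idiom, with the structures trimmed to a minimum:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct mtd_info {
	const char *name;
};

/* The driver context embeds the generic structure it hands out. */
struct nand_ctx {
	int cs;
	struct mtd_info info;
};

static void hwcontrol(struct mtd_info *mtd)
{
	/* Recover the embedding context from the member pointer. */
	struct nand_ctx *ctx = container_of(mtd, struct nand_ctx, info);

	printf("chip select: %d\n", ctx->cs);
}

int main(void)
{
	struct nand_ctx ctx = { .cs = 3, .info = { .name = "nand" } };

	hwcontrol(&ctx.info);   /* callee only ever sees the mtd_info */
	return 0;
}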
/linux-4.1.27/drivers/infiniband/hw/mlx4/
Dmad.c107 __be64 mlx4_ib_get_new_demux_tid(struct mlx4_ib_demux_ctx *ctx) in mlx4_ib_get_new_demux_tid() argument
109 return cpu_to_be64(atomic_inc_return(&ctx->tid)) | in mlx4_ib_get_new_demux_tid()
1109 struct mlx4_ib_demux_pv_ctx *ctx = cq->cq_context; in mlx4_ib_tunnel_comp_handler() local
1110 struct mlx4_ib_dev *dev = to_mdev(ctx->ib_dev); in mlx4_ib_tunnel_comp_handler()
1112 if (!dev->sriov.is_going_down && ctx->state == DEMUX_PV_STATE_ACTIVE) in mlx4_ib_tunnel_comp_handler()
1113 queue_work(ctx->wq, &ctx->work); in mlx4_ib_tunnel_comp_handler()
1117 static int mlx4_ib_post_pv_qp_buf(struct mlx4_ib_demux_pv_ctx *ctx, in mlx4_ib_post_pv_qp_buf() argument
1130 sg_list.lkey = ctx->mr->lkey; in mlx4_ib_post_pv_qp_buf()
1137 ib_dma_sync_single_for_device(ctx->ib_dev, tun_qp->ring[index].map, in mlx4_ib_post_pv_qp_buf()
1281 static void mlx4_ib_multiplex_mad(struct mlx4_ib_demux_pv_ctx *ctx, struct ib_wc *wc) in mlx4_ib_multiplex_mad() argument
[all …]
/linux-4.1.27/arch/arm64/crypto/
Dghash-ce-glue.c41 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_init() local
43 *ctx = (struct ghash_desc_ctx){}; in ghash_init()
50 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_update() local
51 unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; in ghash_update()
53 ctx->count += len; in ghash_update()
62 memcpy(ctx->buf + partial, src, p); in ghash_update()
71 pmull_ghash_update(blocks, ctx->digest, src, key, in ghash_update()
72 partial ? ctx->buf : NULL); in ghash_update()
78 memcpy(ctx->buf + partial, src, len); in ghash_update()
84 struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); in ghash_final() local
[all …]
Daes-ce-cipher.c27 static int num_rounds(struct crypto_aes_ctx *ctx) in num_rounds() argument
36 return 6 + ctx->key_length / 4; in num_rounds()
41 struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); in aes_cipher_encrypt() local
77 "1"(ctx->key_enc), in aes_cipher_encrypt()
78 "2"(num_rounds(ctx) - 2) in aes_cipher_encrypt()
86 struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); in aes_cipher_decrypt() local
122 "1"(ctx->key_dec), in aes_cipher_decrypt()
123 "2"(num_rounds(ctx) - 2) in aes_cipher_decrypt()
149 int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key, in ce_aes_expandkey() argument
168 memcpy(ctx->key_enc, in_key, key_len); in ce_aes_expandkey()
[all …]
/linux-4.1.27/tools/perf/bench/
Dsched-messaging.c83 static void *sender(struct sender_context *ctx) in sender() argument
88 ready(ctx->ready_out, ctx->wakefd); in sender()
92 for (j = 0; j < ctx->num_fds; j++) { in sender()
96 ret = write(ctx->out_fds[j], data + done, in sender()
111 static void *receiver(struct receiver_context* ctx) in receiver() argument
116 close(ctx->in_fds[1]); in receiver()
119 ready(ctx->ready_out, ctx->wakefd); in receiver()
122 for (i = 0; i < ctx->num_packets; i++) { in receiver()
127 ret = read(ctx->in_fds[0], data + done, DATASIZE - done); in receiver()
138 static pthread_t create_worker(void *ctx, void *(*func)(void *)) in create_worker() argument
[all …]
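sender() loops on write() because a single call may transfer fewer bytes than asked for. A standalone sketch of that short-write handling, using one pipe in place of the benchmark's fd fan-out:

#include <stdio.h>
#include <string.h>
#include <unistd.h>

/* Keep calling write() until the whole buffer has gone out. */
static int write_all(int fd, const char *data, size_t size)
{
	size_t done = 0;

	while (done < size) {
		ssize_t ret = write(fd, data + done, size - done);
		if (ret < 0)
			return -1;      /* real code would retry on EINTR */
		done += (size_t)ret;
	}
	return 0;
}

int main(void)
{
	int fds[2];
	char buf[5];

	if (pipe(fds))
		return 1;
	write_all(fds[1], "hello", 5);
	if (read(fds[0], buf, sizeof(buf)) == 5)
		printf("got %.5s\n", buf);
	return 0;
}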
/linux-4.1.27/net/netfilter/
Dnf_tables_api.c91 static void nft_ctx_init(struct nft_ctx *ctx, in nft_ctx_init() argument
99 ctx->net = sock_net(skb->sk); in nft_ctx_init()
100 ctx->afi = afi; in nft_ctx_init()
101 ctx->table = table; in nft_ctx_init()
102 ctx->chain = chain; in nft_ctx_init()
103 ctx->nla = nla; in nft_ctx_init()
104 ctx->portid = NETLINK_CB(skb).portid; in nft_ctx_init()
105 ctx->report = nlmsg_report(nlh); in nft_ctx_init()
106 ctx->seq = nlh->nlmsg_seq; in nft_ctx_init()
109 static struct nft_trans *nft_trans_alloc(struct nft_ctx *ctx, int msg_type, in nft_trans_alloc() argument
[all …]
/linux-4.1.27/arch/sparc/crypto/
Daes_glue.c169 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm); in aes_set_key() local
174 ctx->expanded_key_length = 0xb0; in aes_set_key()
175 ctx->ops = &aes128_ops; in aes_set_key()
179 ctx->expanded_key_length = 0xd0; in aes_set_key()
180 ctx->ops = &aes192_ops; in aes_set_key()
184 ctx->expanded_key_length = 0xf0; in aes_set_key()
185 ctx->ops = &aes256_ops; in aes_set_key()
193 aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len); in aes_set_key()
194 ctx->key_length = key_len; in aes_set_key()
201 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm); in aes_encrypt() local
[all …]
/linux-4.1.27/kernel/
Dauditsc.c131 static int audit_match_perm(struct audit_context *ctx, int mask) in audit_match_perm() argument
134 if (unlikely(!ctx)) in audit_match_perm()
136 n = ctx->major; in audit_match_perm()
138 switch (audit_classify_syscall(ctx->arch, n)) { in audit_match_perm()
162 return mask & ACC_MODE(ctx->argv[1]); in audit_match_perm()
164 return mask & ACC_MODE(ctx->argv[2]); in audit_match_perm()
166 return ((mask & AUDIT_PERM_WRITE) && ctx->argv[0] == SYS_BIND); in audit_match_perm()
174 static int audit_match_filetype(struct audit_context *ctx, int val) in audit_match_filetype() argument
179 if (unlikely(!ctx)) in audit_match_filetype()
182 list_for_each_entry(n, &ctx->names_list, list) { in audit_match_filetype()
[all …]
/linux-4.1.27/drivers/media/firewire/
Dfiredtv-fw.c84 static int queue_iso(struct fdtv_ir_context *ctx, int index) in queue_iso() argument
89 p.interrupt = !(++ctx->interrupt_packet & (IRQ_INTERVAL - 1)); in queue_iso()
93 return fw_iso_context_queue(ctx->context, &p, &ctx->buffer, in queue_iso()
101 struct fdtv_ir_context *ctx = fdtv->ir_context; in handle_iso() local
103 int length, err, i = ctx->current_packet; in handle_iso()
113 p = ctx->pages[i / PACKETS_PER_PAGE] in handle_iso()
121 err = queue_iso(ctx, i); in handle_iso()
127 fw_iso_context_queue_flush(ctx->context); in handle_iso()
128 ctx->current_packet = i; in handle_iso()
133 struct fdtv_ir_context *ctx; in fdtv_start_iso() local
[all …]
/linux-4.1.27/arch/powerpc/mm/
Dmmu_context_hash32.c66 unsigned long ctx = next_mmu_context; in __init_new_context() local
68 while (test_and_set_bit(ctx, context_map)) { in __init_new_context()
69 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx); in __init_new_context()
70 if (ctx > LAST_CONTEXT) in __init_new_context()
71 ctx = 0; in __init_new_context()
73 next_mmu_context = (ctx + 1) & LAST_CONTEXT; in __init_new_context()
75 return ctx; in __init_new_context()
92 void __destroy_context(unsigned long ctx) in __destroy_context() argument
94 clear_bit(ctx, context_map); in __destroy_context()
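__init_new_context() hands out context numbers from a bitmap, scanning round-robin from the last allocation and wrapping at LAST_CONTEXT. A user-space sketch of the same allocator, with the bitmap helpers written out by hand; like the kernel code, it assumes a free id always exists:

#include <stdio.h>

#define LAST_CONTEXT 15   /* hypothetical: 16 contexts, ids 0..15 */

static unsigned long context_map;     /* one bit per context */
static unsigned int next_context;

static int test_and_set(unsigned int nr)
{
	int old = (context_map >> nr) & 1;
	context_map |= 1UL << nr;
	return old;
}

static int find_next_zero(unsigned int from)
{
	for (unsigned int nr = from; nr <= LAST_CONTEXT; nr++)
		if (!((context_map >> nr) & 1))
			return (int)nr;
	return -1;
}

/* Allocate the next free context id, wrapping around at the end. */
static unsigned int init_new_context(void)
{
	unsigned int ctx = next_context;

	while (test_and_set(ctx)) {
		int next = find_next_zero(ctx);
		ctx = (next < 0) ? 0 : (unsigned int)next;
	}
	next_context = (ctx + 1) & LAST_CONTEXT;
	return ctx;
}

int main(void)
{
	printf("ctx %u\n", init_new_context());
	printf("ctx %u\n", init_new_context());
	return 0;
}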
/linux-4.1.27/arch/microblaze/include/asm/
Dmmu_context_mm.h26 # define CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \ argument
83 mm_context_t ctx; in get_mmu_context() local
89 ctx = next_mmu_context; in get_mmu_context()
90 while (test_and_set_bit(ctx, context_map)) { in get_mmu_context()
91 ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx); in get_mmu_context()
92 if (ctx > LAST_CONTEXT) in get_mmu_context()
93 ctx = 0; in get_mmu_context()
95 next_mmu_context = (ctx + 1) & LAST_CONTEXT; in get_mmu_context()
96 mm->context = ctx; in get_mmu_context()
97 context_mm[ctx] = mm; in get_mmu_context()
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmfmac/
Dcommonring.h28 int (*cr_ring_bell)(void *ctx);
29 int (*cr_update_rptr)(void *ctx);
30 int (*cr_update_wptr)(void *ctx);
31 int (*cr_write_rptr)(void *ctx);
32 int (*cr_write_wptr)(void *ctx);
46 int (*cr_ring_bell)(void *ctx),
47 int (*cr_update_rptr)(void *ctx),
48 int (*cr_update_wptr)(void *ctx),
49 int (*cr_write_rptr)(void *ctx),
50 int (*cr_write_wptr)(void *ctx), void *ctx);
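commonring.h registers a bundle of function pointers together with one opaque ctx that is echoed back on every call, so the ring core stays bus-agnostic. A compilable sketch of that ops-plus-context registration; all names are invented:

#include <stdio.h>

/* Callbacks supplied by the bus layer; ctx is echoed back verbatim. */
struct ring_ops {
	int (*ring_bell)(void *ctx);
	int (*update_wptr)(void *ctx);
};

struct ring {
	struct ring_ops ops;
	void *ctx;
};

static void ring_register_cb(struct ring *ring,
			     int (*ring_bell)(void *),
			     int (*update_wptr)(void *), void *ctx)
{
	ring->ops.ring_bell = ring_bell;
	ring->ops.update_wptr = update_wptr;
	ring->ctx = ctx;
}

/* Bus-specific implementation state. */
struct pcie_dev {
	int id;
};

static int pcie_ring_bell(void *ctx)
{
	struct pcie_dev *dev = ctx;

	printf("ring bell on dev %d\n", dev->id);
	return 0;
}

static int pcie_update_wptr(void *ctx)
{
	(void)ctx;
	return 0;
}

int main(void)
{
	struct pcie_dev dev = { .id = 7 };
	struct ring ring;

	ring_register_cb(&ring, pcie_ring_bell, pcie_update_wptr, &dev);
	ring.ops.ring_bell(ring.ctx);   /* core code never sees pcie_dev */
	return 0;
}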
/linux-4.1.27/drivers/infiniband/hw/cxgb4/
Ddevice.c899 static void c4iw_dealloc(struct uld_ctx *ctx) in c4iw_dealloc() argument
901 c4iw_rdev_close(&ctx->dev->rdev); in c4iw_dealloc()
902 idr_destroy(&ctx->dev->cqidr); in c4iw_dealloc()
903 idr_destroy(&ctx->dev->qpidr); in c4iw_dealloc()
904 idr_destroy(&ctx->dev->mmidr); in c4iw_dealloc()
905 idr_destroy(&ctx->dev->hwtid_idr); in c4iw_dealloc()
906 idr_destroy(&ctx->dev->stid_idr); in c4iw_dealloc()
907 idr_destroy(&ctx->dev->atid_idr); in c4iw_dealloc()
908 if (ctx->dev->rdev.bar2_kva) in c4iw_dealloc()
909 iounmap(ctx->dev->rdev.bar2_kva); in c4iw_dealloc()
[all …]
/linux-4.1.27/block/
Dblk-mq-sysfs.c36 struct blk_mq_ctx *ctx; in blk_mq_sysfs_show() local
41 ctx = container_of(kobj, struct blk_mq_ctx, kobj); in blk_mq_sysfs_show()
42 q = ctx->queue; in blk_mq_sysfs_show()
50 res = entry->show(ctx, page); in blk_mq_sysfs_show()
59 struct blk_mq_ctx *ctx; in blk_mq_sysfs_store() local
64 ctx = container_of(kobj, struct blk_mq_ctx, kobj); in blk_mq_sysfs_store()
65 q = ctx->queue; in blk_mq_sysfs_store()
73 res = entry->store(ctx, page, length); in blk_mq_sysfs_store()
125 static ssize_t blk_mq_sysfs_dispatched_show(struct blk_mq_ctx *ctx, char *page) in blk_mq_sysfs_dispatched_show() argument
127 return sprintf(page, "%lu %lu\n", ctx->rq_dispatched[1], in blk_mq_sysfs_dispatched_show()
[all …]
