ce                 94 arch/alpha/kernel/time.c 	struct clock_event_device *ce = &per_cpu(cpu_ce, cpu);
ce                 97 arch/alpha/kernel/time.c 	if (likely(clockevent_state_periodic(ce)))
ce                 98 arch/alpha/kernel/time.c 		ce->event_handler(ce);
ce                109 arch/alpha/kernel/time.c rtc_ce_set_next_event(unsigned long evt, struct clock_event_device *ce)
ce                119 arch/alpha/kernel/time.c 	struct clock_event_device *ce = &per_cpu(cpu_ce, cpu);
ce                121 arch/alpha/kernel/time.c 	*ce = (struct clock_event_device){
ce                129 arch/alpha/kernel/time.c 	clockevents_config_and_register(ce, CONFIG_HZ, 0, 0);
ce                157 arch/alpha/kernel/time.c static int qemu_ce_shutdown(struct clock_event_device *ce)
ce                166 arch/alpha/kernel/time.c qemu_ce_set_next_event(unsigned long evt, struct clock_event_device *ce)
ce                176 arch/alpha/kernel/time.c 	struct clock_event_device *ce = &per_cpu(cpu_ce, cpu);
ce                178 arch/alpha/kernel/time.c 	ce->event_handler(ce);
ce                186 arch/alpha/kernel/time.c 	struct clock_event_device *ce = &per_cpu(cpu_ce, cpu);
ce                188 arch/alpha/kernel/time.c 	*ce = (struct clock_event_device){
ce                199 arch/alpha/kernel/time.c 	clockevents_config_and_register(ce, NSEC_PER_SEC, 1000, LONG_MAX);
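The alpha time.c hits above show the canonical per-CPU clockevent pattern: the struct clock_event_device lives in per-CPU storage, the timer interrupt forwards the tick through ce->event_handler(ce) only while the device is in the expected state, and setup ends in clockevents_config_and_register(). A minimal sketch of that pattern, assuming only the standard <linux/clockchips.h> API; my_ce, my_timer_interrupt and my_ce_register are hypothetical names:

#include <linux/clockchips.h>
#include <linux/percpu.h>
#include <linux/smp.h>

static DEFINE_PER_CPU(struct clock_event_device, my_ce);

/* interrupt path: deliver the tick only while the device is periodic */
static void my_timer_interrupt(void)
{
        struct clock_event_device *ce = this_cpu_ptr(&my_ce);

        if (likely(clockevent_state_periodic(ce)))
                ce->event_handler(ce);
}

/* registration, mirroring the rtc (periodic-only) case above */
static void my_ce_register(unsigned int cpu, u32 freq)
{
        struct clock_event_device *ce = &per_cpu(my_ce, cpu);

        ce->name     = "my_ce";
        ce->features = CLOCK_EVT_FEAT_PERIODIC;
        ce->cpumask  = cpumask_of(cpu);

        /* min/max delta of 0: no one-shot programming, periodic tick only */
        clockevents_config_and_register(ce, freq, 0, 0);
}

The qemu variant at lines 157-199 of the same file is the one-shot mirror image: a set_next_event() callback and registration with real delta bounds (1000 to LONG_MAX ticks at NSEC_PER_SEC).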
ce                 91 arch/arm/mach-footbridge/dc21285-timer.c 	struct clock_event_device *ce = dev_id;
ce                 96 arch/arm/mach-footbridge/dc21285-timer.c 	if (clockevent_state_oneshot(ce))
ce                 99 arch/arm/mach-footbridge/dc21285-timer.c 	ce->event_handler(ce);
ce                116 arch/arm/mach-footbridge/dc21285-timer.c 	struct clock_event_device *ce = &ckevt_dc21285;
ce                121 arch/arm/mach-footbridge/dc21285-timer.c 	setup_irq(ce->irq, &footbridge_timer_irq);
ce                123 arch/arm/mach-footbridge/dc21285-timer.c 	ce->cpumask = cpumask_of(smp_processor_id());
ce                124 arch/arm/mach-footbridge/dc21285-timer.c 	clockevents_config_and_register(ce, rate, 0x4, 0xffffff);
ce                 23 arch/arm/mach-footbridge/isa-timer.c 	struct clock_event_device *ce = dev_id;
ce                 24 arch/arm/mach-footbridge/isa-timer.c 	ce->event_handler(ce);
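Both footbridge timers hand the clock_event_device to setup_irq() as the dev_id cookie, so the handler recovers it without a global. The handler shape in isolation (my_timer_irq is a hypothetical name):

#include <linux/clockchips.h>
#include <linux/interrupt.h>

static irqreturn_t my_timer_irq(int irq, void *dev_id)
{
        struct clock_event_device *ce = dev_id;

        /* dc21285 additionally stops the hardware here when one-shot */
        ce->event_handler(ce);

        return IRQ_HANDLED;
}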
ce                709 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t ce:6;
ce                713 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t ce:6;
ce                740 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t ce:6;
ce                744 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t ce:6;
ce                278 arch/mips/include/asm/octeon/cvmx-pciercx-defs.h 		__BITFIELD_FIELD(uint32_t ce:1,
ce                471 arch/mips/pci/pcie-octeon.c 	pciercx_cfg070.s.ce = 1;	/* ECRC check enable. */
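The Octeon headers model hardware registers as a union of a raw 64-bit word and named bitfields; the doubled ce:6 entries are the big- and little-endian layouts of one field, and __BITFIELD_FIELD hides the same duplication in the PCIe header. pcie-octeon.c then flips one named bit (ECRC check enable) through the s view. The idiom reduced to a sketch; my_reg, my_enable_ce and the field layout are hypothetical:

#include <linux/io.h>
#include <linux/types.h>

union my_reg {
        u64 u64;                        /* raw value for MMIO access */
        struct {
                u64 reserved : 57;
                u64 ce       : 6;       /* a named field, as in the headers above */
                u64 en       : 1;
        } s;
};

static void my_enable_ce(u64 __iomem *mmio)
{
        union my_reg reg;

        reg.u64 = readq(mmio);          /* read */
        reg.s.ce = 1;                   /* modify through the named field */
        writeq(reg.u64, mmio);          /* write back */
}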
ce                 88 arch/powerpc/include/asm/icswx.h 	u8 ce;
ce                285 arch/sparc/kernel/leon_kernel.c 	struct clock_event_device *ce;
ce                293 arch/sparc/kernel/leon_kernel.c 	ce = &per_cpu(sparc32_clockevent, cpu);
ce                296 arch/sparc/kernel/leon_kernel.c 	if (ce->event_handler)
ce                297 arch/sparc/kernel/leon_kernel.c 		ce->event_handler(ce);
ce                368 arch/sparc/kernel/sun4d_smp.c 	struct clock_event_device *ce;
ce                384 arch/sparc/kernel/sun4d_smp.c 	ce = &per_cpu(sparc32_clockevent, cpu);
ce                387 arch/sparc/kernel/sun4d_smp.c 	ce->event_handler(ce);
ce                244 arch/sparc/kernel/sun4m_smp.c 	struct clock_event_device *ce;
ce                249 arch/sparc/kernel/sun4m_smp.c 	ce = &per_cpu(sparc32_clockevent, cpu);
ce                251 arch/sparc/kernel/sun4m_smp.c 	if (clockevent_state_periodic(ce))
ce                257 arch/sparc/kernel/sun4m_smp.c 	ce->event_handler(ce);
ce                121 arch/sparc/kernel/time_32.c 	struct clock_event_device *ce = &timer_ce;
ce                125 arch/sparc/kernel/time_32.c 	ce->name     = "timer_ce";
ce                126 arch/sparc/kernel/time_32.c 	ce->rating   = 100;
ce                127 arch/sparc/kernel/time_32.c 	ce->features = CLOCK_EVT_FEAT_PERIODIC;
ce                128 arch/sparc/kernel/time_32.c 	ce->set_state_shutdown = timer_ce_shutdown;
ce                129 arch/sparc/kernel/time_32.c 	ce->set_state_periodic = timer_ce_set_periodic;
ce                130 arch/sparc/kernel/time_32.c 	ce->tick_resume = timer_ce_set_periodic;
ce                131 arch/sparc/kernel/time_32.c 	ce->cpumask  = cpu_possible_mask;
ce                132 arch/sparc/kernel/time_32.c 	ce->shift    = 32;
ce                133 arch/sparc/kernel/time_32.c 	ce->mult     = div_sc(sparc_config.clock_rate, NSEC_PER_SEC,
ce                134 arch/sparc/kernel/time_32.c 	                      ce->shift);
ce                135 arch/sparc/kernel/time_32.c 	clockevents_register_device(ce);
ce                214 arch/sparc/kernel/time_32.c 	struct clock_event_device *ce = &per_cpu(sparc32_clockevent, cpu);
ce                220 arch/sparc/kernel/time_32.c 	ce->name           = "percpu_ce";
ce                221 arch/sparc/kernel/time_32.c 	ce->rating         = 200;
ce                222 arch/sparc/kernel/time_32.c 	ce->features       = features;
ce                223 arch/sparc/kernel/time_32.c 	ce->set_state_shutdown = percpu_ce_shutdown;
ce                224 arch/sparc/kernel/time_32.c 	ce->set_state_periodic = percpu_ce_set_periodic;
ce                225 arch/sparc/kernel/time_32.c 	ce->set_state_oneshot = percpu_ce_shutdown;
ce                226 arch/sparc/kernel/time_32.c 	ce->set_next_event = percpu_ce_set_next_event;
ce                227 arch/sparc/kernel/time_32.c 	ce->cpumask        = cpumask_of(cpu);
ce                228 arch/sparc/kernel/time_32.c 	ce->shift          = 32;
ce                229 arch/sparc/kernel/time_32.c 	ce->mult           = div_sc(sparc_config.clock_rate, NSEC_PER_SEC,
ce                230 arch/sparc/kernel/time_32.c 	                            ce->shift);
ce                231 arch/sparc/kernel/time_32.c 	ce->max_delta_ns   = clockevent_delta2ns(sparc_config.clock_rate, ce);
ce                232 arch/sparc/kernel/time_32.c 	ce->max_delta_ticks = (unsigned long)sparc_config.clock_rate;
ce                233 arch/sparc/kernel/time_32.c 	ce->min_delta_ns   = clockevent_delta2ns(100, ce);
ce                234 arch/sparc/kernel/time_32.c 	ce->min_delta_ticks = 100;
ce                236 arch/sparc/kernel/time_32.c 	clockevents_register_device(ce);
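The two time_32.c groups do by hand what clockevents_config_and_register() does internally: div_sc(rate, NSEC_PER_SEC, shift) computes mult = (rate << shift) / NSEC_PER_SEC so that ticks = (ns * mult) >> shift, and clockevent_delta2ns() converts tick bounds back into the nanosecond limits the core checks. The same arithmetic as a compact, hypothetical helper:

#include <linux/clockchips.h>
#include <linux/ktime.h>

static void my_ce_config(struct clock_event_device *ce, unsigned long rate)
{
        ce->shift = 32;
        /* mult = (rate << 32) / NSEC_PER_SEC; ns -> ticks is (ns * mult) >> 32 */
        ce->mult  = div_sc(rate, NSEC_PER_SEC, ce->shift);

        /* one second of ticks is the largest programmable delta ... */
        ce->max_delta_ticks = rate;
        ce->max_delta_ns    = clockevent_delta2ns(rate, ce);
        /* ... and 100 ticks the smallest, matching percpu_ce above */
        ce->min_delta_ticks = 100;
        ce->min_delta_ns    = clockevent_delta2ns(100, ce);
}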
ce                202 drivers/ata/pata_octeon_cf.c 	reg_tim.s.ce = ns_to_tim_reg(div, 5);
ce                 42 drivers/base/power/clock_ops.c static inline void __pm_clk_enable(struct device *dev, struct pm_clock_entry *ce)
ce                 46 drivers/base/power/clock_ops.c 	if (ce->status < PCE_STATUS_ERROR) {
ce                 47 drivers/base/power/clock_ops.c 		ret = clk_enable(ce->clk);
ce                 49 drivers/base/power/clock_ops.c 			ce->status = PCE_STATUS_ENABLED;
ce                 52 drivers/base/power/clock_ops.c 				__func__, ce->clk, ret);
ce                 61 drivers/base/power/clock_ops.c static void pm_clk_acquire(struct device *dev, struct pm_clock_entry *ce)
ce                 63 drivers/base/power/clock_ops.c 	if (!ce->clk)
ce                 64 drivers/base/power/clock_ops.c 		ce->clk = clk_get(dev, ce->con_id);
ce                 65 drivers/base/power/clock_ops.c 	if (IS_ERR(ce->clk)) {
ce                 66 drivers/base/power/clock_ops.c 		ce->status = PCE_STATUS_ERROR;
ce                 68 drivers/base/power/clock_ops.c 		if (clk_prepare(ce->clk)) {
ce                 69 drivers/base/power/clock_ops.c 			ce->status = PCE_STATUS_ERROR;
ce                 72 drivers/base/power/clock_ops.c 			ce->status = PCE_STATUS_ACQUIRED;
ce                 75 drivers/base/power/clock_ops.c 				ce->clk, ce->con_id);
ce                 84 drivers/base/power/clock_ops.c 	struct pm_clock_entry *ce;
ce                 89 drivers/base/power/clock_ops.c 	ce = kzalloc(sizeof(*ce), GFP_KERNEL);
ce                 90 drivers/base/power/clock_ops.c 	if (!ce)
ce                 94 drivers/base/power/clock_ops.c 		ce->con_id = kstrdup(con_id, GFP_KERNEL);
ce                 95 drivers/base/power/clock_ops.c 		if (!ce->con_id) {
ce                 96 drivers/base/power/clock_ops.c 			kfree(ce);
ce                101 drivers/base/power/clock_ops.c 			kfree(ce);
ce                104 drivers/base/power/clock_ops.c 		ce->clk = clk;
ce                107 drivers/base/power/clock_ops.c 	pm_clk_acquire(dev, ce);
ce                110 drivers/base/power/clock_ops.c 	list_add_tail(&ce->node, &psd->clock_list);
ce                237 drivers/base/power/clock_ops.c static void __pm_clk_remove(struct pm_clock_entry *ce)
ce                239 drivers/base/power/clock_ops.c 	if (!ce)
ce                242 drivers/base/power/clock_ops.c 	if (ce->status < PCE_STATUS_ERROR) {
ce                243 drivers/base/power/clock_ops.c 		if (ce->status == PCE_STATUS_ENABLED)
ce                244 drivers/base/power/clock_ops.c 			clk_disable(ce->clk);
ce                246 drivers/base/power/clock_ops.c 		if (ce->status >= PCE_STATUS_ACQUIRED) {
ce                247 drivers/base/power/clock_ops.c 			clk_unprepare(ce->clk);
ce                248 drivers/base/power/clock_ops.c 			clk_put(ce->clk);
ce                252 drivers/base/power/clock_ops.c 	kfree(ce->con_id);
ce                253 drivers/base/power/clock_ops.c 	kfree(ce);
ce                267 drivers/base/power/clock_ops.c 	struct pm_clock_entry *ce;
ce                274 drivers/base/power/clock_ops.c 	list_for_each_entry(ce, &psd->clock_list, node) {
ce                275 drivers/base/power/clock_ops.c 		if (!con_id && !ce->con_id)
ce                277 drivers/base/power/clock_ops.c 		else if (!con_id || !ce->con_id)
ce                279 drivers/base/power/clock_ops.c 		else if (!strcmp(con_id, ce->con_id))
ce                287 drivers/base/power/clock_ops.c 	list_del(&ce->node);
ce                290 drivers/base/power/clock_ops.c 	__pm_clk_remove(ce);
ce                305 drivers/base/power/clock_ops.c 	struct pm_clock_entry *ce;
ce                312 drivers/base/power/clock_ops.c 	list_for_each_entry(ce, &psd->clock_list, node) {
ce                313 drivers/base/power/clock_ops.c 		if (clk == ce->clk)
ce                321 drivers/base/power/clock_ops.c 	list_del(&ce->node);
ce                324 drivers/base/power/clock_ops.c 	__pm_clk_remove(ce);
ce                367 drivers/base/power/clock_ops.c 	struct pm_clock_entry *ce, *c;
ce                377 drivers/base/power/clock_ops.c 	list_for_each_entry_safe_reverse(ce, c, &psd->clock_list, node)
ce                378 drivers/base/power/clock_ops.c 		list_move(&ce->node, &list);
ce                384 drivers/base/power/clock_ops.c 	list_for_each_entry_safe_reverse(ce, c, &list, node) {
ce                385 drivers/base/power/clock_ops.c 		list_del(&ce->node);
ce                386 drivers/base/power/clock_ops.c 		__pm_clk_remove(ce);
ce                398 drivers/base/power/clock_ops.c 	struct pm_clock_entry *ce;
ce                408 drivers/base/power/clock_ops.c 	list_for_each_entry_reverse(ce, &psd->clock_list, node) {
ce                409 drivers/base/power/clock_ops.c 		if (ce->status < PCE_STATUS_ERROR) {
ce                410 drivers/base/power/clock_ops.c 			if (ce->status == PCE_STATUS_ENABLED)
ce                411 drivers/base/power/clock_ops.c 				clk_disable(ce->clk);
ce                412 drivers/base/power/clock_ops.c 			ce->status = PCE_STATUS_ACQUIRED;
ce                429 drivers/base/power/clock_ops.c 	struct pm_clock_entry *ce;
ce                439 drivers/base/power/clock_ops.c 	list_for_each_entry(ce, &psd->clock_list, node)
ce                440 drivers/base/power/clock_ops.c 		__pm_clk_enable(dev, ce);
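The clock_ops.c hits trace a struct pm_clock_entry through its status ladder: pm_clk_acquire() takes ce->status to PCE_STATUS_ACQUIRED (clk_get() plus clk_prepare()) or PCE_STATUS_ERROR, __pm_clk_enable() raises it to PCE_STATUS_ENABLED, and __pm_clk_remove() unwinds whichever rung was reached; the suspend/resume walks then clk_disable()/clk_enable() each entry. Drivers see none of this directly; a minimal consumer sketch against the public <linux/pm_clock.h> API, where my_probe and the "bus" con_id are hypothetical:

#include <linux/device.h>
#include <linux/pm_clock.h>

static int my_probe(struct device *dev)
{
        int ret;

        ret = pm_clk_create(dev);       /* allocate the per-device clock_list */
        if (ret)
                return ret;

        /* allocates a pm_clock_entry and runs pm_clk_acquire() on it */
        ret = pm_clk_add(dev, "bus");
        if (ret) {
                pm_clk_destroy(dev);    /* __pm_clk_remove() on every entry */
                return ret;
        }

        return 0;
}

With that in place, pm_clk_suspend(dev) and pm_clk_resume(dev) perform the list walks shown at lines 398-440 above.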
ce                 47 drivers/clocksource/hyperv_timer.c 	struct clock_event_device *ce;
ce                 49 drivers/clocksource/hyperv_timer.c 	ce = this_cpu_ptr(hv_clock_event);
ce                 50 drivers/clocksource/hyperv_timer.c 	ce->event_handler(ce);
ce                107 drivers/clocksource/hyperv_timer.c 	struct clock_event_device *ce;
ce                117 drivers/clocksource/hyperv_timer.c 	ce = per_cpu_ptr(hv_clock_event, cpu);
ce                118 drivers/clocksource/hyperv_timer.c 	ce->name = "Hyper-V clockevent";
ce                119 drivers/clocksource/hyperv_timer.c 	ce->features = CLOCK_EVT_FEAT_ONESHOT;
ce                120 drivers/clocksource/hyperv_timer.c 	ce->cpumask = cpumask_of(cpu);
ce                121 drivers/clocksource/hyperv_timer.c 	ce->rating = 1000;
ce                122 drivers/clocksource/hyperv_timer.c 	ce->set_state_shutdown = hv_ce_shutdown;
ce                123 drivers/clocksource/hyperv_timer.c 	ce->set_state_oneshot = hv_ce_set_oneshot;
ce                124 drivers/clocksource/hyperv_timer.c 	ce->set_next_event = hv_ce_set_next_event;
ce                126 drivers/clocksource/hyperv_timer.c 	clockevents_config_and_register(ce,
ce                138 drivers/clocksource/hyperv_timer.c 	struct clock_event_device *ce;
ce                142 drivers/clocksource/hyperv_timer.c 		ce = per_cpu_ptr(hv_clock_event, cpu);
ce                143 drivers/clocksource/hyperv_timer.c 		hv_ce_shutdown(ce);
ce                193 drivers/clocksource/hyperv_timer.c 	struct clock_event_device *ce;
ce                197 drivers/clocksource/hyperv_timer.c 			ce = per_cpu_ptr(hv_clock_event, cpu);
ce                198 drivers/clocksource/hyperv_timer.c 			clockevents_unbind_device(ce, cpu);
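hyperv_timer.c keeps its device behind a per-CPU pointer: this_cpu_ptr() in the interrupt path, per_cpu_ptr() when setup, stop or unbind runs on behalf of another CPU. Per-CPU clockevents like this are typically brought up and torn down through the CPU-hotplug state machine; a hedged sketch using cpuhp_setup_state(), where the callbacks and state name are hypothetical and CPUHP_AP_ONLINE_DYN stands in for a driver-specific state:

#include <linux/cpuhotplug.h>

static int my_timer_cpu_up(unsigned int cpu)
{
        /* configure and register this CPU's clock_event_device */
        return 0;
}

static int my_timer_cpu_down(unsigned int cpu)
{
        /* shut the local timer down, as hv_ce_shutdown() does above */
        return 0;
}

static int __init my_timer_hotplug_init(void)
{
        int ret = cpuhp_setup_state(CPUHP_AP_ONLINE_DYN,
                                    "clockevents/my_timer:online",
                                    my_timer_cpu_up, my_timer_cpu_down);

        return ret < 0 ? ret : 0;       /* _DYN returns the state id on success */
}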
ce                 54 drivers/clocksource/mps2-timer.c static int mps2_timer_shutdown(struct clock_event_device *ce)
ce                 56 drivers/clocksource/mps2-timer.c 	clockevent_mps2_writel(0, ce, TIMER_RELOAD);
ce                 57 drivers/clocksource/mps2-timer.c 	clockevent_mps2_writel(0, ce, TIMER_CTRL);
ce                 62 drivers/clocksource/mps2-timer.c static int mps2_timer_set_next_event(unsigned long next, struct clock_event_device *ce)
ce                 64 drivers/clocksource/mps2-timer.c 	clockevent_mps2_writel(next, ce, TIMER_VALUE);
ce                 65 drivers/clocksource/mps2-timer.c 	clockevent_mps2_writel(TIMER_CTRL_IE | TIMER_CTRL_ENABLE, ce, TIMER_CTRL);
ce                 70 drivers/clocksource/mps2-timer.c static int mps2_timer_set_periodic(struct clock_event_device *ce)
ce                 72 drivers/clocksource/mps2-timer.c 	u32 clock_count_per_tick = to_mps2_clkevt(ce)->clock_count_per_tick;
ce                 74 drivers/clocksource/mps2-timer.c 	clockevent_mps2_writel(clock_count_per_tick, ce, TIMER_RELOAD);
ce                 75 drivers/clocksource/mps2-timer.c 	clockevent_mps2_writel(clock_count_per_tick, ce, TIMER_VALUE);
ce                 76 drivers/clocksource/mps2-timer.c 	clockevent_mps2_writel(TIMER_CTRL_IE | TIMER_CTRL_ENABLE, ce, TIMER_CTRL);
ce                 83 drivers/clocksource/mps2-timer.c 	struct clockevent_mps2 *ce = dev_id;
ce                 84 drivers/clocksource/mps2-timer.c 	u32 status = readl_relaxed(ce->reg + TIMER_INT);
ce                 91 drivers/clocksource/mps2-timer.c 	writel_relaxed(1, ce->reg + TIMER_INT);
ce                 93 drivers/clocksource/mps2-timer.c 	ce->clkevt.event_handler(&ce->clkevt);
ce                102 drivers/clocksource/mps2-timer.c 	struct clockevent_mps2 *ce;
ce                139 drivers/clocksource/mps2-timer.c 	ce = kzalloc(sizeof(*ce), GFP_KERNEL);
ce                140 drivers/clocksource/mps2-timer.c 	if (!ce) {
ce                145 drivers/clocksource/mps2-timer.c 	ce->reg = base;
ce                146 drivers/clocksource/mps2-timer.c 	ce->clock_count_per_tick = DIV_ROUND_CLOSEST(rate, HZ);
ce                147 drivers/clocksource/mps2-timer.c 	ce->clkevt.irq = irq;
ce                148 drivers/clocksource/mps2-timer.c 	ce->clkevt.name = name;
ce                149 drivers/clocksource/mps2-timer.c 	ce->clkevt.rating = 200;
ce                150 drivers/clocksource/mps2-timer.c 	ce->clkevt.features = CLOCK_EVT_FEAT_PERIODIC | CLOCK_EVT_FEAT_ONESHOT;
ce                151 drivers/clocksource/mps2-timer.c 	ce->clkevt.cpumask = cpu_possible_mask;
ce                152 drivers/clocksource/mps2-timer.c 	ce->clkevt.set_state_shutdown	= mps2_timer_shutdown,
ce                153 drivers/clocksource/mps2-timer.c 	ce->clkevt.set_state_periodic	= mps2_timer_set_periodic,
ce                154 drivers/clocksource/mps2-timer.c 	ce->clkevt.set_state_oneshot	= mps2_timer_shutdown,
ce                155 drivers/clocksource/mps2-timer.c 	ce->clkevt.set_next_event	= mps2_timer_set_next_event;
ce                160 drivers/clocksource/mps2-timer.c 	ret = request_irq(irq, mps2_timer_interrupt, IRQF_TIMER, name, ce);
ce                166 drivers/clocksource/mps2-timer.c 	clockevents_config_and_register(&ce->clkevt, rate, 0xf, 0xffffffff);
ce                171 drivers/clocksource/mps2-timer.c 	kfree(ce);
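mps2-timer.c is the dynamic variant of the same registration: the clock_event_device is embedded in a kzalloc()ed wrapper, the wrapper is the request_irq() dev_id, and the error path frees it again. Condensed, with hypothetical names (struct my_clkevt, my_clkevt_irq, my_clkevt_init) and the set_state_*()/set_next_event() callbacks elided:

#include <linux/clockchips.h>
#include <linux/cpumask.h>
#include <linux/interrupt.h>
#include <linux/slab.h>

struct my_clkevt {
        struct clock_event_device clkevt;
        void __iomem *reg;
};

static irqreturn_t my_clkevt_irq(int irq, void *dev_id)
{
        struct my_clkevt *ce = dev_id;  /* the wrapper, not the core struct */

        ce->clkevt.event_handler(&ce->clkevt);
        return IRQ_HANDLED;
}

static int __init my_clkevt_init(void __iomem *base, int irq, u32 rate)
{
        struct my_clkevt *ce;
        int ret;

        ce = kzalloc(sizeof(*ce), GFP_KERNEL);
        if (!ce)
                return -ENOMEM;

        ce->reg = base;
        ce->clkevt.name = "my_clkevt";
        ce->clkevt.rating = 200;
        ce->clkevt.cpumask = cpu_possible_mask;
        /* set_state_*() and set_next_event() callbacks elided */

        ret = request_irq(irq, my_clkevt_irq, IRQF_TIMER, "my_clkevt", ce);
        if (ret) {
                kfree(ce);              /* unwind, as mps2's error path does */
                return ret;
        }

        clockevents_config_and_register(&ce->clkevt, rate, 0xf, 0xffffffff);
        return 0;
}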
ce                 72 drivers/clocksource/timer-atlas7.c 	struct clock_event_device *ce = dev_id;
ce                 78 drivers/clocksource/timer-atlas7.c 	if (clockevent_state_oneshot(ce))
ce                 81 drivers/clocksource/timer-atlas7.c 	ce->event_handler(ce);
ce                101 drivers/clocksource/timer-atlas7.c 	struct clock_event_device *ce)
ce                176 drivers/clocksource/timer-atlas7.c 	struct clock_event_device *ce = per_cpu_ptr(sirfsoc_clockevent, cpu);
ce                184 drivers/clocksource/timer-atlas7.c 	ce->irq = action->irq;
ce                185 drivers/clocksource/timer-atlas7.c 	ce->name = "local_timer";
ce                186 drivers/clocksource/timer-atlas7.c 	ce->features = CLOCK_EVT_FEAT_ONESHOT;
ce                187 drivers/clocksource/timer-atlas7.c 	ce->rating = 200;
ce                188 drivers/clocksource/timer-atlas7.c 	ce->set_state_shutdown = sirfsoc_timer_shutdown;
ce                189 drivers/clocksource/timer-atlas7.c 	ce->set_state_oneshot = sirfsoc_timer_shutdown;
ce                190 drivers/clocksource/timer-atlas7.c 	ce->tick_resume = sirfsoc_timer_shutdown;
ce                191 drivers/clocksource/timer-atlas7.c 	ce->set_next_event = sirfsoc_timer_set_next_event;
ce                192 drivers/clocksource/timer-atlas7.c 	clockevents_calc_mult_shift(ce, atlas7_timer_rate, 60);
ce                193 drivers/clocksource/timer-atlas7.c 	ce->max_delta_ns = clockevent_delta2ns(-2, ce);
ce                194 drivers/clocksource/timer-atlas7.c 	ce->max_delta_ticks = (unsigned long)-2;
ce                195 drivers/clocksource/timer-atlas7.c 	ce->min_delta_ns = clockevent_delta2ns(2, ce);
ce                196 drivers/clocksource/timer-atlas7.c 	ce->min_delta_ticks = 2;
ce                197 drivers/clocksource/timer-atlas7.c 	ce->cpumask = cpumask_of(cpu);
ce                199 drivers/clocksource/timer-atlas7.c 	action->dev_id = ce;
ce                200 drivers/clocksource/timer-atlas7.c 	BUG_ON(setup_irq(ce->irq, action));
ce                203 drivers/clocksource/timer-atlas7.c 	clockevents_register_device(ce);
ce                 93 drivers/clocksource/timer-cadence-ttc.c 	struct clock_event_device	ce;
ce                 97 drivers/clocksource/timer-cadence-ttc.c 		container_of(x, struct ttc_timer_clockevent, ce)
ce                144 drivers/clocksource/timer-cadence-ttc.c 	ttce->ce.event_handler(&ttce->ce);
ce                391 drivers/clocksource/timer-cadence-ttc.c 		clockevents_update_freq(&ttcce->ce, ndata->new_rate / PRESCALE);
ce                433 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.name = "ttc_clockevent";
ce                434 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.features = CLOCK_EVT_FEAT_PERIODIC | CLOCK_EVT_FEAT_ONESHOT;
ce                435 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.set_next_event = ttc_set_next_event;
ce                436 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.set_state_shutdown = ttc_shutdown;
ce                437 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.set_state_periodic = ttc_set_periodic;
ce                438 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.set_state_oneshot = ttc_shutdown;
ce                439 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.tick_resume = ttc_resume;
ce                440 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.rating = 200;
ce                441 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.irq = irq;
ce                442 drivers/clocksource/timer-cadence-ttc.c 	ttcce->ce.cpumask = cpu_possible_mask;
ce                455 drivers/clocksource/timer-cadence-ttc.c 			  IRQF_TIMER, ttcce->ce.name, ttcce);
ce                461 drivers/clocksource/timer-cadence-ttc.c 	clockevents_config_and_register(&ttcce->ce,
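timer-cadence-ttc.c (like mps2 above and sun5i below) recovers its private state from the generic pointer with a container_of() wrapper: to_ttc_timer_clockevent(x) expands to container_of(x, struct ttc_timer_clockevent, ce). The idiom in isolation, with a hypothetical struct, callback and register write:

#include <linux/clockchips.h>
#include <linux/io.h>
#include <linux/kernel.h>       /* container_of() */

struct my_timer_clockevent {
        struct clock_event_device ce;   /* member named in the macro below */
        void __iomem *base;
};

#define to_my_timer_clockevent(x) \
        container_of(x, struct my_timer_clockevent, ce)

static int my_shutdown(struct clock_event_device *ce)
{
        struct my_timer_clockevent *priv = to_my_timer_clockevent(ce);

        /* priv->base is reachable again from the generic callback */
        writel_relaxed(0, priv->base);  /* hypothetical: offset 0 stops it */
        return 0;
}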
ce                 61 drivers/clocksource/timer-digicolor.c 	struct clock_event_device ce;
ce                 67 drivers/clocksource/timer-digicolor.c static struct digicolor_timer *dc_timer(struct clock_event_device *ce)
ce                 69 drivers/clocksource/timer-digicolor.c 	return container_of(ce, struct digicolor_timer, ce);
ce                 72 drivers/clocksource/timer-digicolor.c static inline void dc_timer_disable(struct clock_event_device *ce)
ce                 74 drivers/clocksource/timer-digicolor.c 	struct digicolor_timer *dt = dc_timer(ce);
ce                 78 drivers/clocksource/timer-digicolor.c static inline void dc_timer_enable(struct clock_event_device *ce, u32 mode)
ce                 80 drivers/clocksource/timer-digicolor.c 	struct digicolor_timer *dt = dc_timer(ce);
ce                 84 drivers/clocksource/timer-digicolor.c static inline void dc_timer_set_count(struct clock_event_device *ce,
ce                 87 drivers/clocksource/timer-digicolor.c 	struct digicolor_timer *dt = dc_timer(ce);
ce                 91 drivers/clocksource/timer-digicolor.c static int digicolor_clkevt_shutdown(struct clock_event_device *ce)
ce                 93 drivers/clocksource/timer-digicolor.c 	dc_timer_disable(ce);
ce                 97 drivers/clocksource/timer-digicolor.c static int digicolor_clkevt_set_oneshot(struct clock_event_device *ce)
ce                 99 drivers/clocksource/timer-digicolor.c 	dc_timer_disable(ce);
ce                100 drivers/clocksource/timer-digicolor.c 	dc_timer_enable(ce, CONTROL_MODE_ONESHOT);
ce                104 drivers/clocksource/timer-digicolor.c static int digicolor_clkevt_set_periodic(struct clock_event_device *ce)
ce                106 drivers/clocksource/timer-digicolor.c 	struct digicolor_timer *dt = dc_timer(ce);
ce                108 drivers/clocksource/timer-digicolor.c 	dc_timer_disable(ce);
ce                109 drivers/clocksource/timer-digicolor.c 	dc_timer_set_count(ce, dt->ticks_per_jiffy);
ce                110 drivers/clocksource/timer-digicolor.c 	dc_timer_enable(ce, CONTROL_MODE_PERIODIC);
ce                115 drivers/clocksource/timer-digicolor.c 				       struct clock_event_device *ce)
ce                117 drivers/clocksource/timer-digicolor.c 	dc_timer_disable(ce);
ce                118 drivers/clocksource/timer-digicolor.c 	dc_timer_set_count(ce, evt);
ce                119 drivers/clocksource/timer-digicolor.c 	dc_timer_enable(ce, CONTROL_MODE_ONESHOT);
ce                125 drivers/clocksource/timer-digicolor.c 	.ce = {
ce                193 drivers/clocksource/timer-digicolor.c 			  &dc_timer_dev.ce);
ce                199 drivers/clocksource/timer-digicolor.c 	dc_timer_dev.ce.cpumask = cpu_possible_mask;
ce                200 drivers/clocksource/timer-digicolor.c 	dc_timer_dev.ce.irq = irq;
ce                202 drivers/clocksource/timer-digicolor.c 	clockevents_config_and_register(&dc_timer_dev.ce, rate, 0, 0xffffffff);
ce                 27 drivers/clocksource/timer-gx6605s.c 	struct clock_event_device *ce = dev;
ce                 28 drivers/clocksource/timer-gx6605s.c 	void __iomem *base = timer_of_base(to_timer_of(ce));
ce                 32 drivers/clocksource/timer-gx6605s.c 	ce->event_handler(ce);
ce                 37 drivers/clocksource/timer-gx6605s.c static int gx6605s_timer_set_oneshot(struct clock_event_device *ce)
ce                 39 drivers/clocksource/timer-gx6605s.c 	void __iomem *base = timer_of_base(to_timer_of(ce));
ce                 52 drivers/clocksource/timer-gx6605s.c 					struct clock_event_device *ce)
ce                 54 drivers/clocksource/timer-gx6605s.c 	void __iomem *base = timer_of_base(to_timer_of(ce));
ce                 66 drivers/clocksource/timer-gx6605s.c static int gx6605s_timer_shutdown(struct clock_event_device *ce)
ce                 68 drivers/clocksource/timer-gx6605s.c 	void __iomem *base = timer_of_base(to_timer_of(ce));
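timer-gx6605s.c (and timer-sprd.c further down) skip the hand-rolled wrapper and use the shared timer_of helper instead: to_timer_of(ce) is a container_of() on the clkevt member of struct timer_of, and timer_of_base() yields the mapped MMIO base. A sketch of a callback written against those helpers; the register write is hypothetical:

#include <linux/io.h>
#include "timer-of.h"   /* drivers/clocksource private helper */

static int my_shutdown(struct clock_event_device *ce)
{
        void __iomem *base = timer_of_base(to_timer_of(ce));

        writel_relaxed(0, base);        /* hypothetical: offset 0 disables it */
        return 0;
}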
ce                 21 drivers/clocksource/timer-mp-csky.c 				       struct clock_event_device *ce)
ce                 28 drivers/clocksource/timer-mp-csky.c static int csky_mptimer_shutdown(struct clock_event_device *ce)
ce                 35 drivers/clocksource/timer-mp-csky.c static int csky_mptimer_oneshot(struct clock_event_device *ce)
ce                 42 drivers/clocksource/timer-mp-csky.c static int csky_mptimer_oneshot_stopped(struct clock_event_device *ce)
ce                 60 drivers/clocksource/timer-prima2.c 	struct clock_event_device *ce = dev_id;
ce                 68 drivers/clocksource/timer-prima2.c 	ce->event_handler(ce);
ce                 89 drivers/clocksource/timer-prima2.c 	struct clock_event_device *ce)
ce                 20 drivers/clocksource/timer-riscv.c 		struct clock_event_device *ce)
ce                 59 drivers/clocksource/timer-riscv.c 	struct clock_event_device *ce = per_cpu_ptr(&riscv_clock_event, cpu);
ce                 61 drivers/clocksource/timer-riscv.c 	ce->cpumask = cpumask_of(cpu);
ce                 62 drivers/clocksource/timer-riscv.c 	clockevents_config_and_register(ce, riscv_timebase, 100, 0x7fffffff);
ce                 43 drivers/clocksource/timer-rockchip.c 	struct clock_event_device ce;
ce                 50 drivers/clocksource/timer-rockchip.c static inline struct rk_timer *rk_timer(struct clock_event_device *ce)
ce                 52 drivers/clocksource/timer-rockchip.c 	return &container_of(ce, struct rk_clkevt, ce)->timer;
ce                 78 drivers/clocksource/timer-rockchip.c 					  struct clock_event_device *ce)
ce                 80 drivers/clocksource/timer-rockchip.c 	struct rk_timer *timer = rk_timer(ce);
ce                 89 drivers/clocksource/timer-rockchip.c static int rk_timer_shutdown(struct clock_event_device *ce)
ce                 91 drivers/clocksource/timer-rockchip.c 	struct rk_timer *timer = rk_timer(ce);
ce                 97 drivers/clocksource/timer-rockchip.c static int rk_timer_set_periodic(struct clock_event_device *ce)
ce                 99 drivers/clocksource/timer-rockchip.c 	struct rk_timer *timer = rk_timer(ce);
ce                109 drivers/clocksource/timer-rockchip.c 	struct clock_event_device *ce = dev_id;
ce                110 drivers/clocksource/timer-rockchip.c 	struct rk_timer *timer = rk_timer(ce);
ce                114 drivers/clocksource/timer-rockchip.c 	if (clockevent_state_oneshot(ce))
ce                117 drivers/clocksource/timer-rockchip.c 	ce->event_handler(ce);
ce                207 drivers/clocksource/timer-rockchip.c 	struct clock_event_device *ce;
ce                220 drivers/clocksource/timer-rockchip.c 	ce = &rk_clkevt->ce;
ce                221 drivers/clocksource/timer-rockchip.c 	ce->name = TIMER_NAME;
ce                222 drivers/clocksource/timer-rockchip.c 	ce->features = CLOCK_EVT_FEAT_PERIODIC | CLOCK_EVT_FEAT_ONESHOT |
ce                224 drivers/clocksource/timer-rockchip.c 	ce->set_next_event = rk_timer_set_next_event;
ce                225 drivers/clocksource/timer-rockchip.c 	ce->set_state_shutdown = rk_timer_shutdown;
ce                226 drivers/clocksource/timer-rockchip.c 	ce->set_state_periodic = rk_timer_set_periodic;
ce                227 drivers/clocksource/timer-rockchip.c 	ce->irq = rk_clkevt->timer.irq;
ce                228 drivers/clocksource/timer-rockchip.c 	ce->cpumask = cpu_possible_mask;
ce                229 drivers/clocksource/timer-rockchip.c 	ce->rating = 250;
ce                232 drivers/clocksource/timer-rockchip.c 			  TIMER_NAME, ce);
ce                239 drivers/clocksource/timer-rockchip.c 	clockevents_config_and_register(&rk_clkevt->ce,
ce                 80 drivers/clocksource/timer-sprd.c 				     struct clock_event_device *ce)
ce                 82 drivers/clocksource/timer-sprd.c 	struct timer_of *to = to_timer_of(ce);
ce                 91 drivers/clocksource/timer-sprd.c static int sprd_timer_set_periodic(struct clock_event_device *ce)
ce                 93 drivers/clocksource/timer-sprd.c 	struct timer_of *to = to_timer_of(ce);
ce                102 drivers/clocksource/timer-sprd.c static int sprd_timer_shutdown(struct clock_event_device *ce)
ce                104 drivers/clocksource/timer-sprd.c 	struct timer_of *to = to_timer_of(ce);
ce                112 drivers/clocksource/timer-sprd.c 	struct clock_event_device *ce = (struct clock_event_device *)dev_id;
ce                113 drivers/clocksource/timer-sprd.c 	struct timer_of *to = to_timer_of(ce);
ce                117 drivers/clocksource/timer-sprd.c 	if (clockevent_state_oneshot(ce))
ce                120 drivers/clocksource/timer-sprd.c 	ce->event_handler(ce);
ce                 73 drivers/clocksource/timer-sun5i.c static void sun5i_clkevt_sync(struct sun5i_timer_clkevt *ce)
ce                 75 drivers/clocksource/timer-sun5i.c 	u32 old = readl(ce->timer.base + TIMER_CNTVAL_LO_REG(1));
ce                 77 drivers/clocksource/timer-sun5i.c 	while ((old - readl(ce->timer.base + TIMER_CNTVAL_LO_REG(1))) < TIMER_SYNC_TICKS)
ce                 81 drivers/clocksource/timer-sun5i.c static void sun5i_clkevt_time_stop(struct sun5i_timer_clkevt *ce, u8 timer)
ce                 83 drivers/clocksource/timer-sun5i.c 	u32 val = readl(ce->timer.base + TIMER_CTL_REG(timer));
ce                 84 drivers/clocksource/timer-sun5i.c 	writel(val & ~TIMER_CTL_ENABLE, ce->timer.base + TIMER_CTL_REG(timer));
ce                 86 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_sync(ce);
ce                 89 drivers/clocksource/timer-sun5i.c static void sun5i_clkevt_time_setup(struct sun5i_timer_clkevt *ce, u8 timer, u32 delay)
ce                 91 drivers/clocksource/timer-sun5i.c 	writel(delay, ce->timer.base + TIMER_INTVAL_LO_REG(timer));
ce                 94 drivers/clocksource/timer-sun5i.c static void sun5i_clkevt_time_start(struct sun5i_timer_clkevt *ce, u8 timer, bool periodic)
ce                 96 drivers/clocksource/timer-sun5i.c 	u32 val = readl(ce->timer.base + TIMER_CTL_REG(timer));
ce                104 drivers/clocksource/timer-sun5i.c 	       ce->timer.base + TIMER_CTL_REG(timer));
ce                109 drivers/clocksource/timer-sun5i.c 	struct sun5i_timer_clkevt *ce = to_sun5i_timer_clkevt(clkevt);
ce                111 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_stop(ce, 0);
ce                117 drivers/clocksource/timer-sun5i.c 	struct sun5i_timer_clkevt *ce = to_sun5i_timer_clkevt(clkevt);
ce                119 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_stop(ce, 0);
ce                120 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_start(ce, 0, false);
ce                126 drivers/clocksource/timer-sun5i.c 	struct sun5i_timer_clkevt *ce = to_sun5i_timer_clkevt(clkevt);
ce                128 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_stop(ce, 0);
ce                129 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_setup(ce, 0, ce->timer.ticks_per_jiffy);
ce                130 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_start(ce, 0, true);
ce                137 drivers/clocksource/timer-sun5i.c 	struct sun5i_timer_clkevt *ce = to_sun5i_timer_clkevt(clkevt);
ce                139 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_stop(ce, 0);
ce                140 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_setup(ce, 0, evt - TIMER_SYNC_TICKS);
ce                141 drivers/clocksource/timer-sun5i.c 	sun5i_clkevt_time_start(ce, 0, false);
ce                148 drivers/clocksource/timer-sun5i.c 	struct sun5i_timer_clkevt *ce = (struct sun5i_timer_clkevt *)dev_id;
ce                150 drivers/clocksource/timer-sun5i.c 	writel(0x1, ce->timer.base + TIMER_IRQ_ST_REG);
ce                151 drivers/clocksource/timer-sun5i.c 	ce->clkevt.event_handler(&ce->clkevt);
ce                254 drivers/clocksource/timer-sun5i.c 	struct sun5i_timer_clkevt *ce = container_of(timer, struct sun5i_timer_clkevt, timer);
ce                257 drivers/clocksource/timer-sun5i.c 		clockevents_update_freq(&ce->clkevt, ndata->new_rate);
ce                258 drivers/clocksource/timer-sun5i.c 		ce->timer.ticks_per_jiffy = DIV_ROUND_UP(ndata->new_rate, HZ);
ce                267 drivers/clocksource/timer-sun5i.c 	struct sun5i_timer_clkevt *ce;
ce                272 drivers/clocksource/timer-sun5i.c 	ce = kzalloc(sizeof(*ce), GFP_KERNEL);
ce                273 drivers/clocksource/timer-sun5i.c 	if (!ce)
ce                289 drivers/clocksource/timer-sun5i.c 	ce->timer.base = base;
ce                290 drivers/clocksource/timer-sun5i.c 	ce->timer.ticks_per_jiffy = DIV_ROUND_UP(rate, HZ);
ce                291 drivers/clocksource/timer-sun5i.c 	ce->timer.clk = clk;
ce                292 drivers/clocksource/timer-sun5i.c 	ce->timer.clk_rate_cb.notifier_call = sun5i_rate_cb_clkevt;
ce                293 drivers/clocksource/timer-sun5i.c 	ce->timer.clk_rate_cb.next = NULL;
ce                295 drivers/clocksource/timer-sun5i.c 	ret = clk_notifier_register(clk, &ce->timer.clk_rate_cb);
ce                301 drivers/clocksource/timer-sun5i.c 	ce->clkevt.name = node->name;
ce                302 drivers/clocksource/timer-sun5i.c 	ce->clkevt.features = CLOCK_EVT_FEAT_PERIODIC | CLOCK_EVT_FEAT_ONESHOT;
ce                303 drivers/clocksource/timer-sun5i.c 	ce->clkevt.set_next_event = sun5i_clkevt_next_event;
ce                304 drivers/clocksource/timer-sun5i.c 	ce->clkevt.set_state_shutdown = sun5i_clkevt_shutdown;
ce                305 drivers/clocksource/timer-sun5i.c 	ce->clkevt.set_state_periodic = sun5i_clkevt_set_periodic;
ce                306 drivers/clocksource/timer-sun5i.c 	ce->clkevt.set_state_oneshot = sun5i_clkevt_set_oneshot;
ce                307 drivers/clocksource/timer-sun5i.c 	ce->clkevt.tick_resume = sun5i_clkevt_shutdown;
ce                308 drivers/clocksource/timer-sun5i.c 	ce->clkevt.rating = 340;
ce                309 drivers/clocksource/timer-sun5i.c 	ce->clkevt.irq = irq;
ce                310 drivers/clocksource/timer-sun5i.c 	ce->clkevt.cpumask = cpu_possible_mask;
ce                316 drivers/clocksource/timer-sun5i.c 	clockevents_config_and_register(&ce->clkevt, rate,
ce                320 drivers/clocksource/timer-sun5i.c 			  "sun5i_timer0", ce);
ce                329 drivers/clocksource/timer-sun5i.c 	clk_notifier_unregister(clk, &ce->timer.clk_rate_cb);
ce                333 drivers/clocksource/timer-sun5i.c 	kfree(ce);
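The sun5i driver also survives parent clock rate changes: a notifier_block embedded next to the clockevent is registered with clk_notifier_register(), and on POST_RATE_CHANGE the callback re-derives the frequency with clockevents_update_freq() and recomputes ticks_per_jiffy, exactly as at lines 254-258 above. The callback shape, with a hypothetical wrapper struct:

#include <linux/clk.h>
#include <linux/clockchips.h>
#include <linux/kernel.h>

struct my_timer {
        struct clock_event_device clkevt;
        struct notifier_block clk_rate_cb;
        u32 ticks_per_jiffy;
};

static int my_rate_cb(struct notifier_block *nb, unsigned long event, void *data)
{
        struct clk_notifier_data *ndata = data;
        struct my_timer *t = container_of(nb, struct my_timer, clk_rate_cb);

        if (event != POST_RATE_CHANGE)
                return NOTIFY_DONE;

        clockevents_update_freq(&t->clkevt, ndata->new_rate);
        t->ticks_per_jiffy = DIV_ROUND_UP(ndata->new_rate, HZ);
        return NOTIFY_OK;
}

Registration is one call, clk_notifier_register(clk, &t->clk_rate_cb), with clk_notifier_unregister() on the error path as the excerpt shows.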
ce                 65 drivers/crypto/cavium/zip/zip_deflate.c 	zip_cmd->s.ce = 1;
ce                 65 drivers/crypto/cavium/zip/zip_inflate.c 	zip_cmd->s.ce = 0;
ce                190 drivers/crypto/cavium/zip/zip_regs.h 		u64 ce                          : 1;
ce                200 drivers/crypto/cavium/zip/zip_regs.h 		u64 ce                          : 1;
ce                278 drivers/crypto/hisilicon/qm.c 	void (*hw_error_init)(struct hisi_qm *qm, u32 ce, u32 nfe, u32 fe,
ce                981 drivers/crypto/hisilicon/qm.c static void qm_hw_error_init_v1(struct hisi_qm *qm, u32 ce, u32 nfe, u32 fe,
ce                990 drivers/crypto/hisilicon/qm.c static void qm_hw_error_init_v2(struct hisi_qm *qm, u32 ce, u32 nfe, u32 fe,
ce                993 drivers/crypto/hisilicon/qm.c 	u32 irq_enable = ce | nfe | fe | msi;
ce                996 drivers/crypto/hisilicon/qm.c 	qm->error_mask = ce | nfe | fe;
ce               1000 drivers/crypto/hisilicon/qm.c 	writel(ce, qm->io_base + QM_RAS_CE_ENABLE);
ce               1861 drivers/crypto/hisilicon/qm.c void hisi_qm_hw_error_init(struct hisi_qm *qm, u32 ce, u32 nfe, u32 fe,
ce               1870 drivers/crypto/hisilicon/qm.c 	qm->ops->hw_error_init(qm, ce, nfe, fe, msi);
ce                210 drivers/crypto/hisilicon/qm.h void hisi_qm_hw_error_init(struct hisi_qm *qm, u32 ce, u32 nfe, u32 fe,
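The hisi_qm hits show hardware-error setup dispatched through a per-revision ops table: the exported hisi_qm_hw_error_init() only forwards the correctable/non-fatal/fatal/MSI masks (ce, nfe, fe, msi) to qm->ops->hw_error_init, with distinct v1 and v2 implementations behind it, v2 also caching error_mask = ce | nfe | fe. The dispatch reduced to a hypothetical sketch:

#include <linux/types.h>

struct my_qm;

struct my_qm_ops {
        void (*hw_error_init)(struct my_qm *qm, u32 ce, u32 nfe, u32 fe, u32 msi);
};

struct my_qm {
        const struct my_qm_ops *ops;    /* chosen once from the HW revision */
        u32 error_mask;
};

static void my_hw_error_init_v2(struct my_qm *qm, u32 ce, u32 nfe, u32 fe, u32 msi)
{
        qm->error_mask = ce | nfe | fe; /* correctable | non-fatal | fatal */
        /* ... then write ce into the RAS correctable-error-enable register */
}

static const struct my_qm_ops my_qm_v2_ops = {
        .hw_error_init = my_hw_error_init_v2,
};

void my_hw_error_init(struct my_qm *qm, u32 ce, u32 nfe, u32 fe, u32 msi)
{
        if (!qm->ops->hw_error_init)
                return;                 /* older revisions may lack the hook */
        qm->ops->hw_error_init(qm, ce, nfe, fe, msi);
}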
ce                170 drivers/crypto/nx/nx-842-powernv.c 	       (csb)->cs, (csb)->cc, (csb)->ce,			\
ce                363 drivers/crypto/nx/nx-842-powernv.c 	if (csb->ce & CSB_CE_TERMINATION) {
ce                367 drivers/crypto/nx/nx-842-powernv.c 	if (csb->ce & CSB_CE_INCOMPLETE) {
ce                371 drivers/crypto/nx/nx-842-powernv.c 	if (!(csb->ce & CSB_CE_TPBC)) {
ce                261 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h 	struct amdgpu_ce		ce;
ce               1449 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_bo_free_kernel(&adev->gfx.ce.ce_fw_obj,
ce               1450 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 			      &adev->gfx.ce.ce_fw_gpu_addr,
ce               1451 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 			      (void **)&adev->gfx.ce.ce_fw_ptr);
ce               2550 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 				      &adev->gfx.ce.ce_fw_obj,
ce               2551 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 				      &adev->gfx.ce.ce_fw_gpu_addr,
ce               2552 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 				      (void **)&adev->gfx.ce.ce_fw_ptr);
ce               2559 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	memcpy(adev->gfx.ce.ce_fw_ptr, fw_data, fw_size);
ce               2561 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_bo_kunmap(adev->gfx.ce.ce_fw_obj);
ce               2562 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	amdgpu_bo_unreserve(adev->gfx.ce.ce_fw_obj);
ce               2592 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 		adev->gfx.ce.ce_fw_gpu_addr & 0xFFFFF000);
ce               2594 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 		upper_32_bits(adev->gfx.ce.ce_fw_gpu_addr));
ce                100 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	struct intel_context *ce;
ce                158 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	struct drm_i915_private *i915 = w->ce->engine->i915;
ce                182 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	batch = intel_emit_vma_fill_blt(w->ce, vma, w->value);
ce                188 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	rq = intel_context_create_request(w->ce);
ce                203 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	if (w->ce->engine->emit_init_breadcrumb) {
ce                204 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		err = w->ce->engine->emit_init_breadcrumb(rq);
ce                218 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	err = w->ce->engine->emit_bb_start(rq,
ce                229 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	intel_emit_vma_release(w->ce, batch);
ce                265 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 				     struct intel_context *ce,
ce                274 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	sleeve = create_sleeve(ce->vm, obj, pages, page_sizes);
ce                286 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	work->ce = ce;
ce                 16 drivers/gpu/drm/i915/gem/i915_gem_client_blt.h 				     struct intel_context *ce,
ce                295 drivers/gpu/drm/i915/gem/i915_gem_context.c 		struct intel_context *ce;
ce                297 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ce = intel_context_create(ctx, engine);
ce                298 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (IS_ERR(ce)) {
ce                300 drivers/gpu/drm/i915/gem/i915_gem_context.c 			return ERR_CAST(ce);
ce                303 drivers/gpu/drm/i915/gem/i915_gem_context.c 		e->engines[id] = ce;
ce                458 drivers/gpu/drm/i915/gem/i915_gem_context.c 		  void (*fn)(struct intel_context *ce, void *data),
ce                462 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct intel_context *ce;
ce                464 drivers/gpu/drm/i915/gem/i915_gem_context.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it)
ce                465 drivers/gpu/drm/i915/gem/i915_gem_context.c 		fn(ce, data);
ce                469 drivers/gpu/drm/i915/gem/i915_gem_context.c static void __apply_ppgtt(struct intel_context *ce, void *vm)
ce                471 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_vm_put(ce->vm);
ce                472 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ce->vm = i915_vm_get(vm);
ce                510 drivers/gpu/drm/i915/gem/i915_gem_context.c static void __apply_timeline(struct intel_context *ce, void *timeline)
ce                512 drivers/gpu/drm/i915/gem/i915_gem_context.c 	__set_timeline(&ce->timeline, timeline);
ce                868 drivers/gpu/drm/i915/gem/i915_gem_context.c 				bool (*skip)(struct intel_context *ce, void *data),
ce                876 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct intel_context *ce;
ce                893 drivers/gpu/drm/i915/gem/i915_gem_context.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ce                897 drivers/gpu/drm/i915/gem/i915_gem_context.c 				       ce->engine->mask)) {
ce                902 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (!(ce->engine->mask & engines))
ce                905 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (skip && skip(ce, data))
ce                908 drivers/gpu/drm/i915/gem/i915_gem_context.c 		rq = intel_context_create_request(ce);
ce               1035 drivers/gpu/drm/i915/gem/i915_gem_context.c static bool skip_ppgtt_update(struct intel_context *ce, void *data)
ce               1037 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (HAS_LOGICAL_RING_CONTEXTS(ce->engine->i915))
ce               1038 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return !ce->state;
ce               1040 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return !atomic_read(&ce->pin_count);
ce               1108 drivers/gpu/drm/i915/gem/i915_gem_context.c 				 struct intel_context *ce,
ce               1118 drivers/gpu/drm/i915/gem/i915_gem_context.c 	offset = i915_ggtt_offset(ce->state) +
ce               1133 drivers/gpu/drm/i915/gem/i915_gem_context.c gen8_modify_rpcs(struct intel_context *ce, struct intel_sseu sseu)
ce               1138 drivers/gpu/drm/i915/gem/i915_gem_context.c 	lockdep_assert_held(&ce->pin_mutex);
ce               1146 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!intel_context_is_pinned(ce))
ce               1149 drivers/gpu/drm/i915/gem/i915_gem_context.c 	rq = i915_request_create(ce->engine->kernel_context);
ce               1154 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = intel_context_prepare_remote_request(ce, rq);
ce               1156 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ret = gen8_emit_rpcs_config(rq, ce, sseu);
ce               1163 drivers/gpu/drm/i915/gem/i915_gem_context.c __intel_context_reconfigure_sseu(struct intel_context *ce,
ce               1168 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(INTEL_GEN(ce->engine->i915) < 8);
ce               1170 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = intel_context_lock_pinned(ce);
ce               1175 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!memcmp(&ce->sseu, &sseu, sizeof(sseu)))
ce               1178 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = gen8_modify_rpcs(ce, sseu);
ce               1180 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ce->sseu = sseu;
ce               1183 drivers/gpu/drm/i915/gem/i915_gem_context.c 	intel_context_unlock_pinned(ce);
ce               1188 drivers/gpu/drm/i915/gem/i915_gem_context.c intel_context_reconfigure_sseu(struct intel_context *ce, struct intel_sseu sseu)
ce               1190 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct drm_i915_private *i915 = ce->engine->i915;
ce               1197 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = __intel_context_reconfigure_sseu(ce, sseu);
ce               1307 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct intel_context *ce;
ce               1332 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ce = lookup_user_engine(ctx, lookup, &user_sseu.engine);
ce               1333 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ce))
ce               1334 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return PTR_ERR(ce);
ce               1337 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (ce->engine->class != RENDER_CLASS) {
ce               1346 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = intel_context_reconfigure_sseu(ce, sseu);
ce               1353 drivers/gpu/drm/i915/gem/i915_gem_context.c 	intel_context_put(ce);
ce               1370 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct intel_context *ce;
ce               1435 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ce = intel_execlists_create_virtual(set->ctx, siblings, n);
ce               1436 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ce)) {
ce               1437 drivers/gpu/drm/i915/gem/i915_gem_context.c 		err = PTR_ERR(ce);
ce               1441 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (cmpxchg(&set->engines->engines[idx], NULL, ce)) {
ce               1442 drivers/gpu/drm/i915/gem/i915_gem_context.c 		intel_context_put(ce);
ce               1587 drivers/gpu/drm/i915/gem/i915_gem_context.c 		struct intel_context *ce;
ce               1610 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ce = intel_context_create(ctx, engine);
ce               1611 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (IS_ERR(ce)) {
ce               1613 drivers/gpu/drm/i915/gem/i915_gem_context.c 			return PTR_ERR(ce);
ce               1616 drivers/gpu/drm/i915/gem/i915_gem_context.c 		set.engines->engines[n] = ce;
ce               1939 drivers/gpu/drm/i915/gem/i915_gem_context.c 		struct intel_context *ce = e->engines[n];
ce               1941 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (clone->engines[n]->engine->class != ce->engine->class) {
ce               1948 drivers/gpu/drm/i915/gem/i915_gem_context.c 		err = intel_context_lock_pinned(ce);
ce               1952 drivers/gpu/drm/i915/gem/i915_gem_context.c 		clone->engines[n]->sseu = ce->sseu;
ce               1953 drivers/gpu/drm/i915/gem/i915_gem_context.c 		intel_context_unlock_pinned(ce);
ce               2162 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct intel_context *ce;
ce               2185 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ce = lookup_user_engine(ctx, lookup, &user_sseu.engine);
ce               2186 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ce))
ce               2187 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return PTR_ERR(ce);
ce               2189 drivers/gpu/drm/i915/gem/i915_gem_context.c 	err = intel_context_lock_pinned(ce); /* serialises with set_sseu */
ce               2191 drivers/gpu/drm/i915/gem/i915_gem_context.c 		intel_context_put(ce);
ce               2195 drivers/gpu/drm/i915/gem/i915_gem_context.c 	user_sseu.slice_mask = ce->sseu.slice_mask;
ce               2196 drivers/gpu/drm/i915/gem/i915_gem_context.c 	user_sseu.subslice_mask = ce->sseu.subslice_mask;
ce               2197 drivers/gpu/drm/i915/gem/i915_gem_context.c 	user_sseu.min_eus_per_subslice = ce->sseu.min_eus_per_subslice;
ce               2198 drivers/gpu/drm/i915/gem/i915_gem_context.c 	user_sseu.max_eus_per_subslice = ce->sseu.max_eus_per_subslice;
ce               2200 drivers/gpu/drm/i915/gem/i915_gem_context.c 	intel_context_unlock_pinned(ce);
ce               2201 drivers/gpu/drm/i915/gem/i915_gem_context.c 	intel_context_put(ce);
ce                201 drivers/gpu/drm/i915/gem/i915_gem_context.h 	struct intel_context *ce = ERR_PTR(-EINVAL);
ce                206 drivers/gpu/drm/i915/gem/i915_gem_context.h 			ce = intel_context_get(e->engines[idx]);
ce                209 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return ce;
ce                224 drivers/gpu/drm/i915/gem/i915_gem_context.h #define for_each_gem_engine(ce, engines, it) \
ce                226 drivers/gpu/drm/i915/gem/i915_gem_context.h 	     ((ce) = i915_gem_engines_iter_next(&(it)));)
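for_each_gem_engine(), defined just above, pairs with i915_gem_context_lock_engines(): the lock call seeds the engines iterator, each pass yields one intel_context, and the caller unlocks afterwards. Its canonical use is the context_apply_all() loop quoted from i915_gem_context.c:464; the same shape in isolation (my_apply_all is a hypothetical stand-in for that function):

static void my_apply_all(struct i915_gem_context *ctx,
                         void (*fn)(struct intel_context *ce, void *data),
                         void *data)
{
        struct i915_gem_engines_iter it;
        struct intel_context *ce;

        for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it)
                fn(ce, data);
        i915_gem_context_unlock_engines(ctx);
}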
ce               2140 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c static struct i915_request *eb_throttle(struct intel_context *ce)
ce               2142 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_ring *ring = ce->ring;
ce               2143 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_timeline *tl = ce->timeline;
ce               2175 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c __eb_pin_context(struct i915_execbuffer *eb, struct intel_context *ce)
ce               2179 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (likely(atomic_inc_not_zero(&ce->pin_count)))
ce               2186 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	err = __intel_context_do_pin(ce);
ce               2193 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c __eb_unpin_context(struct i915_execbuffer *eb, struct intel_context *ce)
ce               2195 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (likely(atomic_add_unless(&ce->pin_count, -1, 1)))
ce               2199 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	intel_context_unpin(ce);
ce               2203 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c static int __eb_pin_engine(struct i915_execbuffer *eb, struct intel_context *ce)
ce               2213 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	err = intel_gt_terminally_wedged(ce->engine->gt);
ce               2222 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	err = __eb_pin_context(eb, ce);
ce               2234 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	tl = intel_context_timeline_lock(ce);
ce               2240 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	intel_context_enter(ce);
ce               2241 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	rq = eb_throttle(ce);
ce               2257 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	eb->engine = ce->engine;
ce               2258 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	eb->context = ce;
ce               2263 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	intel_context_exit(ce);
ce               2266 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	__eb_unpin_context(eb, ce);
ce               2272 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_context *ce = eb->context;
ce               2273 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_timeline *tl = ce->timeline;
ce               2276 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	intel_context_exit(ce);
ce               2279 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	__eb_unpin_context(eb, ce);
ce               2328 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_context *ce;
ce               2337 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	ce = i915_gem_context_get_engine(eb->gem_context, idx);
ce               2338 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (IS_ERR(ce))
ce               2339 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		return PTR_ERR(ce);
ce               2341 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	err = __eb_pin_engine(eb, ce);
ce               2342 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	intel_context_put(ce);
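__eb_pin_context()/__eb_unpin_context() in the execbuffer hits are a lock-free pin-count fast path: atomic_inc_not_zero() takes another pin only if the context is already pinned (count > 0), atomic_add_unless(count, -1, 1) drops one only if this caller is not the last pin, and both fall back to the locked slow path (__intel_context_do_pin() / intel_context_unpin()) otherwise. The pattern in general form, with hypothetical slow-path stubs:

#include <linux/atomic.h>

static int my_pin_slow(atomic_t *pin_count);    /* hypothetical: 0 -> 1 under a lock */
static void my_unpin_slow(atomic_t *pin_count); /* hypothetical: 1 -> 0 teardown */

static int my_pin(atomic_t *pin_count)
{
        if (atomic_inc_not_zero(pin_count))
                return 0;               /* already pinned: no lock taken */

        return my_pin_slow(pin_count);
}

static void my_unpin(atomic_t *pin_count)
{
        if (atomic_add_unless(pin_count, -1, 1))
                return;                 /* others still hold pins */

        my_unpin_slow(pin_count);
}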
ce                 14 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct i915_vma *intel_emit_vma_fill_blt(struct intel_context *ce,
ce                 18 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	struct drm_i915_private *i915 = ce->vm->i915;
ce                 29 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	GEM_BUG_ON(intel_engine_is_virtual(ce->engine));
ce                 30 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_engine_pm_get(ce->engine);
ce                 35 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	pool = intel_engine_pool_get(&ce->engine->pool, size);
ce                 80 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_gt_chipset_flush(ce->vm->gt);
ce                 84 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = i915_vma_instance(pool->obj, ce->vm, NULL);
ce                100 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_engine_pm_put(ce->engine);
ce                119 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c void intel_emit_vma_release(struct intel_context *ce, struct i915_vma *vma)
ce                123 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_engine_pm_put(ce->engine);
ce                127 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 			     struct intel_context *ce,
ce                135 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	vma = i915_vma_instance(obj, ce->vm, NULL);
ce                149 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = intel_emit_vma_fill_blt(ce, vma, value);
ce                155 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	rq = intel_context_create_request(ce);
ce                169 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	if (ce->engine->emit_init_breadcrumb) {
ce                170 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 		err = ce->engine->emit_init_breadcrumb(rq);
ce                183 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	err = ce->engine->emit_bb_start(rq,
ce                192 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_emit_vma_release(ce, batch);
ce                198 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct i915_vma *intel_emit_vma_copy_blt(struct intel_context *ce,
ce                202 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	struct drm_i915_private *i915 = ce->vm->i915;
ce                213 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	GEM_BUG_ON(intel_engine_is_virtual(ce->engine));
ce                214 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_engine_pm_get(ce->engine);
ce                219 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	pool = intel_engine_pool_get(&ce->engine->pool, size);
ce                279 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_gt_chipset_flush(ce->vm->gt);
ce                283 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = i915_vma_instance(pool->obj, ce->vm, NULL);
ce                299 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_engine_pm_put(ce->engine);
ce                315 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 			     struct intel_context *ce)
ce                318 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	struct i915_address_space *vm = ce->vm;
ce                340 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = intel_emit_vma_copy_blt(ce, vma[0], vma[1]);
ce                346 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	rq = intel_context_create_request(ce);
ce                391 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_emit_vma_release(ce, batch);
ce                 18 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h struct i915_vma *intel_emit_vma_fill_blt(struct intel_context *ce,
ce                 22 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h struct i915_vma *intel_emit_vma_copy_blt(struct intel_context *ce,
ce                 27 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h void intel_emit_vma_release(struct intel_context *ce, struct i915_vma *vma);
ce                 30 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h 			     struct intel_context *ce,
ce                 35 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h 			     struct intel_context *ce);
ce                 18 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c 	struct intel_context *ce = i915->engine[BCS0]->kernel_context;
ce                 29 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c 		u32 sz = min_t(u64, ce->vm->total >> 4, prandom_u32_state(&prng));
ce                 68 drivers/gpu/drm/i915/gem/selftests/i915_gem_client_blt.c 		err = i915_gem_schedule_fill_pages_blt(obj, ce, obj->mm.pages,
ce                626 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		struct intel_context *ce,
ce                634 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	GEM_BUG_ON(!intel_engine_can_store_dword(ce->engine));
ce                636 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	vma = i915_vma_instance(obj, ce->vm, NULL);
ce                656 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	rq = i915_request_create(ce);
ce                716 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	       struct intel_context *ce,
ce                730 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = igt_spinner_init(*spin, ce->engine->gt);
ce                734 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	rq = igt_spinner_create_request(*spin, ce, MI_NOOP);
ce                760 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c __read_slice_count(struct intel_context *ce,
ce                771 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = emit_rpcs_query(obj, ce, &rq);
ce                789 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	if (INTEL_GEN(ce->engine->i915) >= 11) {
ce                834 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	      struct intel_context *ce,
ce                839 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	unsigned int slices = hweight32(ce->engine->sseu.slice_mask);
ce                844 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		ret = intel_engine_reset(ce->engine, "sseu");
ce                849 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __read_slice_count(ce, obj,
ce                855 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __read_slice_count(ce->engine->kernel_context, obj, NULL, &rpcs);
ce                863 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		ret = i915_gem_wait_for_idle(ce->engine->i915,
ce                868 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		ret = __read_slice_count(ce, obj, NULL, &rpcs);
ce                879 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	    struct intel_context *ce,
ce                886 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __sseu_prepare(name, flags, ce, &spin);
ce                890 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __intel_context_reconfigure_sseu(ce, sseu);
ce                894 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __sseu_finish(name, flags, ce, obj,
ce                914 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct intel_context *ce;
ce                963 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ce = i915_gem_context_get_engine(ctx, RCS0);
ce                964 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	if (IS_ERR(ce)) {
ce                965 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		ret = PTR_ERR(ce);
ce                969 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = intel_context_pin(ce);
ce                974 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __sseu_test(name, flags, ce, obj, engine->sseu);
ce                979 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __sseu_test(name, flags, ce, obj, pg_sseu);
ce                984 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __sseu_test(name, flags, ce, obj, engine->sseu);
ce                989 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ret = __sseu_test(name, flags, ce, obj, pg_sseu);
ce                997 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	intel_context_unpin(ce);
ce                999 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	intel_context_put(ce);
ce               1482 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static bool skip_unused_engines(struct intel_context *ce, void *data)
ce               1484 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	return !ce->state;
ce                 18 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c 	struct intel_context *ce = i915->engine[BCS0]->kernel_context;
ce                 35 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c 		u32 sz = min_t(u64, ce->vm->total >> 4, prandom_u32_state(&prng));
ce                 69 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c 		err = i915_gem_object_fill_blt(obj, ce, val);
ce                109 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c 	struct intel_context *ce = i915->engine[BCS0]->kernel_context;
ce                120 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c 		u32 sz = min_t(u64, ce->vm->total >> 4, prandom_u32_state(&prng));
ce                170 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c 		err = i915_gem_object_copy_blt(src, dst, ce);
ce                 20 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	struct intel_context *ce;
ce                 28 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	ce = i915_gem_context_get_engine(ctx, engine->legacy_idx);
ce                 29 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	if (IS_ERR(ce))
ce                 30 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 		return ERR_CAST(ce);
ce                 32 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	rq = intel_context_create_request(ce);
ce                 33 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	intel_context_put(ce);
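
The igt_gem_utils hits show the canonical selftest pattern for building a request on one engine of a GEM context: look up the per-engine intel_context, create the request, then drop the local reference, since the request pins the context while in flight. Reassembled from the hits (the function name is hypothetical):

static struct i915_request *
request_on_engine(struct i915_gem_context *ctx, unsigned int idx)
{
	struct intel_context *ce;
	struct i915_request *rq;

	ce = i915_gem_context_get_engine(ctx, idx);
	if (IS_ERR(ce))
		return ERR_CAST(ce);

	rq = intel_context_create_request(ce);
	intel_context_put(ce);	/* rq holds its own reference */
	return rq;
}
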
ce                 87 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c check_signal_order(struct intel_context *ce, struct i915_request *rq)
ce                 89 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 	if (!list_is_last(&rq->signal_link, &ce->signals) &&
ce                 94 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 	if (!list_is_first(&rq->signal_link, &ce->signals) &&
ce                135 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 	struct intel_context *ce, *cn;
ce                144 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 	list_for_each_entry_safe(ce, cn, &b->signalers, signal_link) {
ce                145 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		GEM_BUG_ON(list_empty(&ce->signals));
ce                147 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		list_for_each_safe(pos, next, &ce->signals) {
ce                151 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 			GEM_BUG_ON(!check_signal_order(ce, rq));
ce                177 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		if (!list_is_first(pos, &ce->signals)) {
ce                179 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 			__list_del_many(&ce->signals, pos);
ce                180 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 			if (&ce->signals == pos) /* now empty */
ce                181 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 				list_del_init(&ce->signal_link);
ce                282 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		struct intel_context *ce = rq->hw_context;
ce                304 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		list_for_each_prev(pos, &ce->signals) {
ce                312 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		if (pos == &ce->signals) /* catch transitions from empty list */
ce                313 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 			list_move_tail(&ce->signal_link, &b->signalers);
ce                314 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		GEM_BUG_ON(!check_signal_order(ce, rq));
ce                338 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		struct intel_context *ce = rq->hw_context;
ce                341 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		if (list_empty(&ce->signals))
ce                342 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 			list_del_init(&ce->signal_link);
ce                353 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 	struct intel_context *ce;
ce                362 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 	list_for_each_entry(ce, &b->signalers, signal_link) {
ce                363 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c 		list_for_each_entry(rq, &ce->signals, signal_link) {
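
The breadcrumb hits describe a two-level structure: contexts with pending signals hang off b->signalers via ce->signal_link, and each context keeps its waiting requests on ce->signals in seqno order (check_signal_order() asserts both neighbours). A sketch of the nested walk used by the debug dump hit above, assuming the same fields; dump_signalers is a hypothetical name and the caller is assumed to hold b->irq_lock:

static void dump_signalers(struct intel_breadcrumbs *b)
{
	struct intel_context *ce;
	struct i915_request *rq;

	/* caller holds b->irq_lock */
	list_for_each_entry(ce, &b->signalers, signal_link)
		list_for_each_entry(rq, &ce->signals, signal_link)
			pr_info("awaiting %llx:%lld\n",
				rq->fence.context, rq->fence.seqno);
}
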
ce                 27 drivers/gpu/drm/i915/gt/intel_context.c void intel_context_free(struct intel_context *ce)
ce                 29 drivers/gpu/drm/i915/gt/intel_context.c 	kmem_cache_free(global.slab_ce, ce);
ce                 36 drivers/gpu/drm/i915/gt/intel_context.c 	struct intel_context *ce;
ce                 38 drivers/gpu/drm/i915/gt/intel_context.c 	ce = intel_context_alloc();
ce                 39 drivers/gpu/drm/i915/gt/intel_context.c 	if (!ce)
ce                 42 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_init(ce, ctx, engine);
ce                 43 drivers/gpu/drm/i915/gt/intel_context.c 	return ce;
ce                 46 drivers/gpu/drm/i915/gt/intel_context.c int __intel_context_do_pin(struct intel_context *ce)
ce                 50 drivers/gpu/drm/i915/gt/intel_context.c 	if (mutex_lock_interruptible(&ce->pin_mutex))
ce                 53 drivers/gpu/drm/i915/gt/intel_context.c 	if (likely(!atomic_read(&ce->pin_count))) {
ce                 56 drivers/gpu/drm/i915/gt/intel_context.c 		if (unlikely(!test_bit(CONTEXT_ALLOC_BIT, &ce->flags))) {
ce                 57 drivers/gpu/drm/i915/gt/intel_context.c 			err = ce->ops->alloc(ce);
ce                 61 drivers/gpu/drm/i915/gt/intel_context.c 			__set_bit(CONTEXT_ALLOC_BIT, &ce->flags);
ce                 65 drivers/gpu/drm/i915/gt/intel_context.c 		with_intel_runtime_pm(&ce->engine->i915->runtime_pm, wakeref)
ce                 66 drivers/gpu/drm/i915/gt/intel_context.c 			err = ce->ops->pin(ce);
ce                 71 drivers/gpu/drm/i915/gt/intel_context.c 			  ce->engine->name, ce->timeline->fence_context,
ce                 72 drivers/gpu/drm/i915/gt/intel_context.c 			  ce->ring->head, ce->ring->tail);
ce                 74 drivers/gpu/drm/i915/gt/intel_context.c 		i915_gem_context_get(ce->gem_context); /* for ctx->ppgtt */
ce                 79 drivers/gpu/drm/i915/gt/intel_context.c 	atomic_inc(&ce->pin_count);
ce                 80 drivers/gpu/drm/i915/gt/intel_context.c 	GEM_BUG_ON(!intel_context_is_pinned(ce)); /* no overflow! */
ce                 82 drivers/gpu/drm/i915/gt/intel_context.c 	mutex_unlock(&ce->pin_mutex);
ce                 86 drivers/gpu/drm/i915/gt/intel_context.c 	mutex_unlock(&ce->pin_mutex);
ce                 90 drivers/gpu/drm/i915/gt/intel_context.c void intel_context_unpin(struct intel_context *ce)
ce                 92 drivers/gpu/drm/i915/gt/intel_context.c 	if (likely(atomic_add_unless(&ce->pin_count, -1, 1)))
ce                 96 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_get(ce);
ce                 97 drivers/gpu/drm/i915/gt/intel_context.c 	mutex_lock_nested(&ce->pin_mutex, SINGLE_DEPTH_NESTING);
ce                 99 drivers/gpu/drm/i915/gt/intel_context.c 	if (likely(atomic_dec_and_test(&ce->pin_count))) {
ce                101 drivers/gpu/drm/i915/gt/intel_context.c 			  ce->engine->name, ce->timeline->fence_context);
ce                103 drivers/gpu/drm/i915/gt/intel_context.c 		ce->ops->unpin(ce);
ce                105 drivers/gpu/drm/i915/gt/intel_context.c 		i915_gem_context_put(ce->gem_context);
ce                106 drivers/gpu/drm/i915/gt/intel_context.c 		intel_context_active_release(ce);
ce                109 drivers/gpu/drm/i915/gt/intel_context.c 	mutex_unlock(&ce->pin_mutex);
ce                110 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_put(ce);
ce                143 drivers/gpu/drm/i915/gt/intel_context.c 	struct intel_context *ce = container_of(active, typeof(*ce), active);
ce                146 drivers/gpu/drm/i915/gt/intel_context.c 		  ce->engine->name, ce->timeline->fence_context);
ce                148 drivers/gpu/drm/i915/gt/intel_context.c 	if (ce->state)
ce                149 drivers/gpu/drm/i915/gt/intel_context.c 		__context_unpin_state(ce->state);
ce                151 drivers/gpu/drm/i915/gt/intel_context.c 	intel_timeline_unpin(ce->timeline);
ce                152 drivers/gpu/drm/i915/gt/intel_context.c 	intel_ring_unpin(ce->ring);
ce                153 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_put(ce);
ce                158 drivers/gpu/drm/i915/gt/intel_context.c 	struct intel_context *ce = container_of(active, typeof(*ce), active);
ce                161 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_get(ce);
ce                163 drivers/gpu/drm/i915/gt/intel_context.c 	err = intel_ring_pin(ce->ring);
ce                167 drivers/gpu/drm/i915/gt/intel_context.c 	err = intel_timeline_pin(ce->timeline);
ce                171 drivers/gpu/drm/i915/gt/intel_context.c 	if (!ce->state)
ce                174 drivers/gpu/drm/i915/gt/intel_context.c 	err = __context_pin_state(ce->state);
ce                181 drivers/gpu/drm/i915/gt/intel_context.c 	intel_timeline_unpin(ce->timeline);
ce                183 drivers/gpu/drm/i915/gt/intel_context.c 	intel_ring_unpin(ce->ring);
ce                185 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_put(ce);
ce                189 drivers/gpu/drm/i915/gt/intel_context.c int intel_context_active_acquire(struct intel_context *ce)
ce                193 drivers/gpu/drm/i915/gt/intel_context.c 	err = i915_active_acquire(&ce->active);
ce                198 drivers/gpu/drm/i915/gt/intel_context.c 	if (!i915_gem_context_is_kernel(ce->gem_context)) {
ce                199 drivers/gpu/drm/i915/gt/intel_context.c 		err = i915_active_acquire_preallocate_barrier(&ce->active,
ce                200 drivers/gpu/drm/i915/gt/intel_context.c 							      ce->engine);
ce                202 drivers/gpu/drm/i915/gt/intel_context.c 			i915_active_release(&ce->active);
ce                210 drivers/gpu/drm/i915/gt/intel_context.c void intel_context_active_release(struct intel_context *ce)
ce                213 drivers/gpu/drm/i915/gt/intel_context.c 	i915_active_acquire_barrier(&ce->active);
ce                214 drivers/gpu/drm/i915/gt/intel_context.c 	i915_active_release(&ce->active);
ce                218 drivers/gpu/drm/i915/gt/intel_context.c intel_context_init(struct intel_context *ce,
ce                224 drivers/gpu/drm/i915/gt/intel_context.c 	kref_init(&ce->ref);
ce                226 drivers/gpu/drm/i915/gt/intel_context.c 	ce->gem_context = ctx;
ce                227 drivers/gpu/drm/i915/gt/intel_context.c 	ce->vm = i915_vm_get(ctx->vm ?: &engine->gt->ggtt->vm);
ce                229 drivers/gpu/drm/i915/gt/intel_context.c 		ce->timeline = intel_timeline_get(ctx->timeline);
ce                231 drivers/gpu/drm/i915/gt/intel_context.c 	ce->engine = engine;
ce                232 drivers/gpu/drm/i915/gt/intel_context.c 	ce->ops = engine->cops;
ce                233 drivers/gpu/drm/i915/gt/intel_context.c 	ce->sseu = engine->sseu;
ce                234 drivers/gpu/drm/i915/gt/intel_context.c 	ce->ring = __intel_context_ring_size(SZ_16K);
ce                236 drivers/gpu/drm/i915/gt/intel_context.c 	INIT_LIST_HEAD(&ce->signal_link);
ce                237 drivers/gpu/drm/i915/gt/intel_context.c 	INIT_LIST_HEAD(&ce->signals);
ce                239 drivers/gpu/drm/i915/gt/intel_context.c 	mutex_init(&ce->pin_mutex);
ce                241 drivers/gpu/drm/i915/gt/intel_context.c 	i915_active_init(ctx->i915, &ce->active,
ce                245 drivers/gpu/drm/i915/gt/intel_context.c void intel_context_fini(struct intel_context *ce)
ce                247 drivers/gpu/drm/i915/gt/intel_context.c 	if (ce->timeline)
ce                248 drivers/gpu/drm/i915/gt/intel_context.c 		intel_timeline_put(ce->timeline);
ce                249 drivers/gpu/drm/i915/gt/intel_context.c 	i915_vm_put(ce->vm);
ce                251 drivers/gpu/drm/i915/gt/intel_context.c 	mutex_destroy(&ce->pin_mutex);
ce                252 drivers/gpu/drm/i915/gt/intel_context.c 	i915_active_fini(&ce->active);
ce                280 drivers/gpu/drm/i915/gt/intel_context.c void intel_context_enter_engine(struct intel_context *ce)
ce                282 drivers/gpu/drm/i915/gt/intel_context.c 	intel_engine_pm_get(ce->engine);
ce                283 drivers/gpu/drm/i915/gt/intel_context.c 	intel_timeline_enter(ce->timeline);
ce                286 drivers/gpu/drm/i915/gt/intel_context.c void intel_context_exit_engine(struct intel_context *ce)
ce                288 drivers/gpu/drm/i915/gt/intel_context.c 	intel_timeline_exit(ce->timeline);
ce                289 drivers/gpu/drm/i915/gt/intel_context.c 	intel_engine_pm_put(ce->engine);
ce                292 drivers/gpu/drm/i915/gt/intel_context.c int intel_context_prepare_remote_request(struct intel_context *ce,
ce                295 drivers/gpu/drm/i915/gt/intel_context.c 	struct intel_timeline *tl = ce->timeline;
ce                299 drivers/gpu/drm/i915/gt/intel_context.c 	GEM_BUG_ON(rq->hw_context == ce);
ce                321 drivers/gpu/drm/i915/gt/intel_context.c 	GEM_BUG_ON(i915_active_is_idle(&ce->active));
ce                322 drivers/gpu/drm/i915/gt/intel_context.c 	return i915_active_ref(&ce->active, rq->timeline, rq);
ce                325 drivers/gpu/drm/i915/gt/intel_context.c struct i915_request *intel_context_create_request(struct intel_context *ce)
ce                330 drivers/gpu/drm/i915/gt/intel_context.c 	err = intel_context_pin(ce);
ce                334 drivers/gpu/drm/i915/gt/intel_context.c 	rq = i915_request_create(ce);
ce                335 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_unpin(ce);
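
__intel_context_do_pin() above is the slow path behind intel_context_pin(): under pin_mutex, the first pin lazily allocates backing state (CONTEXT_ALLOC_BIT makes the allocation happen exactly once) before calling ce->ops->pin(). A simplified reassembly of that flow from the hits, with the runtime-pm wakeref, debug trace and GEM-context reference trimmed:

static int do_pin_sketch(struct intel_context *ce)
{
	int err = 0;

	if (mutex_lock_interruptible(&ce->pin_mutex))
		return -EINTR;

	if (likely(!atomic_read(&ce->pin_count))) {
		if (unlikely(!test_bit(CONTEXT_ALLOC_BIT, &ce->flags))) {
			err = ce->ops->alloc(ce);	/* first pin only */
			if (err)
				goto unlock;
			__set_bit(CONTEXT_ALLOC_BIT, &ce->flags);
		}

		err = ce->ops->pin(ce);	/* runs under a pm wakeref */
		if (err)
			goto unlock;
	}

	atomic_inc(&ce->pin_count);
unlock:
	mutex_unlock(&ce->pin_mutex);
	return err;
}
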
ce                 17 drivers/gpu/drm/i915/gt/intel_context.h void intel_context_init(struct intel_context *ce,
ce                 20 drivers/gpu/drm/i915/gt/intel_context.h void intel_context_fini(struct intel_context *ce);
ce                 26 drivers/gpu/drm/i915/gt/intel_context.h void intel_context_free(struct intel_context *ce);
ce                 36 drivers/gpu/drm/i915/gt/intel_context.h static inline int intel_context_lock_pinned(struct intel_context *ce)
ce                 37 drivers/gpu/drm/i915/gt/intel_context.h 	__acquires(ce->pin_mutex)
ce                 39 drivers/gpu/drm/i915/gt/intel_context.h 	return mutex_lock_interruptible(&ce->pin_mutex);
ce                 52 drivers/gpu/drm/i915/gt/intel_context.h intel_context_is_pinned(struct intel_context *ce)
ce                 54 drivers/gpu/drm/i915/gt/intel_context.h 	return atomic_read(&ce->pin_count);
ce                 63 drivers/gpu/drm/i915/gt/intel_context.h static inline void intel_context_unlock_pinned(struct intel_context *ce)
ce                 64 drivers/gpu/drm/i915/gt/intel_context.h 	__releases(ce->pin_mutex)
ce                 66 drivers/gpu/drm/i915/gt/intel_context.h 	mutex_unlock(&ce->pin_mutex);
ce                 69 drivers/gpu/drm/i915/gt/intel_context.h int __intel_context_do_pin(struct intel_context *ce);
ce                 71 drivers/gpu/drm/i915/gt/intel_context.h static inline int intel_context_pin(struct intel_context *ce)
ce                 73 drivers/gpu/drm/i915/gt/intel_context.h 	if (likely(atomic_inc_not_zero(&ce->pin_count)))
ce                 76 drivers/gpu/drm/i915/gt/intel_context.h 	return __intel_context_do_pin(ce);
ce                 79 drivers/gpu/drm/i915/gt/intel_context.h static inline void __intel_context_pin(struct intel_context *ce)
ce                 81 drivers/gpu/drm/i915/gt/intel_context.h 	GEM_BUG_ON(!intel_context_is_pinned(ce));
ce                 82 drivers/gpu/drm/i915/gt/intel_context.h 	atomic_inc(&ce->pin_count);
ce                 85 drivers/gpu/drm/i915/gt/intel_context.h void intel_context_unpin(struct intel_context *ce);
ce                 87 drivers/gpu/drm/i915/gt/intel_context.h void intel_context_enter_engine(struct intel_context *ce);
ce                 88 drivers/gpu/drm/i915/gt/intel_context.h void intel_context_exit_engine(struct intel_context *ce);
ce                 90 drivers/gpu/drm/i915/gt/intel_context.h static inline void intel_context_enter(struct intel_context *ce)
ce                 92 drivers/gpu/drm/i915/gt/intel_context.h 	lockdep_assert_held(&ce->timeline->mutex);
ce                 93 drivers/gpu/drm/i915/gt/intel_context.h 	if (!ce->active_count++)
ce                 94 drivers/gpu/drm/i915/gt/intel_context.h 		ce->ops->enter(ce);
ce                 97 drivers/gpu/drm/i915/gt/intel_context.h static inline void intel_context_mark_active(struct intel_context *ce)
ce                 99 drivers/gpu/drm/i915/gt/intel_context.h 	lockdep_assert_held(&ce->timeline->mutex);
ce                100 drivers/gpu/drm/i915/gt/intel_context.h 	++ce->active_count;
ce                103 drivers/gpu/drm/i915/gt/intel_context.h static inline void intel_context_exit(struct intel_context *ce)
ce                105 drivers/gpu/drm/i915/gt/intel_context.h 	lockdep_assert_held(&ce->timeline->mutex);
ce                106 drivers/gpu/drm/i915/gt/intel_context.h 	GEM_BUG_ON(!ce->active_count);
ce                107 drivers/gpu/drm/i915/gt/intel_context.h 	if (!--ce->active_count)
ce                108 drivers/gpu/drm/i915/gt/intel_context.h 		ce->ops->exit(ce);
ce                111 drivers/gpu/drm/i915/gt/intel_context.h int intel_context_active_acquire(struct intel_context *ce);
ce                112 drivers/gpu/drm/i915/gt/intel_context.h void intel_context_active_release(struct intel_context *ce);
ce                114 drivers/gpu/drm/i915/gt/intel_context.h static inline struct intel_context *intel_context_get(struct intel_context *ce)
ce                116 drivers/gpu/drm/i915/gt/intel_context.h 	kref_get(&ce->ref);
ce                117 drivers/gpu/drm/i915/gt/intel_context.h 	return ce;
ce                120 drivers/gpu/drm/i915/gt/intel_context.h static inline void intel_context_put(struct intel_context *ce)
ce                122 drivers/gpu/drm/i915/gt/intel_context.h 	kref_put(&ce->ref, ce->ops->destroy);
ce                126 drivers/gpu/drm/i915/gt/intel_context.h intel_context_timeline_lock(struct intel_context *ce)
ce                127 drivers/gpu/drm/i915/gt/intel_context.h 	__acquires(&ce->timeline->mutex)
ce                129 drivers/gpu/drm/i915/gt/intel_context.h 	struct intel_timeline *tl = ce->timeline;
ce                145 drivers/gpu/drm/i915/gt/intel_context.h int intel_context_prepare_remote_request(struct intel_context *ce,
ce                148 drivers/gpu/drm/i915/gt/intel_context.h struct i915_request *intel_context_create_request(struct intel_context *ce);
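
The header hits also give the read-side convention used elsewhere in this listing (see the debugfs and perf hits below): take pin_mutex via intel_context_lock_pinned(), check intel_context_is_pinned(), and only then dereference ce->state or ce->ring, since nothing can unpin the context while the mutex is held. Sketch (inspect_if_pinned is a hypothetical name):

static int inspect_if_pinned(struct intel_context *ce)
{
	int err;

	err = intel_context_lock_pinned(ce);	/* interruptible */
	if (err)
		return err;

	if (intel_context_is_pinned(ce)) {
		/* ce->state and ce->ring are stable here */
	}

	intel_context_unlock_pinned(ce);
	return 0;
}
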
ce                 26 drivers/gpu/drm/i915/gt/intel_context_types.h 	int (*alloc)(struct intel_context *ce);
ce                 28 drivers/gpu/drm/i915/gt/intel_context_types.h 	int (*pin)(struct intel_context *ce);
ce                 29 drivers/gpu/drm/i915/gt/intel_context_types.h 	void (*unpin)(struct intel_context *ce);
ce                 31 drivers/gpu/drm/i915/gt/intel_context_types.h 	void (*enter)(struct intel_context *ce);
ce                 32 drivers/gpu/drm/i915/gt/intel_context_types.h 	void (*exit)(struct intel_context *ce);
ce                 34 drivers/gpu/drm/i915/gt/intel_context_types.h 	void (*reset)(struct intel_context *ce);
ce                 43 drivers/gpu/drm/i915/gt/intel_context_types.h #define intel_context_inflight(ce) ptr_mask_bits((ce)->inflight, 2)
ce                 44 drivers/gpu/drm/i915/gt/intel_context_types.h #define intel_context_inflight_count(ce) ptr_unmask_bits((ce)->inflight, 2)
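
ce->inflight packs an engine pointer together with a 2-bit submission count in the pointer's low bits (safe because the struct is more than 4-byte aligned), which is what lets the execlists hits further down update both with a single cmpxchg. A tiny usage sketch mirroring the GEM_BUG_ON hit in intel_lrc.c:

static void assert_inflight_sketch(const struct i915_request *rq)
{
	struct intel_context *ce = rq->hw_context;

	/* unpack just the engine pointer; low bits hold the count */
	GEM_BUG_ON(intel_context_inflight(ce) != rq->engine);
}
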
ce                733 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	struct intel_context *ce;
ce                736 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	ce = intel_context_create(engine->i915->kernel_context, engine);
ce                737 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	if (IS_ERR(ce))
ce                738 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		return ce;
ce                740 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	ce->ring = __intel_context_ring_size(SZ_4K);
ce                742 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	err = intel_context_pin(ce);
ce                744 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		intel_context_put(ce);
ce                748 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	return ce;
ce                764 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	struct intel_context *ce;
ce                777 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	ce = create_kernel_context(engine);
ce                778 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	if (IS_ERR(ce))
ce                779 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		return PTR_ERR(ce);
ce                781 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	engine->kernel_context = ce;
ce                792 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	intel_context_unpin(ce);
ce                793 drivers/gpu/drm/i915/gt/intel_engine_cs.c 	intel_context_put(ce);
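
create_kernel_context() above follows the lifetime rules from the intel_context hits: create on the engine's kernel GEM context, shrink the ring to SZ_4K (kernel-context requests are tiny), and leave the context pinned for the engine's lifetime. Reassembled from the hits:

static struct intel_context *
kernel_context_sketch(struct intel_engine_cs *engine)
{
	struct intel_context *ce;
	int err;

	ce = intel_context_create(engine->i915->kernel_context, engine);
	if (IS_ERR(ce))
		return ce;

	ce->ring = __intel_context_ring_size(SZ_4K);

	err = intel_context_pin(ce);	/* held until engine teardown */
	if (err) {
		intel_context_put(ce);
		return ERR_PTR(err);
	}

	return ce;
}
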
ce                 42 drivers/gpu/drm/i915/gt/intel_engine_pm.c static inline unsigned long __timeline_mark_lock(struct intel_context *ce)
ce                 47 drivers/gpu/drm/i915/gt/intel_engine_pm.c 	mutex_acquire(&ce->timeline->mutex.dep_map, 2, 0, _THIS_IP_);
ce                 52 drivers/gpu/drm/i915/gt/intel_engine_pm.c static inline void __timeline_mark_unlock(struct intel_context *ce,
ce                 55 drivers/gpu/drm/i915/gt/intel_engine_pm.c 	mutex_release(&ce->timeline->mutex.dep_map, 0, _THIS_IP_);
ce                 61 drivers/gpu/drm/i915/gt/intel_engine_pm.c static inline unsigned long __timeline_mark_lock(struct intel_context *ce)
ce                 66 drivers/gpu/drm/i915/gt/intel_engine_pm.c static inline void __timeline_mark_unlock(struct intel_context *ce,
ce                148 drivers/gpu/drm/i915/gt/intel_gt_pm.c 		struct intel_context *ce;
ce                152 drivers/gpu/drm/i915/gt/intel_gt_pm.c 		ce = engine->kernel_context;
ce                153 drivers/gpu/drm/i915/gt/intel_gt_pm.c 		if (ce)
ce                154 drivers/gpu/drm/i915/gt/intel_gt_pm.c 			ce->ops->reset(ce);
ce                229 drivers/gpu/drm/i915/gt/intel_lrc.c static int __execlists_context_alloc(struct intel_context *ce,
ce                233 drivers/gpu/drm/i915/gt/intel_lrc.c 				     struct intel_context *ce,
ce                430 drivers/gpu/drm/i915/gt/intel_lrc.c lrc_descriptor(struct intel_context *ce, struct intel_engine_cs *engine)
ce                432 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct i915_gem_context *ctx = ce->gem_context;
ce                439 drivers/gpu/drm/i915/gt/intel_lrc.c 	if (i915_vm_is_4lvl(ce->vm))
ce                447 drivers/gpu/drm/i915/gt/intel_lrc.c 	desc |= i915_ggtt_offset(ce->state) + LRC_HEADER_PAGES * PAGE_SIZE;
ce                562 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context * const ce = rq->hw_context;
ce                564 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_context_get(ce);
ce                576 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context * const ce = rq->hw_context;
ce                582 drivers/gpu/drm/i915/gt/intel_lrc.c 	old = READ_ONCE(ce->inflight);
ce                585 drivers/gpu/drm/i915/gt/intel_lrc.c 			WRITE_ONCE(ce->inflight, __execlists_schedule_in(rq));
ce                588 drivers/gpu/drm/i915/gt/intel_lrc.c 	} while (!try_cmpxchg(&ce->inflight, &old, ptr_inc(old)));
ce                590 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(intel_context_inflight(ce) != rq->engine);
ce                594 drivers/gpu/drm/i915/gt/intel_lrc.c static void kick_siblings(struct i915_request *rq, struct intel_context *ce)
ce                596 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
ce                607 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context * const ce = rq->hw_context;
ce                622 drivers/gpu/drm/i915/gt/intel_lrc.c 	if (ce->engine != engine)
ce                623 drivers/gpu/drm/i915/gt/intel_lrc.c 		kick_siblings(rq, ce);
ce                625 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_context_put(ce);
ce                631 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context * const ce = rq->hw_context;
ce                636 drivers/gpu/drm/i915/gt/intel_lrc.c 	old = READ_ONCE(ce->inflight);
ce                639 drivers/gpu/drm/i915/gt/intel_lrc.c 	while (!try_cmpxchg(&ce->inflight, &old, cur));
ce                648 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context *ce = rq->hw_context;
ce                649 drivers/gpu/drm/i915/gt/intel_lrc.c 	u64 desc = ce->lrc_desc;
ce                670 drivers/gpu/drm/i915/gt/intel_lrc.c 	prev = ce->lrc_reg_state[CTX_RING_TAIL + 1];
ce                673 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->lrc_reg_state[CTX_RING_TAIL + 1] = tail;
ce                693 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->lrc_desc &= ~CTX_DESC_FORCE_RESTORE;
ce                733 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context *ce = NULL;
ce                744 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (ce == rq->hw_context)
ce                747 drivers/gpu/drm/i915/gt/intel_lrc.c 		ce = rq->hw_context;
ce                751 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (i915_active_is_idle(&ce->active))
ce                754 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (!i915_vma_is_pinned(ce->state))
ce                758 drivers/gpu/drm/i915/gt/intel_lrc.c 	return ce;
ce                797 drivers/gpu/drm/i915/gt/intel_lrc.c static bool ctx_single_port_submission(const struct intel_context *ce)
ce                800 drivers/gpu/drm/i915/gt/intel_lrc.c 		i915_gem_context_force_single_submission(ce->gem_context));
ce               1705 drivers/gpu/drm/i915/gt/intel_lrc.c static void __execlists_context_fini(struct intel_context *ce)
ce               1707 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_ring_put(ce->ring);
ce               1708 drivers/gpu/drm/i915/gt/intel_lrc.c 	i915_vma_put(ce->state);
ce               1713 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context *ce = container_of(kref, typeof(*ce), ref);
ce               1715 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(!i915_active_is_idle(&ce->active));
ce               1716 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(intel_context_is_pinned(ce));
ce               1718 drivers/gpu/drm/i915/gt/intel_lrc.c 	if (ce->state)
ce               1719 drivers/gpu/drm/i915/gt/intel_lrc.c 		__execlists_context_fini(ce);
ce               1721 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_context_fini(ce);
ce               1722 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_context_free(ce);
ce               1752 drivers/gpu/drm/i915/gt/intel_lrc.c static void execlists_context_unpin(struct intel_context *ce)
ce               1754 drivers/gpu/drm/i915/gt/intel_lrc.c 	check_redzone((void *)ce->lrc_reg_state - LRC_STATE_PN * PAGE_SIZE,
ce               1755 drivers/gpu/drm/i915/gt/intel_lrc.c 		      ce->engine);
ce               1757 drivers/gpu/drm/i915/gt/intel_lrc.c 	i915_gem_context_unpin_hw_id(ce->gem_context);
ce               1758 drivers/gpu/drm/i915/gt/intel_lrc.c 	i915_gem_object_unpin_map(ce->state->obj);
ce               1759 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_ring_reset(ce->ring, ce->ring->tail);
ce               1763 drivers/gpu/drm/i915/gt/intel_lrc.c __execlists_update_reg_state(struct intel_context *ce,
ce               1766 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_ring *ring = ce->ring;
ce               1767 drivers/gpu/drm/i915/gt/intel_lrc.c 	u32 *regs = ce->lrc_reg_state;
ce               1779 drivers/gpu/drm/i915/gt/intel_lrc.c 			intel_sseu_make_rpcs(engine->i915, &ce->sseu);
ce               1781 drivers/gpu/drm/i915/gt/intel_lrc.c 		i915_oa_init_reg_state(engine, ce, regs);
ce               1786 drivers/gpu/drm/i915/gt/intel_lrc.c __execlists_context_pin(struct intel_context *ce,
ce               1792 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(!ce->state);
ce               1794 drivers/gpu/drm/i915/gt/intel_lrc.c 	ret = intel_context_active_acquire(ce);
ce               1797 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(!i915_vma_is_pinned(ce->state));
ce               1799 drivers/gpu/drm/i915/gt/intel_lrc.c 	vaddr = i915_gem_object_pin_map(ce->state->obj,
ce               1807 drivers/gpu/drm/i915/gt/intel_lrc.c 	ret = i915_gem_context_pin_hw_id(ce->gem_context);
ce               1811 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->lrc_desc = lrc_descriptor(ce, engine);
ce               1812 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->lrc_reg_state = vaddr + LRC_STATE_PN * PAGE_SIZE;
ce               1813 drivers/gpu/drm/i915/gt/intel_lrc.c 	__execlists_update_reg_state(ce, engine);
ce               1818 drivers/gpu/drm/i915/gt/intel_lrc.c 	i915_gem_object_unpin_map(ce->state->obj);
ce               1820 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_context_active_release(ce);
ce               1825 drivers/gpu/drm/i915/gt/intel_lrc.c static int execlists_context_pin(struct intel_context *ce)
ce               1827 drivers/gpu/drm/i915/gt/intel_lrc.c 	return __execlists_context_pin(ce, ce->engine);
ce               1830 drivers/gpu/drm/i915/gt/intel_lrc.c static int execlists_context_alloc(struct intel_context *ce)
ce               1832 drivers/gpu/drm/i915/gt/intel_lrc.c 	return __execlists_context_alloc(ce, ce->engine);
ce               1835 drivers/gpu/drm/i915/gt/intel_lrc.c static void execlists_context_reset(struct intel_context *ce)
ce               1853 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_ring_reset(ce->ring, 0);
ce               1854 drivers/gpu/drm/i915/gt/intel_lrc.c 	__execlists_update_reg_state(ce, ce->engine);
ce               2438 drivers/gpu/drm/i915/gt/intel_lrc.c 	const struct intel_context * const ce = rq->hw_context;
ce               2450 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (rq->hw_context != ce)
ce               2462 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct intel_context *ce;
ce               2480 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce = rq->hw_context;
ce               2481 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(i915_active_is_idle(&ce->active));
ce               2482 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(!i915_vma_is_pinned(ce->state));
ce               2485 drivers/gpu/drm/i915/gt/intel_lrc.c 		ce->ring->head = ce->ring->tail;
ce               2489 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->ring->head = intel_ring_wrap(ce->ring, rq->head);
ce               2529 drivers/gpu/drm/i915/gt/intel_lrc.c 	regs = ce->lrc_reg_state;
ce               2535 drivers/gpu/drm/i915/gt/intel_lrc.c 	execlists_init_reg_state(regs, ce, engine, ce->ring);
ce               2539 drivers/gpu/drm/i915/gt/intel_lrc.c 		  engine->name, ce->ring->head, ce->ring->tail);
ce               2540 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_ring_update_space(ce->ring);
ce               2541 drivers/gpu/drm/i915/gt/intel_lrc.c 	__execlists_update_reg_state(ce, engine);
ce               3189 drivers/gpu/drm/i915/gt/intel_lrc.c 				     struct intel_context *ce,
ce               3193 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ce->vm);
ce               3292 drivers/gpu/drm/i915/gt/intel_lrc.c populate_lr_context(struct intel_context *ce,
ce               3333 drivers/gpu/drm/i915/gt/intel_lrc.c 	execlists_init_reg_state(regs, ce, engine, ring);
ce               3347 drivers/gpu/drm/i915/gt/intel_lrc.c static int __execlists_context_alloc(struct intel_context *ce,
ce               3356 drivers/gpu/drm/i915/gt/intel_lrc.c 	GEM_BUG_ON(ce->state);
ce               3377 drivers/gpu/drm/i915/gt/intel_lrc.c 	if (!ce->timeline) {
ce               3386 drivers/gpu/drm/i915/gt/intel_lrc.c 		ce->timeline = tl;
ce               3389 drivers/gpu/drm/i915/gt/intel_lrc.c 	ring = intel_engine_create_ring(engine, (unsigned long)ce->ring);
ce               3395 drivers/gpu/drm/i915/gt/intel_lrc.c 	ret = populate_lr_context(ce, ctx_obj, engine, ring);
ce               3401 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->ring = ring;
ce               3402 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->state = vma;
ce               3479 drivers/gpu/drm/i915/gt/intel_lrc.c static int virtual_context_pin(struct intel_context *ce)
ce               3481 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
ce               3485 drivers/gpu/drm/i915/gt/intel_lrc.c 	err = __execlists_context_pin(ce, ve->siblings[0]);
ce               3493 drivers/gpu/drm/i915/gt/intel_lrc.c static void virtual_context_enter(struct intel_context *ce)
ce               3495 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
ce               3501 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_timeline_enter(ce->timeline);
ce               3504 drivers/gpu/drm/i915/gt/intel_lrc.c static void virtual_context_exit(struct intel_context *ce)
ce               3506 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
ce               3509 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_timeline_exit(ce->timeline);
ce               3998 drivers/gpu/drm/i915/gt/intel_lrc.c 			    struct intel_context *ce,
ce               4011 drivers/gpu/drm/i915/gt/intel_lrc.c 		u32 *regs = ce->lrc_reg_state;
ce               4018 drivers/gpu/drm/i915/gt/intel_lrc.c 		execlists_init_reg_state(regs, ce, engine, ce->ring);
ce               4022 drivers/gpu/drm/i915/gt/intel_lrc.c 	ce->ring->head = head;
ce               4023 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_ring_update_space(ce->ring);
ce               4025 drivers/gpu/drm/i915/gt/intel_lrc.c 	__execlists_update_reg_state(ce, engine);
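
Tying the intel_lrc.c hits together: execlists_schedule_in() latches the engine into ce->inflight on first submission and bumps the packed count on ELSP resubmission, all in one cmpxchg loop so the submit path needs no lock. Simplified reassembly of the loop from the hits (tracing and PM asserts trimmed; the wrapper name is hypothetical):

static struct i915_request *schedule_in_sketch(struct i915_request *rq)
{
	struct intel_context * const ce = rq->hw_context;
	struct intel_engine_cs *old;

	old = READ_ONCE(ce->inflight);
	do {
		if (!old) {	/* first port: install the engine */
			WRITE_ONCE(ce->inflight,
				   __execlists_schedule_in(rq));
			break;
		}
	} while (!try_cmpxchg(&ce->inflight, &old, ptr_inc(old)));

	GEM_BUG_ON(intel_context_inflight(ce) != rq->engine);
	return i915_request_get(rq);
}
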
ce                110 drivers/gpu/drm/i915/gt/intel_lrc.h 			    struct intel_context *ce,
ce               1347 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static void __ring_context_fini(struct intel_context *ce)
ce               1349 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	i915_gem_object_put(ce->state->obj);
ce               1354 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	struct intel_context *ce = container_of(ref, typeof(*ce), ref);
ce               1356 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	GEM_BUG_ON(intel_context_is_pinned(ce));
ce               1358 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	if (ce->state)
ce               1359 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 		__ring_context_fini(ce);
ce               1361 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	intel_context_fini(ce);
ce               1362 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	intel_context_free(ce);
ce               1365 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static struct i915_address_space *vm_alias(struct intel_context *ce)
ce               1369 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	vm = ce->vm;
ce               1376 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static int __context_pin_ppgtt(struct intel_context *ce)
ce               1381 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	vm = vm_alias(ce);
ce               1388 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static void __context_unpin_ppgtt(struct intel_context *ce)
ce               1392 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	vm = vm_alias(ce);
ce               1397 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static void ring_context_unpin(struct intel_context *ce)
ce               1399 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	__context_unpin_ppgtt(ce);
ce               1470 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static int ring_context_alloc(struct intel_context *ce)
ce               1472 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	struct intel_engine_cs *engine = ce->engine;
ce               1476 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	ce->ring = engine->legacy.ring;
ce               1477 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	ce->timeline = intel_timeline_get(engine->legacy.timeline);
ce               1479 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	GEM_BUG_ON(ce->state);
ce               1487 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 		ce->state = vma;
ce               1493 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static int ring_context_pin(struct intel_context *ce)
ce               1497 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	err = intel_context_active_acquire(ce);
ce               1501 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	err = __context_pin_ppgtt(ce);
ce               1508 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	intel_context_active_release(ce);
ce               1512 drivers/gpu/drm/i915/gt/intel_ringbuffer.c static void ring_context_reset(struct intel_context *ce)
ce               1514 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	intel_ring_reset(ce->ring, 0);
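
The legacy ringbuffer path pins far less per context: there is no per-context ring or LRC image, only the (possibly aliasing) ppgtt resolved by vm_alias(). ring_context_pin() from the hits, reassembled:

static int ring_pin_sketch(struct intel_context *ce)
{
	int err;

	err = intel_context_active_acquire(ce);
	if (err)
		return err;

	err = __context_pin_ppgtt(ce);	/* via vm_alias(ce) */
	if (err)
		intel_context_active_release(ce);

	return err;
}
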
ce               1500 drivers/gpu/drm/i915/gt/intel_workarounds.c static int engine_wa_list_verify(struct intel_context *ce,
ce               1514 drivers/gpu/drm/i915/gt/intel_workarounds.c 	vma = create_scratch(&ce->engine->gt->ggtt->vm, wal->count);
ce               1518 drivers/gpu/drm/i915/gt/intel_workarounds.c 	rq = intel_context_create_request(ce);
ce                112 drivers/gpu/drm/i915/gt/mock_engine.c static void mock_context_unpin(struct intel_context *ce)
ce                118 drivers/gpu/drm/i915/gt/mock_engine.c 	struct intel_context *ce = container_of(ref, typeof(*ce), ref);
ce                120 drivers/gpu/drm/i915/gt/mock_engine.c 	GEM_BUG_ON(intel_context_is_pinned(ce));
ce                122 drivers/gpu/drm/i915/gt/mock_engine.c 	if (test_bit(CONTEXT_ALLOC_BIT, &ce->flags)) {
ce                123 drivers/gpu/drm/i915/gt/mock_engine.c 		kfree(ce->ring);
ce                124 drivers/gpu/drm/i915/gt/mock_engine.c 		mock_timeline_unpin(ce->timeline);
ce                127 drivers/gpu/drm/i915/gt/mock_engine.c 	intel_context_fini(ce);
ce                128 drivers/gpu/drm/i915/gt/mock_engine.c 	intel_context_free(ce);
ce                131 drivers/gpu/drm/i915/gt/mock_engine.c static int mock_context_alloc(struct intel_context *ce)
ce                133 drivers/gpu/drm/i915/gt/mock_engine.c 	ce->ring = mock_ring(ce->engine);
ce                134 drivers/gpu/drm/i915/gt/mock_engine.c 	if (!ce->ring)
ce                137 drivers/gpu/drm/i915/gt/mock_engine.c 	GEM_BUG_ON(ce->timeline);
ce                138 drivers/gpu/drm/i915/gt/mock_engine.c 	ce->timeline = intel_timeline_create(ce->engine->gt, NULL);
ce                139 drivers/gpu/drm/i915/gt/mock_engine.c 	if (IS_ERR(ce->timeline)) {
ce                140 drivers/gpu/drm/i915/gt/mock_engine.c 		kfree(ce->engine);
ce                141 drivers/gpu/drm/i915/gt/mock_engine.c 		return PTR_ERR(ce->timeline);
ce                144 drivers/gpu/drm/i915/gt/mock_engine.c 	mock_timeline_pin(ce->timeline);
ce                149 drivers/gpu/drm/i915/gt/mock_engine.c static int mock_context_pin(struct intel_context *ce)
ce                151 drivers/gpu/drm/i915/gt/mock_engine.c 	return intel_context_active_acquire(ce);
ce                280 drivers/gpu/drm/i915/gt/mock_engine.c 	struct intel_context *ce;
ce                288 drivers/gpu/drm/i915/gt/mock_engine.c 	ce = create_kernel_context(engine);
ce                289 drivers/gpu/drm/i915/gt/mock_engine.c 	if (IS_ERR(ce))
ce                292 drivers/gpu/drm/i915/gt/mock_engine.c 	engine->kernel_context = ce;
ce                 37 drivers/gpu/drm/i915/gt/selftest_context.c static int context_sync(struct intel_context *ce)
ce                 39 drivers/gpu/drm/i915/gt/selftest_context.c 	struct intel_timeline *tl = ce->timeline;
ce                 71 drivers/gpu/drm/i915/gt/selftest_context.c 	struct intel_context *ce;
ce                 76 drivers/gpu/drm/i915/gt/selftest_context.c 	ce = intel_context_create(fixme, engine);
ce                 77 drivers/gpu/drm/i915/gt/selftest_context.c 	if (IS_ERR(ce))
ce                 78 drivers/gpu/drm/i915/gt/selftest_context.c 		return PTR_ERR(ce);
ce                 80 drivers/gpu/drm/i915/gt/selftest_context.c 	err = intel_context_pin(ce);
ce                 84 drivers/gpu/drm/i915/gt/selftest_context.c 	vaddr = i915_gem_object_pin_map(ce->state->obj,
ce                 88 drivers/gpu/drm/i915/gt/selftest_context.c 		intel_context_unpin(ce);
ce                110 drivers/gpu/drm/i915/gt/selftest_context.c 	rq = intel_context_create_request(ce);
ce                111 drivers/gpu/drm/i915/gt/selftest_context.c 	intel_context_unpin(ce);
ce                137 drivers/gpu/drm/i915/gt/selftest_context.c 	i915_gem_object_unpin_map(ce->state->obj);
ce                139 drivers/gpu/drm/i915/gt/selftest_context.c 	intel_context_put(ce);
ce                210 drivers/gpu/drm/i915/gt/selftest_context.c 	struct intel_context *ce;
ce                233 drivers/gpu/drm/i915/gt/selftest_context.c 	ce = intel_context_create(fixme, engine);
ce                234 drivers/gpu/drm/i915/gt/selftest_context.c 	if (IS_ERR(ce))
ce                235 drivers/gpu/drm/i915/gt/selftest_context.c 		return PTR_ERR(ce);
ce                240 drivers/gpu/drm/i915/gt/selftest_context.c 		rq = intel_context_create_request(ce);
ce                251 drivers/gpu/drm/i915/gt/selftest_context.c 		if (i915_active_is_idle(&ce->active)) {
ce                271 drivers/gpu/drm/i915/gt/selftest_context.c 	if (!i915_active_is_idle(&ce->active)) {
ce                289 drivers/gpu/drm/i915/gt/selftest_context.c 	intel_context_put(ce);
ce                330 drivers/gpu/drm/i915/gt/selftest_context.c static int __remote_sync(struct intel_context *ce, struct intel_context *remote)
ce                339 drivers/gpu/drm/i915/gt/selftest_context.c 	rq = intel_context_create_request(ce);
ce                 27 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct intel_context *ce;
ce                 45 drivers/gpu/drm/i915/gt/selftest_lrc.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ce                 48 drivers/gpu/drm/i915/gt/selftest_lrc.c 		rq = igt_spinner_create_request(&spin, ce, MI_NOOP);
ce                490 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct intel_context *ce;
ce                493 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ce = i915_gem_context_get_engine(ctx, engine->legacy_idx);
ce                494 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (IS_ERR(ce))
ce                495 drivers/gpu/drm/i915/gt/selftest_lrc.c 		return ERR_CAST(ce);
ce                497 drivers/gpu/drm/i915/gt/selftest_lrc.c 	rq = igt_spinner_create_request(spin, ce, arb);
ce                498 drivers/gpu/drm/i915/gt/selftest_lrc.c 	intel_context_put(ce);
ce                244 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct intel_context *ce;
ce                255 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ce = i915_gem_context_get_engine(ctx, engine->legacy_idx);
ce                256 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	GEM_BUG_ON(IS_ERR(ce));
ce                260 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		rq = igt_spinner_create_request(spin, ce, MI_NOOP);
ce                262 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	intel_context_put(ce);
ce               1089 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct intel_context *ce;
ce               1094 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	for_each_gem_engine(ce, i915_gem_context_engines(ctx), it) {
ce               1095 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		enum intel_engine_id id = ce->engine->id;
ce               1097 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		ok &= engine_wa_list_verify(ce,
ce               1101 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		ok &= engine_wa_list_verify(ce,
ce               1158 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct intel_context *ce;
ce               1177 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ce               1178 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		struct intel_engine_cs *engine = ce->engine;
ce               1201 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		rq = igt_spinner_create_request(&spin, ce, MI_NOOP);
ce                880 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c static inline bool ctx_save_restore_disabled(struct intel_context *ce)
ce                882 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c 	u32 sr = ce->lrc_reg_state[CTX_CONTEXT_CONTROL + 1];
ce                456 drivers/gpu/drm/i915/gvt/mmio_context.c bool is_inhibit_context(struct intel_context *ce)
ce                458 drivers/gpu/drm/i915/gvt/mmio_context.c 	const u32 *reg_state = ce->lrc_reg_state;
ce                 52 drivers/gpu/drm/i915/gvt/mmio_context.h bool is_inhibit_context(struct intel_context *ce);
ce                285 drivers/gpu/drm/i915/gvt/scheduler.c shadow_context_descriptor_update(struct intel_context *ce,
ce                288 drivers/gpu/drm/i915/gvt/scheduler.c 	u64 desc = ce->lrc_desc;
ce                298 drivers/gpu/drm/i915/gvt/scheduler.c 	ce->lrc_desc = desc;
ce               1256 drivers/gpu/drm/i915/gvt/scheduler.c 		struct intel_context *ce;
ce               1261 drivers/gpu/drm/i915/gvt/scheduler.c 		ce = intel_context_create(ctx, engine);
ce               1262 drivers/gpu/drm/i915/gvt/scheduler.c 		if (IS_ERR(ce)) {
ce               1263 drivers/gpu/drm/i915/gvt/scheduler.c 			ret = PTR_ERR(ce);
ce               1270 drivers/gpu/drm/i915/gvt/scheduler.c 			ce->ring = __intel_context_ring_size(ring_size);
ce               1273 drivers/gpu/drm/i915/gvt/scheduler.c 		ret = intel_context_pin(ce);
ce               1274 drivers/gpu/drm/i915/gvt/scheduler.c 		intel_context_put(ce);
ce               1278 drivers/gpu/drm/i915/gvt/scheduler.c 		s->shadow[i] = ce;
ce                316 drivers/gpu/drm/i915/i915_debugfs.c 		struct intel_context *ce;
ce                318 drivers/gpu/drm/i915/i915_debugfs.c 		for_each_gem_engine(ce,
ce                320 drivers/gpu/drm/i915/i915_debugfs.c 			intel_context_lock_pinned(ce);
ce                321 drivers/gpu/drm/i915/i915_debugfs.c 			if (intel_context_is_pinned(ce)) {
ce                322 drivers/gpu/drm/i915/i915_debugfs.c 				if (ce->state)
ce                324 drivers/gpu/drm/i915/i915_debugfs.c 						       ce->state->obj, &kstats);
ce                325 drivers/gpu/drm/i915/i915_debugfs.c 				per_file_stats(0, ce->ring->vma->obj, &kstats);
ce                327 drivers/gpu/drm/i915/i915_debugfs.c 			intel_context_unlock_pinned(ce);
ce               1578 drivers/gpu/drm/i915/i915_debugfs.c 		struct intel_context *ce;
ce               1602 drivers/gpu/drm/i915/i915_debugfs.c 		for_each_gem_engine(ce,
ce               1604 drivers/gpu/drm/i915/i915_debugfs.c 			intel_context_lock_pinned(ce);
ce               1605 drivers/gpu/drm/i915/i915_debugfs.c 			if (intel_context_is_pinned(ce)) {
ce               1606 drivers/gpu/drm/i915/i915_debugfs.c 				seq_printf(m, "%s: ", ce->engine->name);
ce               1607 drivers/gpu/drm/i915/i915_debugfs.c 				if (ce->state)
ce               1608 drivers/gpu/drm/i915/i915_debugfs.c 					describe_obj(m, ce->state->obj);
ce               1609 drivers/gpu/drm/i915/i915_debugfs.c 				describe_ctx_ring(m, ce->ring);
ce               1612 drivers/gpu/drm/i915/i915_debugfs.c 			intel_context_unlock_pinned(ce);
ce               1274 drivers/gpu/drm/i915/i915_gem.c 		struct intel_context *ce;
ce               1281 drivers/gpu/drm/i915/i915_gem.c 		ce = intel_context_create(i915->kernel_context, engine);
ce               1282 drivers/gpu/drm/i915/i915_gem.c 		if (IS_ERR(ce)) {
ce               1283 drivers/gpu/drm/i915/i915_gem.c 			err = PTR_ERR(ce);
ce               1287 drivers/gpu/drm/i915/i915_gem.c 		rq = intel_context_create_request(ce);
ce               1290 drivers/gpu/drm/i915/i915_gem.c 			intel_context_put(ce);
ce               1381 drivers/gpu/drm/i915/i915_gem.c 		struct intel_context *ce;
ce               1388 drivers/gpu/drm/i915/i915_gem.c 		ce = rq->hw_context;
ce               1390 drivers/gpu/drm/i915/i915_gem.c 		intel_context_put(ce);
ce               1209 drivers/gpu/drm/i915/i915_perf.c 	struct intel_context *ce;
ce               1216 drivers/gpu/drm/i915/i915_perf.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ce               1217 drivers/gpu/drm/i915/i915_perf.c 		if (ce->engine->class != RENDER_CLASS)
ce               1224 drivers/gpu/drm/i915/i915_perf.c 		err = intel_context_pin(ce);
ce               1226 drivers/gpu/drm/i915/i915_perf.c 			stream->pinned_ctx = ce;
ce               1252 drivers/gpu/drm/i915/i915_perf.c 	struct intel_context *ce;
ce               1254 drivers/gpu/drm/i915/i915_perf.c 	ce = oa_pin_context(stream);
ce               1255 drivers/gpu/drm/i915/i915_perf.c 	if (IS_ERR(ce))
ce               1256 drivers/gpu/drm/i915/i915_perf.c 		return PTR_ERR(ce);
ce               1264 drivers/gpu/drm/i915/i915_perf.c 		stream->specific_ctx_id = i915_ggtt_offset(ce->state);
ce               1284 drivers/gpu/drm/i915/i915_perf.c 				lower_32_bits(ce->lrc_desc) >> 12;
ce               1296 drivers/gpu/drm/i915/i915_perf.c 				upper_32_bits(ce->lrc_desc);
ce               1307 drivers/gpu/drm/i915/i915_perf.c 		stream->specific_ctx_id = upper_32_bits(ce->lrc_desc);
ce               1334 drivers/gpu/drm/i915/i915_perf.c 	struct intel_context *ce;
ce               1339 drivers/gpu/drm/i915/i915_perf.c 	ce = fetch_and_zero(&stream->pinned_ctx);
ce               1340 drivers/gpu/drm/i915/i915_perf.c 	if (ce) {
ce               1342 drivers/gpu/drm/i915/i915_perf.c 		intel_context_unpin(ce);
ce               1676 drivers/gpu/drm/i915/i915_perf.c 			       struct intel_context *ce,
ce               1680 drivers/gpu/drm/i915/i915_perf.c 	struct drm_i915_private *i915 = ce->engine->i915;
ce               1707 drivers/gpu/drm/i915/i915_perf.c 		intel_sseu_make_rpcs(i915, &ce->sseu));
ce               1718 drivers/gpu/drm/i915/i915_perf.c 		struct intel_context *ce,
ce               1728 drivers/gpu/drm/i915/i915_perf.c 	offset = i915_ggtt_offset(ce->state) + LRC_STATE_PN * PAGE_SIZE;
ce               1743 drivers/gpu/drm/i915/i915_perf.c 	       struct intel_context *ce,
ce               1766 drivers/gpu/drm/i915/i915_perf.c static int gen8_modify_context(struct intel_context *ce,
ce               1772 drivers/gpu/drm/i915/i915_perf.c 	lockdep_assert_held(&ce->pin_mutex);
ce               1774 drivers/gpu/drm/i915/i915_perf.c 	rq = i915_request_create(ce->engine->kernel_context);
ce               1779 drivers/gpu/drm/i915/i915_perf.c 	err = intel_context_prepare_remote_request(ce, rq);
ce               1781 drivers/gpu/drm/i915/i915_perf.c 		err = gen8_store_flex(rq, ce, flex, count);
ce               1787 drivers/gpu/drm/i915/i915_perf.c static int gen8_modify_self(struct intel_context *ce,
ce               1793 drivers/gpu/drm/i915/i915_perf.c 	rq = i915_request_create(ce);
ce               1797 drivers/gpu/drm/i915/i915_perf.c 	err = gen8_load_flex(rq, ce, flex, count);
ce               1807 drivers/gpu/drm/i915/i915_perf.c 	struct intel_context *ce;
ce               1810 drivers/gpu/drm/i915/i915_perf.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ce               1811 drivers/gpu/drm/i915/i915_perf.c 		GEM_BUG_ON(ce == ce->engine->kernel_context);
ce               1813 drivers/gpu/drm/i915/i915_perf.c 		if (ce->engine->class != RENDER_CLASS)
ce               1816 drivers/gpu/drm/i915/i915_perf.c 		err = intel_context_lock_pinned(ce);
ce               1820 drivers/gpu/drm/i915/i915_perf.c 		flex->value = intel_sseu_make_rpcs(ctx->i915, &ce->sseu);
ce               1823 drivers/gpu/drm/i915/i915_perf.c 		if (intel_context_is_pinned(ce))
ce               1824 drivers/gpu/drm/i915/i915_perf.c 			err = gen8_modify_context(ce, flex, count);
ce               1826 drivers/gpu/drm/i915/i915_perf.c 		intel_context_unlock_pinned(ce);
ce               1929 drivers/gpu/drm/i915/i915_perf.c 		struct intel_context *ce = engine->kernel_context;
ce               1935 drivers/gpu/drm/i915/i915_perf.c 		regs[0].value = intel_sseu_make_rpcs(i915, &ce->sseu);
ce               1937 drivers/gpu/drm/i915/i915_perf.c 		err = gen8_modify_self(ce, regs, ARRAY_SIZE(regs));
ce               2302 drivers/gpu/drm/i915/i915_perf.c 			    struct intel_context *ce,
ce               2312 drivers/gpu/drm/i915/i915_perf.c 		gen8_update_reg_state_unlocked(stream, ce, regs, stream->oa_config);
ce                 29 drivers/gpu/drm/i915/i915_perf.h 			    struct intel_context *ce,
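
The perf hits show two ways to rewrite OA/flex registers in a context image: gen8_modify_context() edits another, already-pinned context from the engine's kernel context, serialised by intel_context_prepare_remote_request(), while gen8_modify_self() emits the loads on the target context itself. A sketch of the remote path, assuming i915_perf.c's local struct flex and gen8_store_flex() as shown in the hits:

static int modify_remote_sketch(struct intel_context *ce,
				const struct flex *flex,
				unsigned int count)
{
	struct i915_request *rq;
	int err;

	lockdep_assert_held(&ce->pin_mutex);

	rq = i915_request_create(ce->engine->kernel_context);
	if (IS_ERR(rq))
		return PTR_ERR(rq);

	/* order against @ce and keep its image resident for the write */
	err = intel_context_prepare_remote_request(ce, rq);
	if (err == 0)
		err = gen8_store_flex(rq, ce, flex, count);

	i915_request_add(rq);
	return err;
}
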
ce                635 drivers/gpu/drm/i915/i915_request.c __i915_request_create(struct intel_context *ce, gfp_t gfp)
ce                637 drivers/gpu/drm/i915/i915_request.c 	struct intel_timeline *tl = ce->timeline;
ce                645 drivers/gpu/drm/i915/i915_request.c 	__intel_context_pin(ce);
ce                690 drivers/gpu/drm/i915/i915_request.c 	rq->i915 = ce->engine->i915;
ce                691 drivers/gpu/drm/i915/i915_request.c 	rq->hw_context = ce;
ce                692 drivers/gpu/drm/i915/i915_request.c 	rq->gem_context = ce->gem_context;
ce                693 drivers/gpu/drm/i915/i915_request.c 	rq->engine = ce->engine;
ce                694 drivers/gpu/drm/i915/i915_request.c 	rq->ring = ce->ring;
ce                749 drivers/gpu/drm/i915/i915_request.c 	intel_context_mark_active(ce);
ce                753 drivers/gpu/drm/i915/i915_request.c 	ce->ring->emit = rq->head;
ce                763 drivers/gpu/drm/i915/i915_request.c 	intel_context_unpin(ce);
ce                768 drivers/gpu/drm/i915/i915_request.c i915_request_create(struct intel_context *ce)
ce                773 drivers/gpu/drm/i915/i915_request.c 	tl = intel_context_timeline_lock(ce);
ce                782 drivers/gpu/drm/i915/i915_request.c 	intel_context_enter(ce);
ce                783 drivers/gpu/drm/i915/i915_request.c 	rq = __i915_request_create(ce, GFP_KERNEL);
ce                784 drivers/gpu/drm/i915/i915_request.c 	intel_context_exit(ce); /* active reference transferred to request */
ce                248 drivers/gpu/drm/i915/i915_request.h __i915_request_create(struct intel_context *ce, gfp_t gfp);
ce                250 drivers/gpu/drm/i915/i915_request.h i915_request_create(struct intel_context *ce);
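
i915_request_create() above strings the earlier intel_context hits together: lock the context's timeline, mark the context entered, build the request, then exit, with the active reference transferring to the request (per the comment in the hit). Simplified reassembly; the real function also retires the oldest request first, and on success the timeline mutex stays held until i915_request_add():

static struct i915_request *request_create_sketch(struct intel_context *ce)
{
	struct intel_timeline *tl;
	struct i915_request *rq;

	tl = intel_context_timeline_lock(ce);
	if (IS_ERR(tl))
		return ERR_CAST(tl);

	intel_context_enter(ce);
	rq = __i915_request_create(ce, GFP_KERNEL);
	intel_context_exit(ce);	/* active ref now held by rq */

	if (IS_ERR(rq))
		intel_context_timeline_unlock(tl);

	return rq;
}
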
ce                197 drivers/gpu/drm/i915/selftests/i915_request.c 	struct intel_context *ce;
ce                202 drivers/gpu/drm/i915/selftests/i915_request.c 	ce = i915_gem_context_get_engine(ctx[0], RCS0);
ce                203 drivers/gpu/drm/i915/selftests/i915_request.c 	GEM_BUG_ON(IS_ERR(ce));
ce                204 drivers/gpu/drm/i915/selftests/i915_request.c 	request = mock_request(ce, 2 * HZ);
ce                205 drivers/gpu/drm/i915/selftests/i915_request.c 	intel_context_put(ce);
ce                215 drivers/gpu/drm/i915/selftests/i915_request.c 	ce = i915_gem_context_get_engine(ctx[1], RCS0);
ce                216 drivers/gpu/drm/i915/selftests/i915_request.c 	GEM_BUG_ON(IS_ERR(ce));
ce                217 drivers/gpu/drm/i915/selftests/i915_request.c 	vip = mock_request(ce, 0);
ce                218 drivers/gpu/drm/i915/selftests/i915_request.c 	intel_context_put(ce);
ce                267 drivers/gpu/drm/i915/selftests/i915_request.c 	struct i915_request *(*request_alloc)(struct intel_context *ce);
ce                271 drivers/gpu/drm/i915/selftests/i915_request.c __mock_request_alloc(struct intel_context *ce)
ce                273 drivers/gpu/drm/i915/selftests/i915_request.c 	return mock_request(ce, 0);
ce                277 drivers/gpu/drm/i915/selftests/i915_request.c __live_request_alloc(struct intel_context *ce)
ce                279 drivers/gpu/drm/i915/selftests/i915_request.c 	return intel_context_create_request(ce);
ce                338 drivers/gpu/drm/i915/selftests/i915_request.c 			struct intel_context *ce;
ce                342 drivers/gpu/drm/i915/selftests/i915_request.c 			ce = i915_gem_context_get_engine(ctx, t->engine->legacy_idx);
ce                343 drivers/gpu/drm/i915/selftests/i915_request.c 			GEM_BUG_ON(IS_ERR(ce));
ce                344 drivers/gpu/drm/i915/selftests/i915_request.c 			rq = t->request_alloc(ce);
ce                345 drivers/gpu/drm/i915/selftests/i915_request.c 			intel_context_put(ce);
ce                 92 drivers/gpu/drm/i915/selftests/igt_spinner.c 			   struct intel_context *ce,
ce                 95 drivers/gpu/drm/i915/selftests/igt_spinner.c 	struct intel_engine_cs *engine = ce->engine;
ce                101 drivers/gpu/drm/i915/selftests/igt_spinner.c 	GEM_BUG_ON(spin->gt != ce->vm->gt);
ce                103 drivers/gpu/drm/i915/selftests/igt_spinner.c 	vma = i915_vma_instance(spin->obj, ce->vm, NULL);
ce                107 drivers/gpu/drm/i915/selftests/igt_spinner.c 	hws = i915_vma_instance(spin->hws, ce->vm, NULL);
ce                119 drivers/gpu/drm/i915/selftests/igt_spinner.c 	rq = intel_context_create_request(ce);
ce                 32 drivers/gpu/drm/i915/selftests/igt_spinner.h 			   struct intel_context *ce,
ce                 31 drivers/gpu/drm/i915/selftests/mock_request.c mock_request(struct intel_context *ce, unsigned long delay)
ce                 36 drivers/gpu/drm/i915/selftests/mock_request.c 	request = intel_context_create_request(ce);
ce                 33 drivers/gpu/drm/i915/selftests/mock_request.h mock_request(struct intel_context *ce, unsigned long delay);
ce                159 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	struct nvkm_engine *ce[9];
ce                232 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	int (*ce[9]   )(struct nvkm_device *, int idx, struct nvkm_engine **);
ce                 30 drivers/gpu/drm/nouveau/nvkm/engine/ce/gf100.c gf100_ce_init(struct nvkm_falcon *ce)
ce                 32 drivers/gpu/drm/nouveau/nvkm/engine/ce/gf100.c 	struct nvkm_device *device = ce->engine.subdev.device;
ce                 33 drivers/gpu/drm/nouveau/nvkm/engine/ce/gf100.c 	const int index = ce->engine.subdev.index - NVKM_ENGINE_CE0;
ce                 34 drivers/gpu/drm/nouveau/nvkm/engine/ce/gf100.c 	nvkm_wr32(device, ce->addr + 0x084, index);
ce                 47 drivers/gpu/drm/nouveau/nvkm/engine/ce/gk104.c gk104_ce_intr_launcherr(struct nvkm_engine *ce, const u32 base)
ce                 49 drivers/gpu/drm/nouveau/nvkm/engine/ce/gk104.c 	struct nvkm_subdev *subdev = &ce->subdev;
ce                 59 drivers/gpu/drm/nouveau/nvkm/engine/ce/gk104.c gk104_ce_intr(struct nvkm_engine *ce)
ce                 61 drivers/gpu/drm/nouveau/nvkm/engine/ce/gk104.c 	const u32 base = (ce->subdev.index - NVKM_ENGINE_CE0) * 0x1000;
ce                 62 drivers/gpu/drm/nouveau/nvkm/engine/ce/gk104.c 	struct nvkm_subdev *subdev = &ce->subdev;
ce                 77 drivers/gpu/drm/nouveau/nvkm/engine/ce/gk104.c 		gk104_ce_intr_launcherr(ce, base);
ce                 49 drivers/gpu/drm/nouveau/nvkm/engine/ce/gp100.c gp100_ce_intr_launcherr(struct nvkm_engine *ce, const u32 base)
ce                 51 drivers/gpu/drm/nouveau/nvkm/engine/ce/gp100.c 	struct nvkm_subdev *subdev = &ce->subdev;
ce                 60 drivers/gpu/drm/nouveau/nvkm/engine/ce/gp100.c gp100_ce_intr(struct nvkm_engine *ce)
ce                 62 drivers/gpu/drm/nouveau/nvkm/engine/ce/gp100.c 	const u32 base = (ce->subdev.index - NVKM_ENGINE_CE0) * 0x80;
ce                 63 drivers/gpu/drm/nouveau/nvkm/engine/ce/gp100.c 	struct nvkm_subdev *subdev = &ce->subdev;
ce                 78 drivers/gpu/drm/nouveau/nvkm/engine/ce/gp100.c 		gp100_ce_intr_launcherr(ce, base);
ce                 43 drivers/gpu/drm/nouveau/nvkm/engine/ce/gt215.c gt215_ce_intr(struct nvkm_falcon *ce, struct nvkm_fifo_chan *chan)
ce                 45 drivers/gpu/drm/nouveau/nvkm/engine/ce/gt215.c 	struct nvkm_subdev *subdev = &ce->engine.subdev;
ce               1162 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gt215_ce_new,
ce               1196 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gt215_ce_new,
ce               1229 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gt215_ce_new,
ce               1326 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gt215_ce_new,
ce               1362 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1363 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gf100_ce_new,
ce               1399 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1435 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1471 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1472 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gf100_ce_new,
ce               1508 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1509 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gf100_ce_new,
ce               1545 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1546 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gf100_ce_new,
ce               1582 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1617 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1653 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gf100_ce_new,
ce               1690 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gk104_ce_new,
ce               1691 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gk104_ce_new,
ce               1692 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1729 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gk104_ce_new,
ce               1730 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gk104_ce_new,
ce               1731 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1768 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gk104_ce_new,
ce               1769 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gk104_ce_new,
ce               1770 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1799 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1832 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gk104_ce_new,
ce               1833 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gk104_ce_new,
ce               1834 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1870 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gk104_ce_new,
ce               1871 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gk104_ce_new,
ce               1872 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1908 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gk104_ce_new,
ce               1909 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gk104_ce_new,
ce               1910 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1946 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gk104_ce_new,
ce               1947 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gk104_ce_new,
ce               1948 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gk104_ce_new,
ce               1984 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gm107_ce_new,
ce               1985 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gm107_ce_new,
ce               2018 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gm107_ce_new,
ce               2019 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gm107_ce_new,
ce               2052 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gm200_ce_new,
ce               2053 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gm200_ce_new,
ce               2054 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gm200_ce_new,
ce               2087 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gm200_ce_new,
ce               2088 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gm200_ce_new,
ce               2089 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gm200_ce_new,
ce               2122 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gm200_ce_new,
ce               2123 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gm200_ce_new,
ce               2124 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gm200_ce_new,
ce               2149 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gm200_ce_new,
ce               2180 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gp100_ce_new,
ce               2181 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gp100_ce_new,
ce               2182 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gp100_ce_new,
ce               2183 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = gp100_ce_new,
ce               2184 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[4] = gp100_ce_new,
ce               2185 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[5] = gp100_ce_new,
ce               2216 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gp102_ce_new,
ce               2217 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gp102_ce_new,
ce               2218 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gp102_ce_new,
ce               2219 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = gp102_ce_new,
ce               2252 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gp102_ce_new,
ce               2253 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gp102_ce_new,
ce               2254 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gp102_ce_new,
ce               2255 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = gp102_ce_new,
ce               2288 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gp102_ce_new,
ce               2289 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gp102_ce_new,
ce               2290 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gp102_ce_new,
ce               2291 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = gp102_ce_new,
ce               2324 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gp102_ce_new,
ce               2325 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gp102_ce_new,
ce               2326 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gp102_ce_new,
ce               2327 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = gp102_ce_new,
ce               2360 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gp102_ce_new,
ce               2361 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gp102_ce_new,
ce               2362 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gp102_ce_new,
ce               2363 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = gp102_ce_new,
ce               2390 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gp102_ce_new,
ce               2422 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = gv100_ce_new,
ce               2423 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = gv100_ce_new,
ce               2424 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = gv100_ce_new,
ce               2425 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = gv100_ce_new,
ce               2426 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[4] = gv100_ce_new,
ce               2427 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[5] = gv100_ce_new,
ce               2428 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[6] = gv100_ce_new,
ce               2429 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[7] = gv100_ce_new,
ce               2430 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[8] = gv100_ce_new,
ce               2461 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = tu102_ce_new,
ce               2462 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = tu102_ce_new,
ce               2463 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = tu102_ce_new,
ce               2464 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = tu102_ce_new,
ce               2465 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[4] = tu102_ce_new,
ce               2496 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = tu102_ce_new,
ce               2497 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = tu102_ce_new,
ce               2498 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = tu102_ce_new,
ce               2499 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = tu102_ce_new,
ce               2500 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[4] = tu102_ce_new,
ce               2531 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = tu102_ce_new,
ce               2532 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = tu102_ce_new,
ce               2533 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = tu102_ce_new,
ce               2534 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = tu102_ce_new,
ce               2535 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[4] = tu102_ce_new,
ce               2566 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = tu102_ce_new,
ce               2567 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = tu102_ce_new,
ce               2568 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = tu102_ce_new,
ce               2569 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = tu102_ce_new,
ce               2570 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[4] = tu102_ce_new,
ce               2601 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[0] = tu102_ce_new,
ce               2602 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[1] = tu102_ce_new,
ce               2603 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[2] = tu102_ce_new,
ce               2604 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[3] = tu102_ce_new,
ce               2605 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	.ce[4] = tu102_ce_new,
ce               2685 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE0    , device->ce[0]   ,  device->ce[0]);
ce               2686 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE1    , device->ce[1]   ,  device->ce[1]);
ce               2687 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE2    , device->ce[2]   ,  device->ce[2]);
ce               2688 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE3    , device->ce[3]   ,  device->ce[3]);
ce               2689 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE4    , device->ce[4]   ,  device->ce[4]);
ce               2690 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE5    , device->ce[5]   ,  device->ce[5]);
ce               2691 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE6    , device->ce[6]   ,  device->ce[6]);
ce               2692 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE7    , device->ce[7]   ,  device->ce[7]);
ce               2693 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 	_(CE8    , device->ce[8]   ,  device->ce[8]);
ce               3173 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE0     ,    ce[0]);
ce               3174 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE1     ,    ce[1]);
ce               3175 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE2     ,    ce[2]);
ce               3176 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE3     ,    ce[3]);
ce               3177 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE4     ,    ce[4]);
ce               3178 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE5     ,    ce[5]);
ce               3179 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE6     ,    ce[6]);
ce               3180 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE7     ,    ce[7]);
ce               3181 drivers/gpu/drm/nouveau/nvkm/engine/device/base.c 		_(NVKM_ENGINE_CE8     ,    ce[8]);
ce                 38 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c gv100_fifo_gpfifo_engine_valid(struct gk104_fifo_chan *chan, bool ce, bool valid)
ce                 42 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c 	const u32 mask = ce ? 0x00020000 : 0x00010000;
ce                279 drivers/input/touchscreen/hideep.c #define SET_FLASH_PIO(ce)					\
ce                281 drivers/input/touchscreen/hideep.c 			 0x01 | ((ce) << 1))
ce               2996 drivers/iommu/intel-iommu.c 	struct context_entry *new_ce = NULL, ce;
ce               3050 drivers/iommu/intel-iommu.c 		memcpy(&ce, old_ce + idx, sizeof(ce));
ce               3052 drivers/iommu/intel-iommu.c 		if (!__context_present(&ce))
ce               3055 drivers/iommu/intel-iommu.c 		did = context_domain_id(&ce);
ce               3075 drivers/iommu/intel-iommu.c 		context_clear_pasid_enable(&ce);
ce               3076 drivers/iommu/intel-iommu.c 		context_set_copied(&ce);
ce               3078 drivers/iommu/intel-iommu.c 		new_ce[idx] = ce;
ce                706 drivers/md/dm-snap-persistent.c 	struct core_exception ce;
ce                712 drivers/md/dm-snap-persistent.c 	ce.old_chunk = e->old_chunk;
ce                713 drivers/md/dm-snap-persistent.c 	ce.new_chunk = e->new_chunk;
ce                714 drivers/md/dm-snap-persistent.c 	write_exception(ps, ps->current_committed++, &ce);
ce                770 drivers/md/dm-snap-persistent.c 	struct core_exception ce;
ce                791 drivers/md/dm-snap-persistent.c 	read_exception(ps, ps->area, ps->current_committed - 1, &ce);
ce                792 drivers/md/dm-snap-persistent.c 	*last_old_chunk = ce.old_chunk;
ce                793 drivers/md/dm-snap-persistent.c 	*last_new_chunk = ce.new_chunk;
ce                802 drivers/md/dm-snap-persistent.c 			       ps->current_committed - 1 - nr_consecutive, &ce);
ce                803 drivers/md/dm-snap-persistent.c 		if (ce.old_chunk != *last_old_chunk - nr_consecutive ||
ce                804 drivers/md/dm-snap-persistent.c 		    ce.new_chunk != *last_new_chunk - nr_consecutive)
ce                747 drivers/media/i2c/vs6624.c 	const unsigned *ce;
ce                754 drivers/media/i2c/vs6624.c 	ce = client->dev.platform_data;
ce                755 drivers/media/i2c/vs6624.c 	if (ce == NULL)
ce                758 drivers/media/i2c/vs6624.c 	ret = devm_gpio_request_one(&client->dev, *ce, GPIOF_OUT_INIT_HIGH,
ce                761 drivers/media/i2c/vs6624.c 		v4l_err(client, "failed to request GPIO %d\n", *ce);
ce                796 drivers/media/i2c/vs6624.c 	sensor->ce_pin = *ce;
ce                412 drivers/mtd/nand/raw/lpc32xx_slc.c 		uint32_t ce = ecc[i / 3];
ce                413 drivers/mtd/nand/raw/lpc32xx_slc.c 		ce = ~(ce << 2) & 0xFFFFFF;
ce                414 drivers/mtd/nand/raw/lpc32xx_slc.c 		spare[i + 2] = (uint8_t)(ce & 0xFF);
ce                415 drivers/mtd/nand/raw/lpc32xx_slc.c 		ce >>= 8;
ce                416 drivers/mtd/nand/raw/lpc32xx_slc.c 		spare[i + 1] = (uint8_t)(ce & 0xFF);
ce                417 drivers/mtd/nand/raw/lpc32xx_slc.c 		ce >>= 8;
ce                418 drivers/mtd/nand/raw/lpc32xx_slc.c 		spare[i] = (uint8_t)(ce & 0xFF);
ce                344 drivers/mtd/nand/raw/nandsim.c                 int ce;  /* chip Enable */
ce               1856 drivers/mtd/nand/raw/nandsim.c 	if (!ns->lines.ce) {
ce               1916 drivers/mtd/nand/raw/nandsim.c 	if (!ns->lines.ce) {
ce               2098 drivers/mtd/nand/raw/nandsim.c 	if (!ns->lines.ce) {
ce               2147 drivers/mtd/nand/raw/nandsim.c 	ns->lines.ce = 1;
ce                510 drivers/net/ethernet/chelsio/cxgb/sge.c 		struct freelQ_ce *ce = &q->centries[cidx];
ce                512 drivers/net/ethernet/chelsio/cxgb/sge.c 		pci_unmap_single(pdev, dma_unmap_addr(ce, dma_addr),
ce                513 drivers/net/ethernet/chelsio/cxgb/sge.c 				 dma_unmap_len(ce, dma_len),
ce                515 drivers/net/ethernet/chelsio/cxgb/sge.c 		dev_kfree_skb(ce->skb);
ce                516 drivers/net/ethernet/chelsio/cxgb/sge.c 		ce->skb = NULL;
ce                619 drivers/net/ethernet/chelsio/cxgb/sge.c 	struct cmdQ_ce *ce;
ce                624 drivers/net/ethernet/chelsio/cxgb/sge.c 	ce = &q->centries[cidx];
ce                626 drivers/net/ethernet/chelsio/cxgb/sge.c 		if (likely(dma_unmap_len(ce, dma_len))) {
ce                627 drivers/net/ethernet/chelsio/cxgb/sge.c 			pci_unmap_single(pdev, dma_unmap_addr(ce, dma_addr),
ce                628 drivers/net/ethernet/chelsio/cxgb/sge.c 					 dma_unmap_len(ce, dma_len),
ce                633 drivers/net/ethernet/chelsio/cxgb/sge.c 		if (ce->skb) {
ce                634 drivers/net/ethernet/chelsio/cxgb/sge.c 			dev_kfree_skb_any(ce->skb);
ce                637 drivers/net/ethernet/chelsio/cxgb/sge.c 		ce++;
ce                640 drivers/net/ethernet/chelsio/cxgb/sge.c 			ce = q->centries;
ce                827 drivers/net/ethernet/chelsio/cxgb/sge.c 	struct freelQ_ce *ce = &q->centries[q->pidx];
ce                844 drivers/net/ethernet/chelsio/cxgb/sge.c 		ce->skb = skb;
ce                845 drivers/net/ethernet/chelsio/cxgb/sge.c 		dma_unmap_addr_set(ce, dma_addr, mapping);
ce                846 drivers/net/ethernet/chelsio/cxgb/sge.c 		dma_unmap_len_set(ce, dma_len, dma_len);
ce                854 drivers/net/ethernet/chelsio/cxgb/sge.c 		ce++;
ce                858 drivers/net/ethernet/chelsio/cxgb/sge.c 			ce = q->centries;
ce               1042 drivers/net/ethernet/chelsio/cxgb/sge.c 	const struct freelQ_ce *ce = &fl->centries[fl->cidx];
ce               1053 drivers/net/ethernet/chelsio/cxgb/sge.c 					    dma_unmap_addr(ce, dma_addr),
ce               1054 drivers/net/ethernet/chelsio/cxgb/sge.c 					    dma_unmap_len(ce, dma_len),
ce               1056 drivers/net/ethernet/chelsio/cxgb/sge.c 		skb_copy_from_linear_data(ce->skb, skb->data, len);
ce               1058 drivers/net/ethernet/chelsio/cxgb/sge.c 					       dma_unmap_addr(ce, dma_addr),
ce               1059 drivers/net/ethernet/chelsio/cxgb/sge.c 					       dma_unmap_len(ce, dma_len),
ce               1071 drivers/net/ethernet/chelsio/cxgb/sge.c 	pci_unmap_single(pdev, dma_unmap_addr(ce, dma_addr),
ce               1072 drivers/net/ethernet/chelsio/cxgb/sge.c 			 dma_unmap_len(ce, dma_len), PCI_DMA_FROMDEVICE);
ce               1073 drivers/net/ethernet/chelsio/cxgb/sge.c 	skb = ce->skb;
ce               1091 drivers/net/ethernet/chelsio/cxgb/sge.c 	struct freelQ_ce *ce = &fl->centries[fl->cidx];
ce               1092 drivers/net/ethernet/chelsio/cxgb/sge.c 	struct sk_buff *skb = ce->skb;
ce               1094 drivers/net/ethernet/chelsio/cxgb/sge.c 	pci_dma_sync_single_for_cpu(adapter->pdev, dma_unmap_addr(ce, dma_addr),
ce               1095 drivers/net/ethernet/chelsio/cxgb/sge.c 			    dma_unmap_len(ce, dma_len), PCI_DMA_FROMDEVICE);
ce               1160 drivers/net/ethernet/chelsio/cxgb/sge.c 						     struct cmdQ_ce **ce,
ce               1169 drivers/net/ethernet/chelsio/cxgb/sge.c 		struct cmdQ_ce *ce1 = *ce;
ce               1190 drivers/net/ethernet/chelsio/cxgb/sge.c 		*ce = ce1;
ce               1205 drivers/net/ethernet/chelsio/cxgb/sge.c 	struct cmdQ_ce *ce;
ce               1210 drivers/net/ethernet/chelsio/cxgb/sge.c 	ce = &q->centries[pidx];
ce               1226 drivers/net/ethernet/chelsio/cxgb/sge.c 	ce->skb = NULL;
ce               1227 drivers/net/ethernet/chelsio/cxgb/sge.c 	dma_unmap_len_set(ce, dma_len, 0);
ce               1234 drivers/net/ethernet/chelsio/cxgb/sge.c 		ce++;
ce               1239 drivers/net/ethernet/chelsio/cxgb/sge.c 			ce = q->centries;
ce               1241 drivers/net/ethernet/chelsio/cxgb/sge.c 		pidx = write_large_page_tx_descs(pidx, &e1, &ce, &gen,
ce               1250 drivers/net/ethernet/chelsio/cxgb/sge.c 	ce->skb = NULL;
ce               1251 drivers/net/ethernet/chelsio/cxgb/sge.c 	dma_unmap_addr_set(ce, dma_addr, mapping);
ce               1252 drivers/net/ethernet/chelsio/cxgb/sge.c 	dma_unmap_len_set(ce, dma_len, skb_headlen(skb));
ce               1257 drivers/net/ethernet/chelsio/cxgb/sge.c 		ce++;
ce               1262 drivers/net/ethernet/chelsio/cxgb/sge.c 			ce = q->centries;
ce               1270 drivers/net/ethernet/chelsio/cxgb/sge.c 		pidx = write_large_page_tx_descs(pidx, &e1, &ce, &gen,
ce               1276 drivers/net/ethernet/chelsio/cxgb/sge.c 		ce->skb = NULL;
ce               1277 drivers/net/ethernet/chelsio/cxgb/sge.c 		dma_unmap_addr_set(ce, dma_addr, mapping);
ce               1278 drivers/net/ethernet/chelsio/cxgb/sge.c 		dma_unmap_len_set(ce, dma_len, skb_frag_size(frag));
ce               1280 drivers/net/ethernet/chelsio/cxgb/sge.c 	ce->skb = skb;
ce                 78 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	struct clip_entry *ce, *cte;
ce                 97 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			ce = cte;
ce                 99 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			refcount_inc(&ce->refcnt);
ce                107 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		ce = list_first_entry(&ctbl->ce_free_head,
ce                109 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		list_del(&ce->list);
ce                110 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		INIT_LIST_HEAD(&ce->list);
ce                111 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		spin_lock_init(&ce->lock);
ce                112 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		refcount_set(&ce->refcnt, 0);
ce                114 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		list_add_tail(&ce->list, &ctbl->hash_list[hash]);
ce                116 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			ce->addr6.sin6_family = AF_INET6;
ce                117 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			memcpy(ce->addr6.sin6_addr.s6_addr,
ce                126 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 					ret, ce->addr6.sin6_addr.s6_addr);
ce                130 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			ce->addr.sin_family = AF_INET;
ce                131 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			memcpy((char *)(&ce->addr.sin_addr), lip,
ce                142 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	refcount_set(&ce->refcnt, 1);
ce                151 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	struct clip_entry *ce, *cte;
ce                170 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			ce = cte;
ce                180 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	spin_lock_bh(&ce->lock);
ce                181 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	if (refcount_dec_and_test(&ce->refcnt)) {
ce                182 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		list_del(&ce->list);
ce                183 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		INIT_LIST_HEAD(&ce->list);
ce                184 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		list_add_tail(&ce->list, &ctbl->ce_free_head);
ce                189 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	spin_unlock_bh(&ce->lock);
ce                256 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	struct clip_entry *ce;
ce                264 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 		list_for_each_entry(ce, &ctbl->hash_list[i], list) {
ce                266 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 			sprintf(ip, "%pISc", &ce->addr);
ce                268 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 				   refcount_read(&ce->refcnt));
ce                444 drivers/net/fddi/skfp/h/smc.h 	struct s_cem	ce[NUMPHYS] ;	/* cem */
ce               2934 drivers/net/ppp/ppp_generic.c 	struct compressor_entry *ce;
ce               2936 drivers/net/ppp/ppp_generic.c 	list_for_each_entry(ce, &compressor_list, list) {
ce               2937 drivers/net/ppp/ppp_generic.c 		if (ce->comp->compress_proto == proto)
ce               2938 drivers/net/ppp/ppp_generic.c 			return ce;
ce               2947 drivers/net/ppp/ppp_generic.c 	struct compressor_entry *ce;
ce               2954 drivers/net/ppp/ppp_generic.c 	ce = kmalloc(sizeof(struct compressor_entry), GFP_ATOMIC);
ce               2955 drivers/net/ppp/ppp_generic.c 	if (!ce)
ce               2958 drivers/net/ppp/ppp_generic.c 	ce->comp = cp;
ce               2959 drivers/net/ppp/ppp_generic.c 	list_add(&ce->list, &compressor_list);
ce               2969 drivers/net/ppp/ppp_generic.c 	struct compressor_entry *ce;
ce               2972 drivers/net/ppp/ppp_generic.c 	ce = find_comp_entry(cp->compress_proto);
ce               2973 drivers/net/ppp/ppp_generic.c 	if (ce && ce->comp == cp) {
ce               2974 drivers/net/ppp/ppp_generic.c 		list_del(&ce->list);
ce               2975 drivers/net/ppp/ppp_generic.c 		kfree(ce);
ce               2984 drivers/net/ppp/ppp_generic.c 	struct compressor_entry *ce;
ce               2988 drivers/net/ppp/ppp_generic.c 	ce = find_comp_entry(type);
ce               2989 drivers/net/ppp/ppp_generic.c 	if (ce) {
ce               2990 drivers/net/ppp/ppp_generic.c 		cp = ce->comp;
ce                777 drivers/net/wireless/ath/ath10k/ahb.c 	ar_pci->ce.bus_ops = &ath10k_ahb_bus_ops;
ce                779 drivers/net/wireless/ath/ath10k/ahb.c 	ar->ce_priv = &ar_pci->ce;
ce                135 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                137 drivers/net/wireless/ath/ath10k/ce.c 	return ce->bus_ops->read32(ar, offset);
ce                142 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                144 drivers/net/wireless/ath/ath10k/ce.c 	ce->bus_ops->write32(ar, offset, value);
ce                180 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                182 drivers/net/wireless/ath/ath10k/ce.c 	return ce->vaddr_rri[ce_id] & CE_DDR_RRI_MASK;
ce                188 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                190 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce                223 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                224 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce                300 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                302 drivers/net/wireless/ath/ath10k/ce.c 	return (ce->vaddr_rri[ce_id] >> CE_DDR_DRRI_SHIFT) &
ce                309 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                311 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce                328 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                329 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce                641 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                645 drivers/net/wireless/ath/ath10k/ce.c 	lockdep_assert_held(&ce->ce_lock);
ce                674 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                677 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce                680 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce                689 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                692 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce                696 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce                705 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                711 drivers/net/wireless/ath/ath10k/ce.c 	lockdep_assert_held(&ce->ce_lock);
ce                721 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                730 drivers/net/wireless/ath/ath10k/ce.c 	lockdep_assert_held(&ce->ce_lock);
ce                752 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                762 drivers/net/wireless/ath/ath10k/ce.c 	lockdep_assert_held(&ce->ce_lock);
ce                805 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                808 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce                810 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce                934 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                937 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce                942 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce                958 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce;
ce                966 drivers/net/wireless/ath/ath10k/ce.c 	ce = ath10k_ce_priv(ar);
ce                968 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce                996 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce               1011 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce;
ce               1019 drivers/net/wireless/ath/ath10k/ce.c 	ce = ath10k_ce_priv(ar);
ce               1021 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce               1050 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce               1231 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce;
ce               1239 drivers/net/wireless/ath/ath10k/ce.c 	ce = ath10k_ce_priv(ar);
ce               1241 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce               1267 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce               1277 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1280 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce               1283 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce               1297 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1298 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce               1302 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce               1308 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce               1316 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce               1324 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce               1377 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1383 drivers/net/wireless/ath/ath10k/ce.c 		ce_state  = &ce->ce_states[ce_id];
ce               1400 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1408 drivers/net/wireless/ath/ath10k/ce.c 		ce_state  = &ce->ce_states[ce_id];
ce               1421 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1422 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce               1462 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1463 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce               1782 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1783 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce               1813 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1814 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce               1844 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1845 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce               1854 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1862 drivers/net/wireless/ath/ath10k/ce.c 	spin_lock_bh(&ce->ce_lock);
ce               1887 drivers/net/wireless/ath/ath10k/ce.c 	spin_unlock_bh(&ce->ce_lock);
ce               1936 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1937 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce_pipe *ce_state = &ce->ce_states[ce_id];
ce               2000 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               2002 drivers/net/wireless/ath/ath10k/ce.c 	ce->vaddr_rri = dma_alloc_coherent(ar->dev,
ce               2004 drivers/net/wireless/ath/ath10k/ce.c 					   &ce->paddr_rri, GFP_KERNEL);
ce               2006 drivers/net/wireless/ath/ath10k/ce.c 	if (!ce->vaddr_rri)
ce               2010 drivers/net/wireless/ath/ath10k/ce.c 			  lower_32_bits(ce->paddr_rri));
ce               2012 drivers/net/wireless/ath/ath10k/ce.c 			  (upper_32_bits(ce->paddr_rri) &
ce               2023 drivers/net/wireless/ath/ath10k/ce.c 	memset(ce->vaddr_rri, 0, CE_COUNT * sizeof(u32));
ce               2029 drivers/net/wireless/ath/ath10k/ce.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               2032 drivers/net/wireless/ath/ath10k/ce.c 			  ce->vaddr_rri,
ce               2033 drivers/net/wireless/ath/ath10k/ce.c 			  ce->paddr_rri);
ce                376 drivers/net/wireless/ath/ath10k/ce.h 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                380 drivers/net/wireless/ath/ath10k/ce.h 			ce->bus_ops->read32((ar), CE_WRAPPER_BASE_ADDRESS +
ce                678 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                680 drivers/net/wireless/ath/ath10k/pci.c 	ce->bus_ops->write32(ar, offset, value);
ce                685 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                687 drivers/net/wireless/ath/ath10k/pci.c 	return ce->bus_ops->read32(ar, offset);
ce                767 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                790 drivers/net/wireless/ath/ath10k/pci.c 	spin_lock_bh(&ce->ce_lock);
ce                792 drivers/net/wireless/ath/ath10k/pci.c 	spin_unlock_bh(&ce->ce_lock);
ce                807 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                817 drivers/net/wireless/ath/ath10k/pci.c 	spin_lock_bh(&ce->ce_lock);
ce                819 drivers/net/wireless/ath/ath10k/pci.c 	spin_unlock_bh(&ce->ce_lock);
ce               1353 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1362 drivers/net/wireless/ath/ath10k/pci.c 	spin_lock_bh(&ce->ce_lock);
ce               1408 drivers/net/wireless/ath/ath10k/pci.c 	spin_unlock_bh(&ce->ce_lock);
ce               1415 drivers/net/wireless/ath/ath10k/pci.c 	spin_unlock_bh(&ce->ce_lock);
ce               2311 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               2313 drivers/net/wireless/ath/ath10k/pci.c 	return ce->bus_ops->get_num_banks(ar);
ce               2484 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               2489 drivers/net/wireless/ath/ath10k/pci.c 		pipe->ce_hdl = &ce->ce_states[i];
ce               3462 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               3465 drivers/net/wireless/ath/ath10k/pci.c 	spin_lock_init(&ce->ce_lock);
ce               3588 drivers/net/wireless/ath/ath10k/pci.c 	ar_pci->ce.bus_ops = &ath10k_pci_bus_ops;
ce               3592 drivers/net/wireless/ath/ath10k/pci.c 	ar->ce_priv = &ar_pci->ce;
ce                126 drivers/net/wireless/ath/ath10k/pci.h 	struct ath10k_ce ce;
ce                488 drivers/net/wireless/ath/ath10k/snoc.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                510 drivers/net/wireless/ath/ath10k/snoc.c 	spin_lock_bh(&ce->ce_lock);
ce                512 drivers/net/wireless/ath/ath10k/snoc.c 	spin_unlock_bh(&ce->ce_lock);
ce                526 drivers/net/wireless/ath/ath10k/snoc.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                537 drivers/net/wireless/ath/ath10k/snoc.c 	spin_lock_bh(&ce->ce_lock);
ce                539 drivers/net/wireless/ath/ath10k/snoc.c 	spin_unlock_bh(&ce->ce_lock);
ce                683 drivers/net/wireless/ath/ath10k/snoc.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce                690 drivers/net/wireless/ath/ath10k/snoc.c 	spin_lock_bh(&ce->ce_lock);
ce                720 drivers/net/wireless/ath/ath10k/snoc.c 	spin_unlock_bh(&ce->ce_lock);
ce                728 drivers/net/wireless/ath/ath10k/snoc.c 	spin_unlock_bh(&ce->ce_lock);
ce               1314 drivers/net/wireless/ath/ath10k/snoc.c 	struct ath10k_ce *ce = ath10k_ce_priv(ar);
ce               1319 drivers/net/wireless/ath/ath10k/snoc.c 	spin_lock_init(&ce->ce_lock);
ce               1322 drivers/net/wireless/ath/ath10k/snoc.c 		pipe->ce_hdl = &ce->ce_states[i];
ce               1686 drivers/net/wireless/ath/ath10k/snoc.c 	ar_snoc->ce.bus_ops = &ath10k_snoc_bus_ops;
ce               1687 drivers/net/wireless/ath/ath10k/snoc.c 	ar->ce_priv = &ar_snoc->ce;
ce                 78 drivers/net/wireless/ath/ath10k/snoc.h 	struct ath10k_ce ce;
ce                460 drivers/of/dynamic.c static void __of_changeset_entry_destroy(struct of_changeset_entry *ce)
ce                462 drivers/of/dynamic.c 	if (ce->action == OF_RECONFIG_ATTACH_NODE &&
ce                463 drivers/of/dynamic.c 	    of_node_check_flag(ce->np, OF_OVERLAY)) {
ce                464 drivers/of/dynamic.c 		if (kref_read(&ce->np->kobj.kref) > 1) {
ce                466 drivers/of/dynamic.c 			       kref_read(&ce->np->kobj.kref), ce->np);
ce                468 drivers/of/dynamic.c 			of_node_set_flag(ce->np, OF_OVERLAY_FREE_CSET);
ce                472 drivers/of/dynamic.c 	of_node_put(ce->np);
ce                473 drivers/of/dynamic.c 	list_del(&ce->node);
ce                474 drivers/of/dynamic.c 	kfree(ce);
ce                478 drivers/of/dynamic.c static void __of_changeset_entry_dump(struct of_changeset_entry *ce)
ce                480 drivers/of/dynamic.c 	switch (ce->action) {
ce                484 drivers/of/dynamic.c 		pr_debug("cset<%p> %-15s %pOF/%s\n", ce, action_names[ce->action],
ce                485 drivers/of/dynamic.c 			ce->np, ce->prop->name);
ce                489 drivers/of/dynamic.c 		pr_debug("cset<%p> %-15s %pOF\n", ce, action_names[ce->action],
ce                490 drivers/of/dynamic.c 			ce->np);
ce                495 drivers/of/dynamic.c static inline void __of_changeset_entry_dump(struct of_changeset_entry *ce)
ce                501 drivers/of/dynamic.c static void __of_changeset_entry_invert(struct of_changeset_entry *ce,
ce                504 drivers/of/dynamic.c 	memcpy(rce, ce, sizeof(*rce));
ce                506 drivers/of/dynamic.c 	switch (ce->action) {
ce                520 drivers/of/dynamic.c 		rce->old_prop = ce->prop;
ce                521 drivers/of/dynamic.c 		rce->prop = ce->old_prop;
ce                525 drivers/of/dynamic.c 			rce->prop = ce->prop;
ce                531 drivers/of/dynamic.c static int __of_changeset_entry_notify(struct of_changeset_entry *ce,
ce                539 drivers/of/dynamic.c 		__of_changeset_entry_invert(ce, &ce_inverted);
ce                540 drivers/of/dynamic.c 		ce = &ce_inverted;
ce                543 drivers/of/dynamic.c 	switch (ce->action) {
ce                547 drivers/of/dynamic.c 		rd.dn = ce->np;
ce                548 drivers/of/dynamic.c 		ret = of_reconfig_notify(ce->action, &rd);
ce                553 drivers/of/dynamic.c 		ret = of_property_notify(ce->action, ce->np, ce->prop, ce->old_prop);
ce                557 drivers/of/dynamic.c 			(int)ce->action);
ce                562 drivers/of/dynamic.c 		pr_err("changeset notifier error @%pOF\n", ce->np);
ce                566 drivers/of/dynamic.c static int __of_changeset_entry_apply(struct of_changeset_entry *ce)
ce                572 drivers/of/dynamic.c 	__of_changeset_entry_dump(ce);
ce                575 drivers/of/dynamic.c 	switch (ce->action) {
ce                577 drivers/of/dynamic.c 		__of_attach_node(ce->np);
ce                580 drivers/of/dynamic.c 		__of_detach_node(ce->np);
ce                584 drivers/of/dynamic.c 		for (propp = &ce->np->deadprops; *propp; propp = &(*propp)->next) {
ce                585 drivers/of/dynamic.c 			if (*propp == ce->prop) {
ce                586 drivers/of/dynamic.c 				*propp = ce->prop->next;
ce                587 drivers/of/dynamic.c 				ce->prop->next = NULL;
ce                592 drivers/of/dynamic.c 		ret = __of_add_property(ce->np, ce->prop);
ce                595 drivers/of/dynamic.c 				ce->np,
ce                596 drivers/of/dynamic.c 				ce->prop->name);
ce                601 drivers/of/dynamic.c 		ret = __of_remove_property(ce->np, ce->prop);
ce                604 drivers/of/dynamic.c 				ce->np,
ce                605 drivers/of/dynamic.c 				ce->prop->name);
ce                612 drivers/of/dynamic.c 		for (propp = &ce->np->deadprops; *propp; propp = &(*propp)->next) {
ce                613 drivers/of/dynamic.c 			if (*propp == ce->prop) {
ce                614 drivers/of/dynamic.c 				*propp = ce->prop->next;
ce                615 drivers/of/dynamic.c 				ce->prop->next = NULL;
ce                620 drivers/of/dynamic.c 		ret = __of_update_property(ce->np, ce->prop, &old_prop);
ce                623 drivers/of/dynamic.c 				ce->np,
ce                624 drivers/of/dynamic.c 				ce->prop->name);
ce                636 drivers/of/dynamic.c 	switch (ce->action) {
ce                638 drivers/of/dynamic.c 		__of_attach_node_sysfs(ce->np);
ce                641 drivers/of/dynamic.c 		__of_detach_node_sysfs(ce->np);
ce                645 drivers/of/dynamic.c 		__of_add_property_sysfs(ce->np, ce->prop);
ce                648 drivers/of/dynamic.c 		__of_remove_property_sysfs(ce->np, ce->prop);
ce                651 drivers/of/dynamic.c 		__of_update_property_sysfs(ce->np, ce->prop, ce->old_prop);
ce                658 drivers/of/dynamic.c static inline int __of_changeset_entry_revert(struct of_changeset_entry *ce)
ce                662 drivers/of/dynamic.c 	__of_changeset_entry_invert(ce, &ce_inverted);
ce                690 drivers/of/dynamic.c 	struct of_changeset_entry *ce, *cen;
ce                692 drivers/of/dynamic.c 	list_for_each_entry_safe_reverse(ce, cen, &ocs->entries, node)
ce                693 drivers/of/dynamic.c 		__of_changeset_entry_destroy(ce);
ce                709 drivers/of/dynamic.c 	struct of_changeset_entry *ce;
ce                713 drivers/of/dynamic.c 	list_for_each_entry(ce, &ocs->entries, node) {
ce                714 drivers/of/dynamic.c 		ret = __of_changeset_entry_apply(ce);
ce                717 drivers/of/dynamic.c 			list_for_each_entry_continue_reverse(ce, &ocs->entries,
ce                719 drivers/of/dynamic.c 				ret_tmp = __of_changeset_entry_revert(ce);
ce                738 drivers/of/dynamic.c 	struct of_changeset_entry *ce;
ce                745 drivers/of/dynamic.c 	list_for_each_entry(ce, &ocs->entries, node) {
ce                746 drivers/of/dynamic.c 		ret_tmp = __of_changeset_entry_notify(ce, 0);
ce                812 drivers/of/dynamic.c 	struct of_changeset_entry *ce;
ce                816 drivers/of/dynamic.c 	list_for_each_entry_reverse(ce, &ocs->entries, node) {
ce                817 drivers/of/dynamic.c 		ret = __of_changeset_entry_revert(ce);
ce                820 drivers/of/dynamic.c 			list_for_each_entry_continue(ce, &ocs->entries, node) {
ce                821 drivers/of/dynamic.c 				ret_tmp = __of_changeset_entry_apply(ce);
ce                838 drivers/of/dynamic.c 	struct of_changeset_entry *ce;
ce                845 drivers/of/dynamic.c 	list_for_each_entry_reverse(ce, &ocs->entries, node) {
ce                846 drivers/of/dynamic.c 		ret_tmp = __of_changeset_entry_notify(ce, 1);
ce                911 drivers/of/dynamic.c 	struct of_changeset_entry *ce;
ce                913 drivers/of/dynamic.c 	ce = kzalloc(sizeof(*ce), GFP_KERNEL);
ce                914 drivers/of/dynamic.c 	if (!ce)
ce                918 drivers/of/dynamic.c 	ce->action = action;
ce                919 drivers/of/dynamic.c 	ce->np = of_node_get(np);
ce                920 drivers/of/dynamic.c 	ce->prop = prop;
ce                923 drivers/of/dynamic.c 		ce->old_prop = of_find_property(np, prop->name, NULL);
ce                926 drivers/of/dynamic.c 	list_add_tail(&ce->node, &ocs->entries);
ce               1105 drivers/of/overlay.c 	struct of_changeset_entry *ce;
ce               1111 drivers/of/overlay.c 		list_for_each_entry(ce, &ovcs->cset.entries, node) {
ce               1112 drivers/of/overlay.c 			if (find_node(ce->np, remove_ce_node)) {
ce               1118 drivers/of/overlay.c 			if (find_node(remove_ce_node, ce->np)) {
ce                179 drivers/soc/fsl/qbman/bman.c 	void *ce;		/* cache-enabled */
ce                204 drivers/soc/fsl/qbman/bman.c 	dpaa_invalidate(p->addr.ce + offset);
ce                209 drivers/soc/fsl/qbman/bman.c 	dpaa_touch_ro(p->addr.ce + offset);
ce                387 drivers/soc/fsl/qbman/bman.c 	rcr->ring = portal->addr.ce + BM_CL_RCR;
ce                432 drivers/soc/fsl/qbman/bman.c 	mc->cr = portal->addr.ce + BM_CL_CR;
ce                433 drivers/soc/fsl/qbman/bman.c 	mc->rr = portal->addr.ce + BM_CL_RR0;
ce                538 drivers/soc/fsl/qbman/bman.c 	p->addr.ce = c->addr_virt_ce;
ce                343 drivers/soc/fsl/qbman/qman.c 	void *ce;		/* cache-enabled */
ce                376 drivers/soc/fsl/qbman/qman.c 	dpaa_invalidate(p->addr.ce + offset);
ce                381 drivers/soc/fsl/qbman/qman.c 	dpaa_touch_ro(p->addr.ce + offset);
ce                430 drivers/soc/fsl/qbman/qman.c 	eqcr->ring = portal->addr.ce + QM_CL_EQCR;
ce                619 drivers/soc/fsl/qbman/qman.c 	dqrr->ring = portal->addr.ce + QM_CL_DQRR;
ce                773 drivers/soc/fsl/qbman/qman.c 	mr->ring = portal->addr.ce + QM_CL_MR;
ce                864 drivers/soc/fsl/qbman/qman.c 	mc->cr = portal->addr.ce + QM_CL_CR;
ce                865 drivers/soc/fsl/qbman/qman.c 	mc->rr = portal->addr.ce + QM_CL_RR0;
ce               1249 drivers/soc/fsl/qbman/qman.c 	p->addr.ce = c->addr_virt_ce;
ce                305 drivers/video/fbdev/core/svgalib.c 	u8 ce = 0x0e;
ce                340 drivers/video/fbdev/core/svgalib.c 	vga_wcrt(regbase, 0x0B, ce); /* set cursor end */
ce                103 fs/cifs/dfs_cache.c static inline bool cache_entry_expired(const struct dfs_cache_entry *ce)
ce                108 fs/cifs/dfs_cache.c 	return timespec64_compare(&ts, &ce->ce_etime) >= 0;
ce                111 fs/cifs/dfs_cache.c static inline void free_tgts(struct dfs_cache_entry *ce)
ce                115 fs/cifs/dfs_cache.c 	list_for_each_entry_safe(t, n, &ce->ce_tlist, t_list) {
ce                124 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce = container_of(rcu, struct dfs_cache_entry,
ce                126 fs/cifs/dfs_cache.c 	kmem_cache_free(dfs_cache_slab, ce);
ce                129 fs/cifs/dfs_cache.c static inline void flush_cache_ent(struct dfs_cache_entry *ce)
ce                131 fs/cifs/dfs_cache.c 	if (hlist_unhashed(&ce->ce_hlist))
ce                134 fs/cifs/dfs_cache.c 	hlist_del_init_rcu(&ce->ce_hlist);
ce                135 fs/cifs/dfs_cache.c 	kfree_const(ce->ce_path);
ce                136 fs/cifs/dfs_cache.c 	free_tgts(ce);
ce                138 fs/cifs/dfs_cache.c 	call_rcu(&ce->ce_rcu, free_cache_entry);
ce                148 fs/cifs/dfs_cache.c 		struct dfs_cache_entry *ce;
ce                150 fs/cifs/dfs_cache.c 		hlist_for_each_entry_rcu(ce, l, ce_hlist)
ce                151 fs/cifs/dfs_cache.c 			flush_cache_ent(ce);
ce                162 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                170 fs/cifs/dfs_cache.c 	hash_for_each_rcu(dfs_cache_htable, bucket, ce, ce_hlist) {
ce                174 fs/cifs/dfs_cache.c 			   ce->ce_path,
ce                175 fs/cifs/dfs_cache.c 			   ce->ce_srvtype == DFS_TYPE_ROOT ? "root" : "link",
ce                176 fs/cifs/dfs_cache.c 			   ce->ce_ttl, ce->ce_etime.tv_nsec,
ce                177 fs/cifs/dfs_cache.c 			   IS_INTERLINK_SET(ce->ce_flags) ? "yes" : "no",
ce                178 fs/cifs/dfs_cache.c 			   ce->ce_path_consumed,
ce                179 fs/cifs/dfs_cache.c 			   cache_entry_expired(ce) ? "yes" : "no");
ce                181 fs/cifs/dfs_cache.c 		list_for_each_entry(t, &ce->ce_tlist, t_list) {
ce                184 fs/cifs/dfs_cache.c 				   ce->ce_tgthint == t ? " (target hint)" : "");
ce                229 fs/cifs/dfs_cache.c static inline void dump_tgts(const struct dfs_cache_entry *ce)
ce                234 fs/cifs/dfs_cache.c 	list_for_each_entry(t, &ce->ce_tlist, t_list) {
ce                236 fs/cifs/dfs_cache.c 			 ce->ce_tgthint == t ? " (target hint)" : "");
ce                240 fs/cifs/dfs_cache.c static inline void dump_ce(const struct dfs_cache_entry *ce)
ce                243 fs/cifs/dfs_cache.c 		 "interlink=%s,path_consumed=%d,expired=%s\n", ce->ce_path,
ce                244 fs/cifs/dfs_cache.c 		 ce->ce_srvtype == DFS_TYPE_ROOT ? "root" : "link", ce->ce_ttl,
ce                245 fs/cifs/dfs_cache.c 		 ce->ce_etime.tv_nsec,
ce                246 fs/cifs/dfs_cache.c 		 IS_INTERLINK_SET(ce->ce_flags) ? "yes" : "no",
ce                247 fs/cifs/dfs_cache.c 		 ce->ce_path_consumed,
ce                248 fs/cifs/dfs_cache.c 		 cache_entry_expired(ce) ? "yes" : "no");
ce                249 fs/cifs/dfs_cache.c 	dump_tgts(ce);
ce                328 fs/cifs/dfs_cache.c static inline char *get_tgt_name(const struct dfs_cache_entry *ce)
ce                330 fs/cifs/dfs_cache.c 	struct dfs_cache_tgt *t = ce->ce_tgthint;
ce                370 fs/cifs/dfs_cache.c 			 struct dfs_cache_entry *ce, const char *tgthint)
ce                374 fs/cifs/dfs_cache.c 	ce->ce_ttl = refs[0].ttl;
ce                375 fs/cifs/dfs_cache.c 	ce->ce_etime = get_expire_time(ce->ce_ttl);
ce                376 fs/cifs/dfs_cache.c 	ce->ce_srvtype = refs[0].server_type;
ce                377 fs/cifs/dfs_cache.c 	ce->ce_flags = refs[0].ref_flag;
ce                378 fs/cifs/dfs_cache.c 	ce->ce_path_consumed = refs[0].path_consumed;
ce                385 fs/cifs/dfs_cache.c 			free_tgts(ce);
ce                389 fs/cifs/dfs_cache.c 			list_add(&t->t_list, &ce->ce_tlist);
ce                392 fs/cifs/dfs_cache.c 			list_add_tail(&t->t_list, &ce->ce_tlist);
ce                394 fs/cifs/dfs_cache.c 		ce->ce_numtgts++;
ce                397 fs/cifs/dfs_cache.c 	ce->ce_tgthint = list_first_entry_or_null(&ce->ce_tlist,
ce                408 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                411 fs/cifs/dfs_cache.c 	ce = kmem_cache_zalloc(dfs_cache_slab, GFP_KERNEL);
ce                412 fs/cifs/dfs_cache.c 	if (!ce)
ce                415 fs/cifs/dfs_cache.c 	ce->ce_path = kstrdup_const(path, GFP_KERNEL);
ce                416 fs/cifs/dfs_cache.c 	if (!ce->ce_path) {
ce                417 fs/cifs/dfs_cache.c 		kmem_cache_free(dfs_cache_slab, ce);
ce                420 fs/cifs/dfs_cache.c 	INIT_HLIST_NODE(&ce->ce_hlist);
ce                421 fs/cifs/dfs_cache.c 	INIT_LIST_HEAD(&ce->ce_tlist);
ce                423 fs/cifs/dfs_cache.c 	rc = copy_ref_data(refs, numrefs, ce, NULL);
ce                425 fs/cifs/dfs_cache.c 		kfree_const(ce->ce_path);
ce                426 fs/cifs/dfs_cache.c 		kmem_cache_free(dfs_cache_slab, ce);
ce                427 fs/cifs/dfs_cache.c 		ce = ERR_PTR(rc);
ce                429 fs/cifs/dfs_cache.c 	return ce;
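
alloc_cache_entry(), as excerpted above, follows the kernel's ERR_PTR allocation idiom: zero-allocate from a dedicated slab, duplicate the lookup key with kstrdup_const() (which can alias .rodata strings instead of copying), and unwind in reverse order on failure. A hedged sketch of the same shape, reusing the illustrative demo_* names:

	#include <linux/err.h>

	static struct demo_entry *demo_alloc(const char *path)
	{
		struct demo_entry *e;

		e = kmem_cache_zalloc(demo_slab, GFP_KERNEL);
		if (!e)
			return ERR_PTR(-ENOMEM);

		e->path = kstrdup_const(path, GFP_KERNEL);
		if (!e->path) {
			kmem_cache_free(demo_slab, e);
			return ERR_PTR(-ENOMEM);
		}
		INIT_HLIST_NODE(&e->hlist);
		return e;
	}

Callers then test with IS_ERR()/PTR_ERR() rather than NULL, exactly as the find_cache_entry() and do_dfs_cache_find() hits further down do.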
ce                435 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                439 fs/cifs/dfs_cache.c 	hash_for_each_rcu(dfs_cache_htable, bucket, ce, ce_hlist) {
ce                440 fs/cifs/dfs_cache.c 		if (!to_del || timespec64_compare(&ce->ce_etime,
ce                442 fs/cifs/dfs_cache.c 			to_del = ce;
ce                460 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                462 fs/cifs/dfs_cache.c 	ce = alloc_cache_entry(path, refs, numrefs);
ce                463 fs/cifs/dfs_cache.c 	if (IS_ERR(ce))
ce                464 fs/cifs/dfs_cache.c 		return ce;
ce                466 fs/cifs/dfs_cache.c 	hlist_add_head_rcu(&ce->ce_hlist, &dfs_cache_htable[hash]);
ce                470 fs/cifs/dfs_cache.c 		dfs_cache.dc_ttl = ce->ce_ttl;
ce                474 fs/cifs/dfs_cache.c 		dfs_cache.dc_ttl = min_t(int, dfs_cache.dc_ttl, ce->ce_ttl);
ce                480 fs/cifs/dfs_cache.c 	return ce;
ce                486 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                490 fs/cifs/dfs_cache.c 	hlist_for_each_entry_rcu(ce, &dfs_cache_htable[hash], ce_hlist) {
ce                491 fs/cifs/dfs_cache.c 		if (!strcasecmp(path, ce->ce_path)) {
ce                493 fs/cifs/dfs_cache.c 			char *name = get_tgt_name(ce);
ce                507 fs/cifs/dfs_cache.c 	return found ? ce : ERR_PTR(-ENOENT);
ce                569 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                572 fs/cifs/dfs_cache.c 	ce = find_cache_entry(path, &h);
ce                573 fs/cifs/dfs_cache.c 	if (IS_ERR(ce))
ce                574 fs/cifs/dfs_cache.c 		return ce;
ce                576 fs/cifs/dfs_cache.c 	if (ce->ce_tgthint) {
ce                577 fs/cifs/dfs_cache.c 		s = ce->ce_tgthint->t_name;
ce                583 fs/cifs/dfs_cache.c 	free_tgts(ce);
ce                584 fs/cifs/dfs_cache.c 	ce->ce_numtgts = 0;
ce                586 fs/cifs/dfs_cache.c 	rc = copy_ref_data(refs, numrefs, ce, th);
ce                590 fs/cifs/dfs_cache.c 		ce = ERR_PTR(rc);
ce                592 fs/cifs/dfs_cache.c 	return ce;
ce                599 fs/cifs/dfs_cache.c 		   const char *path, struct dfs_cache_entry *ce)
ce                620 fs/cifs/dfs_cache.c 		ce = ERR_PTR(rc);
ce                622 fs/cifs/dfs_cache.c 		ce = __update_cache_entry(path, refs, numrefs);
ce                627 fs/cifs/dfs_cache.c 	return ce;
ce                646 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                652 fs/cifs/dfs_cache.c 	ce = find_cache_entry(path, &h);
ce                653 fs/cifs/dfs_cache.c 	if (IS_ERR(ce)) {
ce                660 fs/cifs/dfs_cache.c 			return ce;
ce                667 fs/cifs/dfs_cache.c 			ce = ERR_PTR(-EOPNOTSUPP);
ce                668 fs/cifs/dfs_cache.c 			return ce;
ce                671 fs/cifs/dfs_cache.c 			ce = ERR_PTR(-EINVAL);
ce                672 fs/cifs/dfs_cache.c 			return ce;
ce                685 fs/cifs/dfs_cache.c 			ce = ERR_PTR(rc);
ce                686 fs/cifs/dfs_cache.c 			return ce;
ce                698 fs/cifs/dfs_cache.c 		ce = add_cache_entry(h, path, nrefs, numnrefs);
ce                701 fs/cifs/dfs_cache.c 		if (IS_ERR(ce))
ce                702 fs/cifs/dfs_cache.c 			return ce;
ce                707 fs/cifs/dfs_cache.c 	dump_ce(ce);
ce                711 fs/cifs/dfs_cache.c 		return ce;
ce                713 fs/cifs/dfs_cache.c 	if (cache_entry_expired(ce)) {
ce                715 fs/cifs/dfs_cache.c 		ce = update_cache_entry(xid, ses, nls_codepage, remap, path,
ce                716 fs/cifs/dfs_cache.c 					ce);
ce                717 fs/cifs/dfs_cache.c 		if (IS_ERR(ce)) {
ce                722 fs/cifs/dfs_cache.c 	return ce;
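
Taken together, the do_dfs_cache_find() hits sketch the overall flow: look the path up in the hash table; bail out early with -EOPNOTSUPP or -EINVAL when a referral cannot even be attempted; on -ENOENT (when a server round-trip is allowed) fetch a referral and add a fresh entry; and if the entry exists but its TTL expired, refresh it in place. Condensed into a flow sketch, with hypothetical demo_* helpers standing in for the real ones and error paths trimmed:

	static struct demo_entry *demo_lookup(const char *path, bool noreq)
	{
		unsigned int hash;
		struct demo_entry *e;

		e = demo_find(path, &hash);	/* RCU walk of one hash bucket */
		if (IS_ERR(e)) {
			if (PTR_ERR(e) != -ENOENT || noreq)
				return e;
			/* fetch a referral and insert; may evict the oldest entry */
			return demo_fetch_and_add(hash, path);
		}
		if (!noreq && demo_expired(e))	/* TTL ran out: re-fetch */
			e = demo_refresh(path, e);
		return e;
	}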
ce                726 fs/cifs/dfs_cache.c static int setup_ref(const char *path, const struct dfs_cache_entry *ce,
ce                739 fs/cifs/dfs_cache.c 	ref->path_consumed = ce->ce_path_consumed;
ce                747 fs/cifs/dfs_cache.c 	ref->ttl = ce->ce_ttl;
ce                748 fs/cifs/dfs_cache.c 	ref->server_type = ce->ce_srvtype;
ce                749 fs/cifs/dfs_cache.c 	ref->ref_flag = ce->ce_flags;
ce                760 fs/cifs/dfs_cache.c static int get_tgt_list(const struct dfs_cache_entry *ce,
ce                771 fs/cifs/dfs_cache.c 	list_for_each_entry(t, &ce->ce_tlist, t_list) {
ce                786 fs/cifs/dfs_cache.c 		if (ce->ce_tgthint == t)
ce                791 fs/cifs/dfs_cache.c 	tl->tl_numtgts = ce->ce_numtgts;
ce                832 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                842 fs/cifs/dfs_cache.c 	ce = do_dfs_cache_find(xid, ses, nls_codepage, remap, npath, false);
ce                843 fs/cifs/dfs_cache.c 	if (!IS_ERR(ce)) {
ce                845 fs/cifs/dfs_cache.c 			rc = setup_ref(path, ce, ref, get_tgt_name(ce));
ce                849 fs/cifs/dfs_cache.c 			rc = get_tgt_list(ce, tgt_list);
ce                851 fs/cifs/dfs_cache.c 		rc = PTR_ERR(ce);
ce                879 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                889 fs/cifs/dfs_cache.c 	ce = do_dfs_cache_find(0, NULL, NULL, 0, npath, true);
ce                890 fs/cifs/dfs_cache.c 	if (IS_ERR(ce)) {
ce                891 fs/cifs/dfs_cache.c 		rc = PTR_ERR(ce);
ce                896 fs/cifs/dfs_cache.c 		rc = setup_ref(path, ce, ref, get_tgt_name(ce));
ce                900 fs/cifs/dfs_cache.c 		rc = get_tgt_list(ce, tgt_list);
ce                932 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce                945 fs/cifs/dfs_cache.c 	ce = do_dfs_cache_find(xid, ses, nls_codepage, remap, npath, false);
ce                946 fs/cifs/dfs_cache.c 	if (IS_ERR(ce)) {
ce                947 fs/cifs/dfs_cache.c 		rc = PTR_ERR(ce);
ce                953 fs/cifs/dfs_cache.c 	t = ce->ce_tgthint;
ce                958 fs/cifs/dfs_cache.c 	list_for_each_entry(t, &ce->ce_tlist, t_list) {
ce                960 fs/cifs/dfs_cache.c 			ce->ce_tgthint = t;
ce                992 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce               1006 fs/cifs/dfs_cache.c 	ce = do_dfs_cache_find(0, NULL, NULL, 0, npath, true);
ce               1007 fs/cifs/dfs_cache.c 	if (IS_ERR(ce)) {
ce               1008 fs/cifs/dfs_cache.c 		rc = PTR_ERR(ce);
ce               1014 fs/cifs/dfs_cache.c 	t = ce->ce_tgthint;
ce               1019 fs/cifs/dfs_cache.c 	list_for_each_entry(t, &ce->ce_tlist, t_list) {
ce               1021 fs/cifs/dfs_cache.c 			ce->ce_tgthint = t;
ce               1050 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce               1066 fs/cifs/dfs_cache.c 	ce = find_cache_entry(npath, &h);
ce               1067 fs/cifs/dfs_cache.c 	if (IS_ERR(ce)) {
ce               1068 fs/cifs/dfs_cache.c 		rc = PTR_ERR(ce);
ce               1074 fs/cifs/dfs_cache.c 	rc = setup_ref(path, ce, ref, it->it_name);
ce               1383 fs/cifs/dfs_cache.c 	struct dfs_cache_entry *ce;
ce               1397 fs/cifs/dfs_cache.c 	ce = find_cache_entry(npath, &h);
ce               1400 fs/cifs/dfs_cache.c 	if (IS_ERR(ce)) {
ce               1401 fs/cifs/dfs_cache.c 		rc = PTR_ERR(ce);
ce               1405 fs/cifs/dfs_cache.c 	if (!cache_entry_expired(ce))
ce               1428 fs/cifs/dfs_cache.c 			ce = __update_cache_entry(npath, refs, numrefs);
ce               1432 fs/cifs/dfs_cache.c 			if (IS_ERR(ce))
ce               1433 fs/cifs/dfs_cache.c 				rc = PTR_ERR(ce);
ce                928 fs/ext2/xattr.c 	struct mb_cache_entry *ce;
ce                935 fs/ext2/xattr.c 	ce = mb_cache_entry_find_first(ea_block_cache, hash);
ce                936 fs/ext2/xattr.c 	while (ce) {
ce                939 fs/ext2/xattr.c 		bh = sb_bread(inode->i_sb, ce->e_value);
ce                943 fs/ext2/xattr.c 				inode->i_ino, (unsigned long) ce->e_value);
ce                955 fs/ext2/xattr.c 			if (hlist_bl_unhashed(&ce->e_hash_list)) {
ce                956 fs/ext2/xattr.c 				mb_cache_entry_put(ea_block_cache, ce);
ce                963 fs/ext2/xattr.c 					  (unsigned long) ce->e_value,
ce                969 fs/ext2/xattr.c 				mb_cache_entry_touch(ea_block_cache, ce);
ce                970 fs/ext2/xattr.c 				mb_cache_entry_put(ea_block_cache, ce);
ce                976 fs/ext2/xattr.c 		ce = mb_cache_entry_find_next(ea_block_cache, ce);
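
The ext2 xattr hits show the standard mbcache scan for a duplicate extended-attribute block: walk every entry with the same hash via mb_cache_entry_find_first()/mb_cache_entry_find_next() (the latter drops the reference on the entry it was handed), read the candidate block, re-check hlist_bl_unhashed() in case the entry raced with removal, and on a hit bump the LRU with mb_cache_entry_touch() before putting the reference. A compact sketch, assuming a hypothetical demo_match() comparator:

	static struct buffer_head *demo_find_dup(struct super_block *sb,
						 struct mb_cache *cache, u32 hash)
	{
		struct mb_cache_entry *ce;

		ce = mb_cache_entry_find_first(cache, hash);
		while (ce) {
			struct buffer_head *bh = sb_bread(sb, ce->e_value);

			if (bh && demo_match(bh)) {
				mb_cache_entry_touch(cache, ce);  /* keep hot in LRU */
				mb_cache_entry_put(cache, ce);
				return bh;	/* caller brelse()s when done */
			}
			brelse(bh);	/* brelse(NULL) is a no-op */
			/* drops the ref on ce, returns the next hash match */
			ce = mb_cache_entry_find_next(cache, ce);
		}
		return NULL;
	}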
ce               1281 fs/ext4/xattr.c 			struct mb_cache_entry *ce;
ce               1284 fs/ext4/xattr.c 				ce = mb_cache_entry_get(ea_block_cache, hash,
ce               1286 fs/ext4/xattr.c 				if (ce) {
ce               1287 fs/ext4/xattr.c 					ce->e_reusable = 1;
ce               1288 fs/ext4/xattr.c 					mb_cache_entry_put(ea_block_cache, ce);
ce               1468 fs/ext4/xattr.c 	struct mb_cache_entry *ce;
ce               1475 fs/ext4/xattr.c 	ce = mb_cache_entry_find_first(ea_inode_cache, hash);
ce               1476 fs/ext4/xattr.c 	if (!ce)
ce               1481 fs/ext4/xattr.c 		mb_cache_entry_put(ea_inode_cache, ce);
ce               1485 fs/ext4/xattr.c 	while (ce) {
ce               1486 fs/ext4/xattr.c 		ea_inode = ext4_iget(inode->i_sb, ce->e_value,
ce               1496 fs/ext4/xattr.c 			mb_cache_entry_touch(ea_inode_cache, ce);
ce               1497 fs/ext4/xattr.c 			mb_cache_entry_put(ea_inode_cache, ce);
ce               1504 fs/ext4/xattr.c 		ce = mb_cache_entry_find_next(ea_inode_cache, ce);
ce               1854 fs/ext4/xattr.c 	struct mb_cache_entry *ce = NULL;
ce               1982 fs/ext4/xattr.c 						     &ce);
ce               2016 fs/ext4/xattr.c 				if (hlist_bl_unhashed(&ce->e_hash_list) ||
ce               2017 fs/ext4/xattr.c 				    !ce->e_reusable) {
ce               2027 fs/ext4/xattr.c 					mb_cache_entry_put(ea_block_cache, ce);
ce               2028 fs/ext4/xattr.c 					ce = NULL;
ce               2035 fs/ext4/xattr.c 					ce->e_reusable = 0;
ce               2046 fs/ext4/xattr.c 			mb_cache_entry_touch(ea_block_cache, ce);
ce               2047 fs/ext4/xattr.c 			mb_cache_entry_put(ea_block_cache, ce);
ce               2048 fs/ext4/xattr.c 			ce = NULL;
ce               2154 fs/ext4/xattr.c 	if (ce)
ce               2155 fs/ext4/xattr.c 		mb_cache_entry_put(ea_block_cache, ce);
ce               3048 fs/ext4/xattr.c 	struct mb_cache_entry *ce;
ce               3056 fs/ext4/xattr.c 	ce = mb_cache_entry_find_first(ea_block_cache, hash);
ce               3057 fs/ext4/xattr.c 	while (ce) {
ce               3060 fs/ext4/xattr.c 		bh = ext4_sb_bread(inode->i_sb, ce->e_value, REQ_PRIO);
ce               3066 fs/ext4/xattr.c 					 (unsigned long)ce->e_value);
ce               3068 fs/ext4/xattr.c 			*pce = ce;
ce               3072 fs/ext4/xattr.c 		ce = mb_cache_entry_find_next(ea_block_cache, ce);
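
ext4 layers one refinement on the same mbcache scan: a block is only handed out for sharing while its entry's e_reusable flag is set (re-checked under the buffer lock together with hlist_bl_unhashed()), and the flag is cleared or restored as the block's refcount crosses the sharing limit. The mb_cache_entry_get() hit above mirrors this sketch of the restore side:

	/* Re-mark an entry reusable once sharing is allowed again;
	 * grounded in the mb_cache_entry_get() lines above. */
	static void demo_mark_reusable(struct mb_cache *cache, u32 hash, u64 block)
	{
		struct mb_cache_entry *ce;

		ce = mb_cache_entry_get(cache, hash, block);	/* takes a ref */
		if (ce) {
			ce->e_reusable = 1;
			mb_cache_entry_put(cache, ce);
		}
	}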
ce                190 include/linux/clockchips.h extern int clockevents_update_freq(struct clock_event_device *ce, u32 freq);
ce                193 include/linux/clockchips.h clockevents_calc_mult_shift(struct clock_event_device *ce, u32 freq, u32 maxsec)
ce                195 include/linux/clockchips.h 	return clocks_calc_mult_shift(&ce->mult, &ce->shift, NSEC_PER_SEC, freq, maxsec);
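
clockevents_calc_mult_shift() fills ce->mult and ce->shift so that a nanosecond delta can be converted to device ticks with a multiply-and-shift instead of a division: roughly ticks = (ns * mult) >> shift. Most drivers get this for free through clockevents_config_and_register(); a direct use would look like this hedged sketch (my_clkevt and DEMO_FREQ_HZ are made up):

	#define DEMO_FREQ_HZ	32768	/* made-up device tick rate */

	static struct clock_event_device my_clkevt;

	static void demo_init(void)
	{
		/* pick mult/shift so conversions stay exact for deltas up to ~4 s */
		clockevents_calc_mult_shift(&my_clkevt, DEMO_FREQ_HZ, 4);
	}

	static u64 demo_ns_to_ticks(u64 delta_ns)
	{
		return (delta_ns * my_clkevt.mult) >> my_clkevt.shift;
	}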
ce               2898 kernel/printk/printk.c 	initcall_entry_t *ce;
ce               2907 kernel/printk/printk.c 	ce = __con_initcall_start;
ce               2909 kernel/printk/printk.c 	while (ce < __con_initcall_end) {
ce               2910 kernel/printk/printk.c 		call = initcall_from_entry(ce);
ce               2914 kernel/printk/printk.c 		ce++;
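
These printk.c hits are console_init() walking the linker-provided __con_initcall section. Reassembled from the lines above (tracing calls omitted), the loop is simply:

	initcall_entry_t *ce = __con_initcall_start;

	while (ce < __con_initcall_end) {
		/* decodes PREL32 relative entries into a function pointer too */
		initcall_t call = initcall_from_entry(ce);

		call();
		ce++;
	}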
ce                 28 kernel/time/clockevents.c 	struct clock_event_device *ce;
ce                407 kernel/time/clockevents.c 	res = __clockevents_try_unbind(cu->ce, smp_processor_id());
ce                409 kernel/time/clockevents.c 		res = clockevents_replace(cu->ce);
ce                420 kernel/time/clockevents.c 	struct ce_unbind cu = { .ce = ced, .res = -ENODEV };
ce                694 kernel/time/clockevents.c 	struct clock_event_device *ce;
ce                702 kernel/time/clockevents.c 	list_for_each_entry(ce, &clockevent_devices, list) {
ce                703 kernel/time/clockevents.c 		if (!strcmp(ce->name, name)) {
ce                704 kernel/time/clockevents.c 			ret = __clockevents_try_unbind(ce, dev->id);
ce                713 kernel/time/clockevents.c 		ret = clockevents_unbind(ce, dev->id);
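
The clockevents.c hits show struct ce_unbind acting as an in/out parameter block for a synchronous cross-CPU call: the requesting CPU fills .ce, smp_call_function_single() runs the unbind on the target CPU, and the result travels back in .res. The general shape, hedged (the demo_* names are invented):

	struct demo_req {
		struct clock_event_device *ce;
		int res;
	};

	static void demo_on_target_cpu(void *arg)
	{
		struct demo_req *r = arg;

		r->res = demo_do_work(r->ce);	/* hypothetical per-CPU work */
	}

	static int demo_call(struct clock_event_device *ced, int cpu)
	{
		struct demo_req req = { .ce = ced, .res = -ENODEV };

		/* wait == 1: the stack-allocated req stays valid throughout */
		smp_call_function_single(cpu, demo_on_target_cpu, &req, 1);
		return req.res;
	}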
ce               1664 net/bridge/netfilter/ebtables.c 	struct ebt_entry __user *ce;
ce               1680 net/bridge/netfilter/ebtables.c 	if (*size < sizeof(*ce))
ce               1683 net/bridge/netfilter/ebtables.c 	ce = *dstptr;
ce               1684 net/bridge/netfilter/ebtables.c 	if (copy_to_user(ce, e, sizeof(*ce)))
ce               1688 net/bridge/netfilter/ebtables.c 	*dstptr += sizeof(*ce);
ce               1707 net/bridge/netfilter/ebtables.c 	if (put_user(watchers_offset, &ce->watchers_offset) ||
ce               1708 net/bridge/netfilter/ebtables.c 	    put_user(target_offset, &ce->target_offset) ||
ce               1709 net/bridge/netfilter/ebtables.c 	    put_user(next_offset, &ce->next_offset))
ce               1712 net/bridge/netfilter/ebtables.c 	*size -= sizeof(*ce);
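
The ebtables hits are a compat copy-out: bounds-check the remaining user buffer, copy the fixed entry header (the kernel and compat layouts share a common prefix), patch the offsets that differ in the 32-bit layout with put_user(), and account for the consumed space. One hedged sketch of that shape (the demo_* structs are stand-ins):

	static int demo_copy_entry_to_user(const struct demo_entry *e,
					   void __user **dstptr,
					   unsigned int *size,
					   unsigned int target_offset,
					   unsigned int next_offset)
	{
		struct demo_compat_entry __user *ce;

		if (*size < sizeof(*ce))
			return -EINVAL;

		ce = *dstptr;
		if (copy_to_user(ce, e, sizeof(*ce)))	/* shared prefix only */
			return -EFAULT;

		if (put_user(target_offset, &ce->target_offset) ||
		    put_user(next_offset, &ce->next_offset))
			return -EFAULT;

		*dstptr += sizeof(*ce);
		*size -= sizeof(*ce);
		return 0;
	}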
ce               1327 net/ipv4/netfilter/arp_tables.c 	struct compat_arpt_entry __user *ce;
ce               1333 net/ipv4/netfilter/arp_tables.c 	ce = *dstptr;
ce               1334 net/ipv4/netfilter/arp_tables.c 	if (copy_to_user(ce, e, sizeof(struct arpt_entry)) != 0 ||
ce               1335 net/ipv4/netfilter/arp_tables.c 	    copy_to_user(&ce->counters, &counters[i],
ce               1349 net/ipv4/netfilter/arp_tables.c 	if (put_user(target_offset, &ce->target_offset) != 0 ||
ce               1350 net/ipv4/netfilter/arp_tables.c 	    put_user(next_offset, &ce->next_offset) != 0)
ce               1223 net/ipv4/netfilter/ip_tables.c 	struct compat_ipt_entry __user *ce;
ce               1230 net/ipv4/netfilter/ip_tables.c 	ce = *dstptr;
ce               1231 net/ipv4/netfilter/ip_tables.c 	if (copy_to_user(ce, e, sizeof(struct ipt_entry)) != 0 ||
ce               1232 net/ipv4/netfilter/ip_tables.c 	    copy_to_user(&ce->counters, &counters[i],
ce               1250 net/ipv4/netfilter/ip_tables.c 	if (put_user(target_offset, &ce->target_offset) != 0 ||
ce               1251 net/ipv4/netfilter/ip_tables.c 	    put_user(next_offset, &ce->next_offset) != 0)
ce               1239 net/ipv6/netfilter/ip6_tables.c 	struct compat_ip6t_entry __user *ce;
ce               1246 net/ipv6/netfilter/ip6_tables.c 	ce = *dstptr;
ce               1247 net/ipv6/netfilter/ip6_tables.c 	if (copy_to_user(ce, e, sizeof(struct ip6t_entry)) != 0 ||
ce               1248 net/ipv6/netfilter/ip6_tables.c 	    copy_to_user(&ce->counters, &counters[i],
ce               1266 net/ipv6/netfilter/ip6_tables.c 	if (put_user(target_offset, &ce->target_offset) != 0 ||
ce               1267 net/ipv6/netfilter/ip6_tables.c 	    put_user(next_offset, &ce->next_offset) != 0)
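
arp_tables, ip_tables and ip6_tables repeat the same compat copy-out almost verbatim; the visible addition is that each entry's counters are copied to userspace in the same pass, and the offsets are rewritten only after the matches and target have been converted (their compat forms are smaller, so the offsets shrink). Reassembled from the ip_tables lines above:

	ce = *dstptr;
	if (copy_to_user(ce, e, sizeof(struct ipt_entry)) != 0 ||
	    copy_to_user(&ce->counters, &counters[i],
			 sizeof(counters[i])) != 0)
		return -EFAULT;

	/* (per-match and per-target compat conversion happens here,
	 *  recomputing target_offset and next_offset) */

	if (put_user(target_offset, &ce->target_offset) != 0 ||
	    put_user(next_offset, &ce->next_offset) != 0)
		return -EFAULT;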
ce               1394 scripts/asn1_compiler.c 	struct element *e, *ce;
ce               1406 scripts/asn1_compiler.c 		for (ce = e->children; ce; ce = ce->next)
ce               1407 scripts/asn1_compiler.c 			render_element(out, ce, NULL);
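
In the asn1_compiler hit, ce is the child cursor of a first-child/next-sibling tree: render_element() recurses over each child in turn. A stripped-down sketch of that traversal (userspace C; the real render_element() takes more context than this):

	#include <stdio.h>

	struct element {
		struct element *children;	/* first child */
		struct element *next;		/* next sibling */
	};

	static void demo_render(FILE *out, const struct element *e)
	{
		const struct element *ce;

		/* emit e itself here, then descend */
		for (ce = e->children; ce; ce = ce->next)
			demo_render(out, ce);
	}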
ce                 27 scripts/kconfig/expr.c struct expr *expr_alloc_one(enum expr_type type, struct expr *ce)
ce                 31 scripts/kconfig/expr.c 	e->left.expr = ce;
ce                296 scripts/kconfig/expr.h struct expr *expr_alloc_one(enum expr_type type, struct expr *ce);
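
expr_alloc_one() builds a unary kconfig expression node, parking the single operand in left.expr (E_NOT is a typical caller). The body is not part of the listing; a minimal sketch of what such a one-operand allocator does, assuming kconfig's checked xcalloc():

	struct expr *expr_alloc_one(enum expr_type type, struct expr *ce)
	{
		struct expr *e = xcalloc(1, sizeof(*e));

		e->type = type;
		e->left.expr = ce;	/* sole child of the unary node */
		return e;
	}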
ce                 51 tools/perf/util/s390-sample-raw.c 	struct cf_ctrset_entry *cep, ce;
ce                 57 tools/perf/util/s390-sample-raw.c 		ce.def = be16_to_cpu(cep->def);
ce                 58 tools/perf/util/s390-sample-raw.c 		ce.set = be16_to_cpu(cep->set);
ce                 59 tools/perf/util/s390-sample-raw.c 		ce.ctr = be16_to_cpu(cep->ctr);
ce                 60 tools/perf/util/s390-sample-raw.c 		ce.res1 = be16_to_cpu(cep->res1);
ce                 62 tools/perf/util/s390-sample-raw.c 		if (!ctrset_valid(&ce) || offset + ctrset_size(&ce) > len) {
ce                 77 tools/perf/util/s390-sample-raw.c 		offset += ctrset_size(&ce);
ce                161 tools/perf/util/s390-sample-raw.c 	struct cf_ctrset_entry *cep, ce;
ce                171 tools/perf/util/s390-sample-raw.c 		ce.def = be16_to_cpu(cep->def);
ce                172 tools/perf/util/s390-sample-raw.c 		ce.set = be16_to_cpu(cep->set);
ce                173 tools/perf/util/s390-sample-raw.c 		ce.ctr = be16_to_cpu(cep->ctr);
ce                174 tools/perf/util/s390-sample-raw.c 		ce.res1 = be16_to_cpu(cep->res1);
ce                176 tools/perf/util/s390-sample-raw.c 		if (!ctrset_valid(&ce)) {	/* Print trailer */
ce                183 tools/perf/util/s390-sample-raw.c 			      " Counters:%d\n", offset, ce.set, ce.ctr);
ce                184 tools/perf/util/s390-sample-raw.c 		for (i = 0, p = (u64 *)(cep + 1); i < ce.ctr; ++i, ++p) {
ce                185 tools/perf/util/s390-sample-raw.c 			const char *ev_name = get_counter_name(ce.set, i, map);
ce                191 tools/perf/util/s390-sample-raw.c 		offset += ctrset_size(&ce);
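
Both s390-sample-raw.c hits walk a raw, big-endian buffer of counter-set headers: each header is copied field by field through be16_to_cpu() into a host-order struct cf_ctrset_entry before its validity and size are trusted, and only then does offset advance by ctrset_size(). The walk, condensed into a hedged sketch grounded in the lines above:

	static void demo_walk_ctrsets(unsigned char *buf, size_t len)
	{
		size_t offset = 0;

		while (offset < len) {
			struct cf_ctrset_entry ce;
			struct cf_ctrset_entry *cep = (void *)(buf + offset);

			ce.def = be16_to_cpu(cep->def);	/* convert before use */
			ce.set = be16_to_cpu(cep->set);
			ce.ctr = be16_to_cpu(cep->ctr);
			ce.res1 = be16_to_cpu(cep->res1);

			if (!ctrset_valid(&ce) || offset + ctrset_size(&ce) > len)
				break;	/* malformed or truncated counter set */
			offset += ctrset_size(&ce);
		}
	}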