Lines matching refs:trig (references to the "trig" local/argument in the IIO trigger core, drivers/iio/industrialio-trigger.c)

48 	struct iio_trigger *trig = to_iio_trigger(dev);  in iio_trigger_read_name()  local
49 return sprintf(buf, "%s\n", trig->name); in iio_trigger_read_name()
104 struct iio_trigger *trig = NULL, *iter; in iio_trigger_find_by_name() local
109 trig = iter; in iio_trigger_find_by_name()
114 return trig; in iio_trigger_find_by_name()
117 void iio_trigger_poll(struct iio_trigger *trig) in iio_trigger_poll() argument
121 if (!atomic_read(&trig->use_count)) { in iio_trigger_poll()
122 atomic_set(&trig->use_count, CONFIG_IIO_CONSUMERS_PER_TRIGGER); in iio_trigger_poll()
125 if (trig->subirqs[i].enabled) in iio_trigger_poll()
126 generic_handle_irq(trig->subirq_base + i); in iio_trigger_poll()
128 iio_trigger_notify_done(trig); in iio_trigger_poll()
141 void iio_trigger_poll_chained(struct iio_trigger *trig) in iio_trigger_poll_chained() argument
145 if (!atomic_read(&trig->use_count)) { in iio_trigger_poll_chained()
146 atomic_set(&trig->use_count, CONFIG_IIO_CONSUMERS_PER_TRIGGER); in iio_trigger_poll_chained()
149 if (trig->subirqs[i].enabled) in iio_trigger_poll_chained()
150 handle_nested_irq(trig->subirq_base + i); in iio_trigger_poll_chained()
152 iio_trigger_notify_done(trig); in iio_trigger_poll_chained()
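For context, a minimal sketch (hypothetical driver and handler names, not taken from the listing) of how a trigger driver normally invokes the two poll entry points above:

#include <linux/interrupt.h>
#include <linux/iio/trigger.h>

/* Hard-IRQ context: iio_trigger_poll() fires the enabled subirqs directly. */
static irqreturn_t my_trig_hard_irq_handler(int irq, void *private)
{
	struct iio_trigger *trig = private;

	iio_trigger_poll(trig);
	return IRQ_HANDLED;
}

/*
 * Threaded/nested context (e.g. an event arriving over I2C): use the
 * chained variant, which ends up in handle_nested_irq() as shown above.
 */
static irqreturn_t my_trig_threaded_handler(int irq, void *private)
{
	struct iio_trigger *trig = private;

	iio_trigger_poll_chained(trig);
	return IRQ_HANDLED;
}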
158 void iio_trigger_notify_done(struct iio_trigger *trig) in iio_trigger_notify_done() argument
160 if (atomic_dec_and_test(&trig->use_count) && trig->ops && in iio_trigger_notify_done()
161 trig->ops->try_reenable) in iio_trigger_notify_done()
162 if (trig->ops->try_reenable(trig)) in iio_trigger_notify_done()
164 iio_trigger_poll(trig); in iio_trigger_notify_done()
169 static int iio_trigger_get_irq(struct iio_trigger *trig) in iio_trigger_get_irq() argument
172 mutex_lock(&trig->pool_lock); in iio_trigger_get_irq()
173 ret = bitmap_find_free_region(trig->pool, in iio_trigger_get_irq()
176 mutex_unlock(&trig->pool_lock); in iio_trigger_get_irq()
178 ret += trig->subirq_base; in iio_trigger_get_irq()
183 static void iio_trigger_put_irq(struct iio_trigger *trig, int irq) in iio_trigger_put_irq() argument
185 mutex_lock(&trig->pool_lock); in iio_trigger_put_irq()
186 clear_bit(irq - trig->subirq_base, trig->pool); in iio_trigger_put_irq()
187 mutex_unlock(&trig->pool_lock); in iio_trigger_put_irq()
197 static int iio_trigger_attach_poll_func(struct iio_trigger *trig, in iio_trigger_attach_poll_func() argument
202 = bitmap_empty(trig->pool, CONFIG_IIO_CONSUMERS_PER_TRIGGER); in iio_trigger_attach_poll_func()
206 pf->irq = iio_trigger_get_irq(trig); in iio_trigger_attach_poll_func()
215 if (trig->ops && trig->ops->set_trigger_state && notinuse) { in iio_trigger_attach_poll_func()
216 ret = trig->ops->set_trigger_state(trig, true); in iio_trigger_attach_poll_func()
224 static int iio_trigger_detach_poll_func(struct iio_trigger *trig, in iio_trigger_detach_poll_func() argument
229 = (bitmap_weight(trig->pool, in iio_trigger_detach_poll_func()
232 if (trig->ops && trig->ops->set_trigger_state && no_other_users) { in iio_trigger_detach_poll_func()
233 ret = trig->ops->set_trigger_state(trig, false); in iio_trigger_detach_poll_func()
237 iio_trigger_put_irq(trig, pf->irq); in iio_trigger_detach_poll_func()
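A hedged sketch of the set_trigger_state() callback that the attach/detach paths above invoke (true when the first poll function is attached, false when the last one is detached), plus the validate_device() hook checked by the current_trigger store path further down; all my_trig_* names and the my_trig_state structure are hypothetical:

#include <linux/iio/iio.h>
#include <linux/iio/trigger.h>

/* Hypothetical per-trigger state, assumed to have been set with iio_trigger_set_drvdata(). */
struct my_trig_state {
	bool enabled;
};

static int my_trig_set_state(struct iio_trigger *trig, bool state)
{
	struct my_trig_state *st = iio_trigger_get_drvdata(trig);

	st->enabled = state;	/* a real driver would start/stop its event source here */
	return 0;
}

static int my_trig_validate_device(struct iio_trigger *trig,
				   struct iio_dev *indio_dev)
{
	return 0;	/* optionally reject devices this trigger cannot serve */
}

static const struct iio_trigger_ops my_trig_ops = {
	.set_trigger_state = my_trig_set_state,
	.validate_device = my_trig_validate_device,
};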
301 if (indio_dev->trig) in iio_trigger_read_current()
302 return sprintf(buf, "%s\n", indio_dev->trig->name); in iio_trigger_read_current()
319 struct iio_trigger *oldtrig = indio_dev->trig; in iio_trigger_write_current()
320 struct iio_trigger *trig; in iio_trigger_write_current() local
330 trig = iio_trigger_find_by_name(buf, len); in iio_trigger_write_current()
331 if (oldtrig == trig) in iio_trigger_write_current()
334 if (trig && indio_dev->info->validate_trigger) { in iio_trigger_write_current()
335 ret = indio_dev->info->validate_trigger(indio_dev, trig); in iio_trigger_write_current()
340 if (trig && trig->ops && trig->ops->validate_device) { in iio_trigger_write_current()
341 ret = trig->ops->validate_device(trig, indio_dev); in iio_trigger_write_current()
346 indio_dev->trig = trig; in iio_trigger_write_current()
350 if (indio_dev->trig) in iio_trigger_write_current()
351 iio_trigger_get(indio_dev->trig); in iio_trigger_write_current()
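The validate_trigger() callback exercised at lines 334-335 above is the device-side counterpart; a hedged sketch, assuming a hypothetical driver state (my_state) that remembers the trigger it registered at probe time:

#include <linux/errno.h>
#include <linux/iio/iio.h>
#include <linux/iio/trigger.h>

struct my_state {
	struct iio_trigger *trig;	/* set at probe time (hypothetical) */
};

static int my_validate_trigger(struct iio_dev *indio_dev,
			       struct iio_trigger *trig)
{
	struct my_state *st = iio_priv(indio_dev);

	/* only accept the trigger this driver itself registered */
	return st->trig == trig ? 0 : -EINVAL;
}

static const struct iio_info my_info = {
	.validate_trigger = my_validate_trigger,
};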
372 struct iio_trigger *trig = to_iio_trigger(device); in iio_trig_release() local
375 if (trig->subirq_base) { in iio_trig_release()
377 irq_modify_status(trig->subirq_base + i, in iio_trig_release()
380 irq_set_chip(trig->subirq_base + i, in iio_trig_release()
382 irq_set_handler(trig->subirq_base + i, in iio_trig_release()
386 irq_free_descs(trig->subirq_base, in iio_trig_release()
389 kfree(trig->name); in iio_trig_release()
390 kfree(trig); in iio_trig_release()
401 struct iio_trigger *trig in iio_trig_subirqmask() local
404 trig->subirqs[d->irq - trig->subirq_base].enabled = false; in iio_trig_subirqmask()
410 struct iio_trigger *trig in iio_trig_subirqunmask() local
413 trig->subirqs[d->irq - trig->subirq_base].enabled = true; in iio_trig_subirqunmask()
418 struct iio_trigger *trig; in viio_trigger_alloc() local
419 trig = kzalloc(sizeof *trig, GFP_KERNEL); in viio_trigger_alloc()
420 if (trig) { in viio_trigger_alloc()
422 trig->dev.type = &iio_trig_type; in viio_trigger_alloc()
423 trig->dev.bus = &iio_bus_type; in viio_trigger_alloc()
424 device_initialize(&trig->dev); in viio_trigger_alloc()
426 mutex_init(&trig->pool_lock); in viio_trigger_alloc()
427 trig->subirq_base in viio_trigger_alloc()
431 if (trig->subirq_base < 0) { in viio_trigger_alloc()
432 kfree(trig); in viio_trigger_alloc()
436 trig->name = kvasprintf(GFP_KERNEL, fmt, vargs); in viio_trigger_alloc()
437 if (trig->name == NULL) { in viio_trigger_alloc()
438 irq_free_descs(trig->subirq_base, in viio_trigger_alloc()
440 kfree(trig); in viio_trigger_alloc()
443 trig->subirq_chip.name = trig->name; in viio_trigger_alloc()
444 trig->subirq_chip.irq_mask = &iio_trig_subirqmask; in viio_trigger_alloc()
445 trig->subirq_chip.irq_unmask = &iio_trig_subirqunmask; in viio_trigger_alloc()
447 irq_set_chip(trig->subirq_base + i, in viio_trigger_alloc()
448 &trig->subirq_chip); in viio_trigger_alloc()
449 irq_set_handler(trig->subirq_base + i, in viio_trigger_alloc()
451 irq_modify_status(trig->subirq_base + i, in viio_trigger_alloc()
455 get_device(&trig->dev); in viio_trigger_alloc()
458 return trig; in viio_trigger_alloc()
463 struct iio_trigger *trig; in iio_trigger_alloc() local
467 trig = viio_trigger_alloc(fmt, vargs); in iio_trigger_alloc()
470 return trig; in iio_trigger_alloc()
474 void iio_trigger_free(struct iio_trigger *trig) in iio_trigger_free() argument
476 if (trig) in iio_trigger_free()
477 put_device(&trig->dev); in iio_trigger_free()
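Putting the allocation helpers above together, a hedged sketch of the usual allocate/register/free sequence in a trigger provider; my_probe_trigger and the name format are illustrative, my_trig_ops is the ops sketch given earlier, and iio_trigger_alloc() is used with the fmt-only signature it has in this listing:

#include <linux/device.h>
#include <linux/err.h>
#include <linux/iio/trigger.h>

static struct iio_trigger *my_probe_trigger(struct device *dev, int index)
{
	struct iio_trigger *trig;
	int ret;

	trig = iio_trigger_alloc("my-trig%d", index);
	if (!trig)
		return ERR_PTR(-ENOMEM);

	trig->dev.parent = dev;
	trig->ops = &my_trig_ops;

	ret = iio_trigger_register(trig);
	if (ret) {
		iio_trigger_free(trig);	/* drops the reference taken in viio_trigger_alloc() */
		return ERR_PTR(ret);
	}
	return trig;	/* balanced by iio_trigger_unregister() + iio_trigger_free() on remove */
}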
518 struct iio_trigger **ptr, *trig; in devm_iio_trigger_alloc() local
528 trig = viio_trigger_alloc(fmt, vargs); in devm_iio_trigger_alloc()
530 if (trig) { in devm_iio_trigger_alloc()
531 *ptr = trig; in devm_iio_trigger_alloc()
537 return trig; in devm_iio_trigger_alloc()
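The managed variant listed above removes the explicit free; a short sketch under the same hypothetical names, with only the allocation tied to the lifetime of dev (registration still has to be undone on remove unless a managed register helper is available in the kernel in question):

static int my_probe_trigger_managed(struct device *dev, int index)
{
	struct iio_trigger *trig;

	trig = devm_iio_trigger_alloc(dev, "my-trig%d", index);
	if (!trig)
		return -ENOMEM;

	trig->dev.parent = dev;
	trig->ops = &my_trig_ops;
	return iio_trigger_register(trig);
}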
567 if (indio_dev->trig) in iio_device_unregister_trigger_consumer()
568 iio_trigger_put(indio_dev->trig); in iio_device_unregister_trigger_consumer()
573 return iio_trigger_attach_poll_func(indio_dev->trig, in iio_triggered_buffer_postenable()
580 return iio_trigger_detach_poll_func(indio_dev->trig, in iio_triggered_buffer_predisable()
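Finally, the postenable/predisable helpers above attach and detach the device's poll function; on the consumer side that poll function normally comes from iio_triggered_buffer_setup(). A hedged sketch with a hypothetical bottom-half handler; note the iio_trigger_notify_done() call that pairs with the use_count handling shown earlier:

#include <linux/interrupt.h>
#include <linux/iio/iio.h>
#include <linux/iio/trigger_consumer.h>
#include <linux/iio/triggered_buffer.h>

static irqreturn_t my_trigger_handler(int irq, void *p)
{
	struct iio_poll_func *pf = p;
	struct iio_dev *indio_dev = pf->indio_dev;

	/* ... read a sample and push it to the buffer here ... */

	iio_trigger_notify_done(indio_dev->trig);
	return IRQ_HANDLED;
}

static int my_setup_buffer(struct iio_dev *indio_dev)
{
	/* top half records the timestamp, bottom half does the I/O */
	return iio_triggered_buffer_setup(indio_dev,
					  iio_pollfunc_store_time,
					  my_trigger_handler, NULL);
}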