Lines matching refs: indio_dev
45 static int iio_buffer_flush_hwfifo(struct iio_dev *indio_dev, in iio_buffer_flush_hwfifo() argument
48 if (!indio_dev->info->hwfifo_flush_to_buffer) in iio_buffer_flush_hwfifo()
51 return indio_dev->info->hwfifo_flush_to_buffer(indio_dev, required); in iio_buffer_flush_hwfifo()
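The core treats hwfifo_flush_to_buffer as optional and only calls it when the driver fills in the iio_info slot. A minimal sketch of the driver side, assuming a device with a hardware FIFO; all foo_* names (foo_data, foo_drain_fifo) are hypothetical:

    #include <linux/iio/iio.h>

    /* Drain at least 'count' samples from the chip FIFO into the
     * software buffer; return the number flushed or a negative errno.
     * foo_drain_fifo() is an assumed driver helper. */
    static int foo_hwfifo_flush(struct iio_dev *indio_dev, unsigned count)
    {
        struct foo_data *data = iio_priv(indio_dev);

        return foo_drain_fifo(data, count);
    }

    static const struct iio_info foo_info = {
        .hwfifo_flush_to_buffer = foo_hwfifo_flush,
    };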
54 static bool iio_buffer_ready(struct iio_dev *indio_dev, struct iio_buffer *buf, in iio_buffer_ready() argument
61 if (!indio_dev->info) in iio_buffer_ready()
75 iio_buffer_flush_hwfifo(indio_dev, buf, to_flush); in iio_buffer_ready()
80 flushed = iio_buffer_flush_hwfifo(indio_dev, buf, in iio_buffer_ready()
100 struct iio_dev *indio_dev = filp->private_data; in iio_buffer_read_first_n_outer() local
101 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_read_first_n_outer()
107 if (!indio_dev->info) in iio_buffer_read_first_n_outer()
129 iio_buffer_ready(indio_dev, rb, to_wait, to_read)); in iio_buffer_read_first_n_outer()
133 if (!indio_dev->info) in iio_buffer_read_first_n_outer()
150 struct iio_dev *indio_dev = filp->private_data; in iio_buffer_poll() local
151 struct iio_buffer *rb = indio_dev->buffer; in iio_buffer_poll()
153 if (!indio_dev->info) in iio_buffer_poll()
157 if (iio_buffer_ready(indio_dev, rb, rb->watermark, 0)) in iio_buffer_poll()
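Together these two entry points give the usual consumer loop: poll() blocks until iio_buffer_ready() sees at least 'watermark' scans (or data that can be flushed from a hardware FIFO), then read() returns whole scans. A userspace sketch, with the device path and the 16-byte scan size as assumptions:

    #include <fcntl.h>
    #include <poll.h>
    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
        char scan[16];              /* must match the device's scan_bytes */
        struct pollfd pfd = { .events = POLLIN };
        ssize_t n;

        pfd.fd = open("/dev/iio:device0", O_RDONLY);
        if (pfd.fd < 0)
            return 1;

        while (poll(&pfd, 1, -1) > 0) {
            n = read(pfd.fd, scan, sizeof(scan));
            if (n <= 0)
                break;
            printf("read %zd bytes\n", n);
        }
        close(pfd.fd);
        return 0;
    }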
169 void iio_buffer_wakeup_poll(struct iio_dev *indio_dev) in iio_buffer_wakeup_poll() argument
171 if (!indio_dev->buffer) in iio_buffer_wakeup_poll()
174 wake_up(&indio_dev->buffer->pollq); in iio_buffer_wakeup_poll()
230 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_scan_el_show() local
234 indio_dev->buffer->scan_mask); in iio_scan_el_show()
254 static bool iio_validate_scan_mask(struct iio_dev *indio_dev, in iio_validate_scan_mask() argument
257 if (!indio_dev->setup_ops->validate_scan_mask) in iio_validate_scan_mask()
260 return indio_dev->setup_ops->validate_scan_mask(indio_dev, mask); in iio_validate_scan_mask()
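The validate_scan_mask hook lets a driver veto channel combinations its sequencer cannot produce before the core commits a trial mask; the core only consults it if the pointer is non-NULL. A sketch of a custom validator, with the qux_* names and the mutual-exclusion constraint purely illustrative:

    static bool qux_validate_scan_mask(struct iio_dev *indio_dev,
                                       const unsigned long *mask)
    {
        /* hypothetical constraint: channels 0 and 1 are mutually
         * exclusive on this device */
        return !(test_bit(0, mask) && test_bit(1, mask));
    }

    static const struct iio_buffer_setup_ops qux_setup_ops = {
        .validate_scan_mask = qux_validate_scan_mask,
    };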
273 static int iio_scan_mask_set(struct iio_dev *indio_dev, in iio_scan_mask_set() argument
280 BITS_TO_LONGS(indio_dev->masklength), in iio_scan_mask_set()
285 if (!indio_dev->masklength) { in iio_scan_mask_set()
289 bitmap_copy(trialmask, buffer->scan_mask, indio_dev->masklength); in iio_scan_mask_set()
292 if (!iio_validate_scan_mask(indio_dev, trialmask)) in iio_scan_mask_set()
295 if (indio_dev->available_scan_masks) { in iio_scan_mask_set()
296 mask = iio_scan_mask_match(indio_dev->available_scan_masks, in iio_scan_mask_set()
297 indio_dev->masklength, in iio_scan_mask_set()
302 bitmap_copy(buffer->scan_mask, trialmask, indio_dev->masklength); in iio_scan_mask_set()
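When available_scan_masks is set, iio_scan_mask_set() does not commit the trial mask directly; iio_scan_mask_match() picks the first driver-declared mask that is a superset of it, so the hardware may capture more channels than the user asked for and the demux drops the excess later. A sketch of the driver side, with foo_scan_masks and the channel split as assumptions:

    static const unsigned long foo_scan_masks[] = {
        BIT(0) | BIT(1) | BIT(2),           /* x, y, z only */
        BIT(0) | BIT(1) | BIT(2) | BIT(3),  /* x, y, z plus temp */
        0,                                  /* terminator */
    };

    /* at probe time */
    indio_dev->available_scan_masks = foo_scan_masks;

Enabling just channel 3 would then match the second mask: the core captures all four channels while each buffer still sees only what it enabled.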
326 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_scan_el_store() local
327 struct iio_buffer *buffer = indio_dev->buffer; in iio_scan_el_store()
333 mutex_lock(&indio_dev->mlock); in iio_scan_el_store()
334 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_scan_el_store()
338 ret = iio_scan_mask_query(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
346 ret = iio_scan_mask_set(indio_dev, buffer, this_attr->address); in iio_scan_el_store()
352 mutex_unlock(&indio_dev->mlock); in iio_scan_el_store()
362 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_scan_el_ts_show() local
363 return sprintf(buf, "%d\n", indio_dev->buffer->scan_timestamp); in iio_scan_el_ts_show()
372 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_scan_el_ts_store() local
379 mutex_lock(&indio_dev->mlock); in iio_scan_el_ts_store()
380 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_scan_el_ts_store()
384 indio_dev->buffer->scan_timestamp = state; in iio_scan_el_ts_store()
386 mutex_unlock(&indio_dev->mlock); in iio_scan_el_ts_store()
391 static int iio_buffer_add_channel_sysfs(struct iio_dev *indio_dev, in iio_buffer_add_channel_sysfs() argument
395 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_add_channel_sysfs()
403 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
414 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
426 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
435 &indio_dev->dev, in iio_buffer_add_channel_sysfs()
448 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_buffer_read_length() local
449 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_read_length()
458 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_buffer_write_length() local
459 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_write_length()
470 mutex_lock(&indio_dev->mlock); in iio_buffer_write_length()
471 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_buffer_write_length()
482 mutex_unlock(&indio_dev->mlock); in iio_buffer_write_length()
491 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_buffer_show_enable() local
492 return sprintf(buf, "%d\n", iio_buffer_is_active(indio_dev->buffer)); in iio_buffer_show_enable()
495 static int iio_compute_scan_bytes(struct iio_dev *indio_dev, in iio_compute_scan_bytes() argument
504 indio_dev->masklength) { in iio_compute_scan_bytes()
505 ch = iio_find_channel_from_si(indio_dev, i); in iio_compute_scan_bytes()
515 ch = iio_find_channel_from_si(indio_dev, in iio_compute_scan_bytes()
516 indio_dev->scan_index_timestamp); in iio_compute_scan_bytes()
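The loop above aligns every field to its own storage size and the timestamp to 8 bytes. A standalone worked example of that arithmetic for three 16-bit channels plus a 64-bit timestamp (offsets 0, 2, 4, then 8; 16 bytes total):

    #include <stdio.h>

    #define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned int bytes = 0;
        int i;

        for (i = 0; i < 3; i++) {       /* three 16-bit channels */
            bytes = ALIGN_UP(bytes, 2);
            bytes += 2;
        }
        bytes = ALIGN_UP(bytes, 8);     /* 64-bit timestamp */
        bytes += 8;
        printf("scan_bytes = %u\n", bytes);   /* prints 16 */
        return 0;
    }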
528 static void iio_buffer_activate(struct iio_dev *indio_dev, in iio_buffer_activate() argument
532 list_add(&buffer->buffer_list, &indio_dev->buffer_list); in iio_buffer_activate()
542 void iio_disable_all_buffers(struct iio_dev *indio_dev) in iio_disable_all_buffers() argument
546 if (list_empty(&indio_dev->buffer_list)) in iio_disable_all_buffers()
549 if (indio_dev->setup_ops->predisable) in iio_disable_all_buffers()
550 indio_dev->setup_ops->predisable(indio_dev); in iio_disable_all_buffers()
553 &indio_dev->buffer_list, buffer_list) in iio_disable_all_buffers()
556 indio_dev->currentmode = INDIO_DIRECT_MODE; in iio_disable_all_buffers()
557 if (indio_dev->setup_ops->postdisable) in iio_disable_all_buffers()
558 indio_dev->setup_ops->postdisable(indio_dev); in iio_disable_all_buffers()
560 if (indio_dev->available_scan_masks == NULL) in iio_disable_all_buffers()
561 kfree(indio_dev->active_scan_mask); in iio_disable_all_buffers()
564 static void iio_buffer_update_bytes_per_datum(struct iio_dev *indio_dev, in iio_buffer_update_bytes_per_datum() argument
572 bytes = iio_compute_scan_bytes(indio_dev, buffer->scan_mask, in iio_buffer_update_bytes_per_datum()
578 static int __iio_update_buffers(struct iio_dev *indio_dev, in __iio_update_buffers() argument
589 if (!list_empty(&indio_dev->buffer_list)) { in __iio_update_buffers()
590 if (indio_dev->setup_ops->predisable) { in __iio_update_buffers()
591 ret = indio_dev->setup_ops->predisable(indio_dev); in __iio_update_buffers()
595 indio_dev->currentmode = INDIO_DIRECT_MODE; in __iio_update_buffers()
596 if (indio_dev->setup_ops->postdisable) { in __iio_update_buffers()
597 ret = indio_dev->setup_ops->postdisable(indio_dev); in __iio_update_buffers()
603 old_mask = indio_dev->active_scan_mask; in __iio_update_buffers()
604 if (!indio_dev->available_scan_masks) in __iio_update_buffers()
605 indio_dev->active_scan_mask = NULL; in __iio_update_buffers()
610 iio_buffer_activate(indio_dev, insert_buffer); in __iio_update_buffers()
613 if (list_empty(&indio_dev->buffer_list)) { in __iio_update_buffers()
614 indio_dev->currentmode = INDIO_DIRECT_MODE; in __iio_update_buffers()
615 if (indio_dev->available_scan_masks == NULL) in __iio_update_buffers()
621 compound_mask = kcalloc(BITS_TO_LONGS(indio_dev->masklength), in __iio_update_buffers()
624 if (indio_dev->available_scan_masks == NULL) in __iio_update_buffers()
628 indio_dev->scan_timestamp = 0; in __iio_update_buffers()
630 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in __iio_update_buffers()
632 indio_dev->masklength); in __iio_update_buffers()
633 indio_dev->scan_timestamp |= buffer->scan_timestamp; in __iio_update_buffers()
635 if (indio_dev->available_scan_masks) { in __iio_update_buffers()
636 indio_dev->active_scan_mask = in __iio_update_buffers()
637 iio_scan_mask_match(indio_dev->available_scan_masks, in __iio_update_buffers()
638 indio_dev->masklength, in __iio_update_buffers()
640 if (indio_dev->active_scan_mask == NULL) { in __iio_update_buffers()
647 indio_dev->active_scan_mask = old_mask; in __iio_update_buffers()
657 indio_dev->active_scan_mask = compound_mask; in __iio_update_buffers()
660 iio_update_demux(indio_dev); in __iio_update_buffers()
663 if (indio_dev->setup_ops->preenable) { in __iio_update_buffers()
664 ret = indio_dev->setup_ops->preenable(indio_dev); in __iio_update_buffers()
671 indio_dev->scan_bytes = in __iio_update_buffers()
672 iio_compute_scan_bytes(indio_dev, in __iio_update_buffers()
673 indio_dev->active_scan_mask, in __iio_update_buffers()
674 indio_dev->scan_timestamp); in __iio_update_buffers()
675 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in __iio_update_buffers()
676 iio_buffer_update_bytes_per_datum(indio_dev, buffer); in __iio_update_buffers()
686 if (indio_dev->info->update_scan_mode) { in __iio_update_buffers()
687 ret = indio_dev->info in __iio_update_buffers()
688 ->update_scan_mode(indio_dev, in __iio_update_buffers()
689 indio_dev->active_scan_mask); in __iio_update_buffers()
696 if ((indio_dev->modes & INDIO_BUFFER_TRIGGERED) && indio_dev->trig) { in __iio_update_buffers()
697 indio_dev->currentmode = INDIO_BUFFER_TRIGGERED; in __iio_update_buffers()
698 } else if (indio_dev->modes & INDIO_BUFFER_HARDWARE) { in __iio_update_buffers()
699 indio_dev->currentmode = INDIO_BUFFER_HARDWARE; in __iio_update_buffers()
700 } else if (indio_dev->modes & INDIO_BUFFER_SOFTWARE) { in __iio_update_buffers()
701 indio_dev->currentmode = INDIO_BUFFER_SOFTWARE; in __iio_update_buffers()
704 if (indio_dev->modes & INDIO_BUFFER_TRIGGERED) in __iio_update_buffers()
710 if (indio_dev->setup_ops->postenable) { in __iio_update_buffers()
711 ret = indio_dev->setup_ops->postenable(indio_dev); in __iio_update_buffers()
715 indio_dev->currentmode = INDIO_DIRECT_MODE; in __iio_update_buffers()
716 if (indio_dev->setup_ops->postdisable) in __iio_update_buffers()
717 indio_dev->setup_ops->postdisable(indio_dev); in __iio_update_buffers()
722 if (indio_dev->available_scan_masks) in __iio_update_buffers()
730 indio_dev->currentmode = INDIO_DIRECT_MODE; in __iio_update_buffers()
732 if (indio_dev->setup_ops->postdisable) in __iio_update_buffers()
733 indio_dev->setup_ops->postdisable(indio_dev); in __iio_update_buffers()
737 indio_dev->active_scan_mask = old_mask; in __iio_update_buffers()
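Taken as a whole, __iio_update_buffers() fixes the callback order a driver can rely on: predisable/postdisable around tearing down the old configuration, then preenable, the iio_info update_scan_mode() hook, the mode switch, and postenable last; any late failure unwinds through postdisable. A sketch of setup_ops written against that ordering, with the bar_* names hypothetical:

    static int bar_buffer_preenable(struct iio_dev *indio_dev)
    {
        /* runs before update_scan_mode(); e.g. power up the ADC */
        return 0;
    }

    static int bar_buffer_postenable(struct iio_dev *indio_dev)
    {
        /* runs last on success; e.g. start conversions */
        return 0;
    }

    static int bar_buffer_predisable(struct iio_dev *indio_dev)
    {
        /* first step of teardown; e.g. stop conversions */
        return 0;
    }

    static const struct iio_buffer_setup_ops bar_buffer_setup_ops = {
        .preenable = bar_buffer_preenable,
        .postenable = bar_buffer_postenable,
        .predisable = bar_buffer_predisable,
    };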
742 int iio_update_buffers(struct iio_dev *indio_dev, in iio_update_buffers() argument
751 mutex_lock(&indio_dev->info_exist_lock); in iio_update_buffers()
752 mutex_lock(&indio_dev->mlock); in iio_update_buffers()
765 if (indio_dev->info == NULL) { in iio_update_buffers()
770 ret = __iio_update_buffers(indio_dev, insert_buffer, remove_buffer); in iio_update_buffers()
773 mutex_unlock(&indio_dev->mlock); in iio_update_buffers()
774 mutex_unlock(&indio_dev->info_exist_lock); in iio_update_buffers()
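iio_update_buffers() is the exported, locked wrapper around the worker above, taking one buffer to insert and/or one to remove. A sketch of how an in-kernel consumer would pair the calls; the my_* names are illustrative:

    static int my_start_capture(struct iio_dev *indio_dev,
                                struct iio_buffer *my_buffer)
    {
        return iio_update_buffers(indio_dev, my_buffer, NULL);
    }

    static int my_stop_capture(struct iio_dev *indio_dev,
                               struct iio_buffer *my_buffer)
    {
        return iio_update_buffers(indio_dev, NULL, my_buffer);
    }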
787 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_buffer_store_enable() local
794 mutex_lock(&indio_dev->mlock); in iio_buffer_store_enable()
797 inlist = iio_buffer_is_active(indio_dev->buffer); in iio_buffer_store_enable()
803 ret = __iio_update_buffers(indio_dev, in iio_buffer_store_enable()
804 indio_dev->buffer, NULL); in iio_buffer_store_enable()
806 ret = __iio_update_buffers(indio_dev, in iio_buffer_store_enable()
807 NULL, indio_dev->buffer); in iio_buffer_store_enable()
812 mutex_unlock(&indio_dev->mlock); in iio_buffer_store_enable()
822 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_buffer_show_watermark() local
823 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_show_watermark()
833 struct iio_dev *indio_dev = dev_to_iio_dev(dev); in iio_buffer_store_watermark() local
834 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_store_watermark()
844 mutex_lock(&indio_dev->mlock); in iio_buffer_store_watermark()
851 if (iio_buffer_is_active(indio_dev->buffer)) { in iio_buffer_store_watermark()
858 if (indio_dev->info->hwfifo_set_watermark) in iio_buffer_store_watermark()
859 indio_dev->info->hwfifo_set_watermark(indio_dev, val); in iio_buffer_store_watermark()
861 mutex_unlock(&indio_dev->mlock); in iio_buffer_store_watermark()
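When userspace writes the watermark attribute, the store path forwards the value to the optional hwfifo_set_watermark hook so the driver can program the chip's FIFO threshold to match. The driver-side counterpart to the flush sketch earlier, with the foo_* names and the 32-sample FIFO depth assumed:

    static int foo_hwfifo_set_watermark(struct iio_dev *indio_dev,
                                        unsigned val)
    {
        struct foo_data *data = iio_priv(indio_dev);

        /* clamp to the assumed hardware FIFO depth */
        val = min(val, 32U);
        return foo_write_fifo_threshold(data, val);
    }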
881 int iio_buffer_alloc_sysfs_and_mask(struct iio_dev *indio_dev) in iio_buffer_alloc_sysfs_and_mask() argument
885 struct iio_buffer *buffer = indio_dev->buffer; in iio_buffer_alloc_sysfs_and_mask()
916 indio_dev->groups[indio_dev->groupcounter++] = &buffer->buffer_group; in iio_buffer_alloc_sysfs_and_mask()
925 channels = indio_dev->channels; in iio_buffer_alloc_sysfs_and_mask()
928 for (i = 0; i < indio_dev->num_channels; i++) { in iio_buffer_alloc_sysfs_and_mask()
934 (int)indio_dev->masklength - 1) in iio_buffer_alloc_sysfs_and_mask()
935 indio_dev->masklength in iio_buffer_alloc_sysfs_and_mask()
938 ret = iio_buffer_add_channel_sysfs(indio_dev, in iio_buffer_alloc_sysfs_and_mask()
944 indio_dev->scan_index_timestamp = in iio_buffer_alloc_sysfs_and_mask()
947 if (indio_dev->masklength && buffer->scan_mask == NULL) { in iio_buffer_alloc_sysfs_and_mask()
948 buffer->scan_mask = kcalloc(BITS_TO_LONGS(indio_dev->masklength), in iio_buffer_alloc_sysfs_and_mask()
974 indio_dev->groups[indio_dev->groupcounter++] = &buffer->scan_el_group; in iio_buffer_alloc_sysfs_and_mask()
982 kfree(indio_dev->buffer->buffer_group.attrs); in iio_buffer_alloc_sysfs_and_mask()
987 void iio_buffer_free_sysfs_and_mask(struct iio_dev *indio_dev) in iio_buffer_free_sysfs_and_mask() argument
989 if (!indio_dev->buffer) in iio_buffer_free_sysfs_and_mask()
992 kfree(indio_dev->buffer->scan_mask); in iio_buffer_free_sysfs_and_mask()
993 kfree(indio_dev->buffer->buffer_group.attrs); in iio_buffer_free_sysfs_and_mask()
994 kfree(indio_dev->buffer->scan_el_group.attrs); in iio_buffer_free_sysfs_and_mask()
995 iio_free_chan_devattr_list(&indio_dev->buffer->scan_el_dev_attr_list); in iio_buffer_free_sysfs_and_mask()
1007 bool iio_validate_scan_mask_onehot(struct iio_dev *indio_dev, in iio_validate_scan_mask_onehot() argument
1010 return bitmap_weight(mask, indio_dev->masklength) == 1; in iio_validate_scan_mask_onehot()
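Devices whose sequencer captures exactly one channel per scan can plug this helper straight into their setup_ops instead of writing a custom validator (the ad_sigma_delta core does this, for example):

    static const struct iio_buffer_setup_ops baz_setup_ops = {
        .validate_scan_mask = iio_validate_scan_mask_onehot,
    };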
1014 int iio_scan_mask_query(struct iio_dev *indio_dev, in iio_scan_mask_query() argument
1017 if (bit > indio_dev->masklength) in iio_scan_mask_query()
1083 int iio_push_to_buffers(struct iio_dev *indio_dev, const void *data) in iio_push_to_buffers() argument
1088 list_for_each_entry(buf, &indio_dev->buffer_list, buffer_list) { in iio_push_to_buffers()
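This is the producer side of the whole file: a driver hands one scan's worth of data to every attached buffer. In a triggered-buffer driver it is typically called from the pollfunc bottom half; a sketch assuming iio_pollfunc_store_time as the top half, with the foo_* names and the 16-byte scan illustrative:

    static irqreturn_t foo_trigger_handler(int irq, void *p)
    {
        struct iio_poll_func *pf = p;
        struct iio_dev *indio_dev = pf->indio_dev;
        u8 scan[16] __aligned(8);   /* sized for scan_bytes */

        if (foo_read_scan(iio_priv(indio_dev), scan) == 0)
            iio_push_to_buffers_with_timestamp(indio_dev, scan,
                                               pf->timestamp);

        iio_trigger_notify_done(indio_dev->trig);
        return IRQ_HANDLED;
    }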
1119 static int iio_buffer_update_demux(struct iio_dev *indio_dev, in iio_buffer_update_demux() argument
1133 if (bitmap_equal(indio_dev->active_scan_mask, in iio_buffer_update_demux()
1135 indio_dev->masklength)) in iio_buffer_update_demux()
1141 indio_dev->masklength) { in iio_buffer_update_demux()
1142 in_ind = find_next_bit(indio_dev->active_scan_mask, in iio_buffer_update_demux()
1143 indio_dev->masklength, in iio_buffer_update_demux()
1146 in_ind = find_next_bit(indio_dev->active_scan_mask, in iio_buffer_update_demux()
1147 indio_dev->masklength, in iio_buffer_update_demux()
1149 ch = iio_find_channel_from_si(indio_dev, in_ind); in iio_buffer_update_demux()
1158 ch = iio_find_channel_from_si(indio_dev, in_ind); in iio_buffer_update_demux()
1174 ch = iio_find_channel_from_si(indio_dev, in iio_buffer_update_demux()
1175 indio_dev->scan_index_timestamp); in iio_buffer_update_demux()
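The demux table built here describes contiguous copy regions from the device's full scan into each buffer's narrower one. A standalone worked example of the resulting copies, assuming channels 0, 1, 2 active at 2 bytes each and a buffer that enabled only 0 and 2; the region layout mirrors the from/to/length triples used above:

    #include <stdio.h>
    #include <string.h>

    struct region { size_t from, to, length; };

    int main(void)
    {
        const unsigned char in[6] = { 0xa0, 0xa1, 0xb0, 0xb1, 0xc0, 0xc1 };
        const struct region r[] = {
            { .from = 0, .to = 0, .length = 2 },    /* channel 0 */
            { .from = 4, .to = 2, .length = 2 },    /* channel 2 */
        };
        unsigned char out[4];
        size_t i;

        for (i = 0; i < 2; i++)
            memcpy(out + r[i].to, in + r[i].from, r[i].length);
        printf("%02x %02x %02x %02x\n", out[0], out[1], out[2], out[3]);
        /* prints: a0 a1 c0 c1 */
        return 0;
    }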
1202 int iio_update_demux(struct iio_dev *indio_dev) in iio_update_demux() argument
1207 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) { in iio_update_demux()
1208 ret = iio_buffer_update_demux(indio_dev, buffer); in iio_update_demux()
1215 list_for_each_entry(buffer, &indio_dev->buffer_list, buffer_list) in iio_update_demux()