/*
 * Industrial I/O DMA buffer infrastructure.
 *
 * Declares the block, queue and callback types plus the helper API used by
 * IIO drivers that stream sample data through DMA.
 */
7 #ifndef __INDUSTRIALIO_DMA_BUFFER_H__
8 #define __INDUSTRIALIO_DMA_BUFFER_H__
9
10 #include <linux/list.h>
11 #include <linux/kref.h>
12 #include <linux/spinlock.h>
13 #include <linux/mutex.h>
14 #include <linux/iio/buffer.h>
15
16 struct iio_dma_buffer_queue;
17 struct iio_dma_buffer_ops;
18 struct device;
19
/**
 * struct iio_buffer_block - Descriptor for one chunk of buffer memory
 * @size: Total size of the block in bytes
 * @bytes_used: Number of bytes in the block that contain valid sample data
 */
struct iio_buffer_block {
	u32 size;
	u32 bytes_used;
};
24
/**
 * enum iio_block_state - State of a struct iio_dma_buffer_block
 * @IIO_BLOCK_STATE_DEQUEUED: Block can be used by the application
 * @IIO_BLOCK_STATE_QUEUED: Block is on the incoming queue
 * @IIO_BLOCK_STATE_ACTIVE: Block is currently being processed by the DMA
 * @IIO_BLOCK_STATE_DONE: Block is on the outgoing queue
 * @IIO_BLOCK_STATE_DEAD: Block has been marked as to be freed
 */
enum iio_block_state {
	IIO_BLOCK_STATE_DEQUEUED,
	IIO_BLOCK_STATE_QUEUED,
	IIO_BLOCK_STATE_ACTIVE,
	IIO_BLOCK_STATE_DONE,
	IIO_BLOCK_STATE_DEAD,
};
40
/**
 * struct iio_dma_buffer_block - IIO buffer block
 * @head: List head
 * @bytes_used: Number of bytes that contain valid data
 * @vaddr: Virtual address of the block's memory
 * @phys_addr: DMA address of the block's memory
 * @size: Total size of the block in bytes
 * @queue: Parent DMA buffer queue
 * @kref: kref used to manage the lifetime of the block
 * @state: Current state of the block
 */
struct iio_dma_buffer_block {
	/* May only be accessed by the owner of the block */
	struct list_head head;
	size_t bytes_used;

	/*
	 * Set during allocation, constant thereafter. May be accessed
	 * read-only by anybody holding a reference to the block.
	 */
	void *vaddr;
	dma_addr_t phys_addr;
	size_t size;
	struct iio_dma_buffer_queue *queue;

	/* Must not be accessed outside the core */
	struct kref kref;
	/*
	 * Must not be accessed outside the core. Access needs to hold
	 * queue->list_lock if the block is not owned by the core.
	 */
	enum iio_block_state state;
};
74
/**
 * struct iio_dma_buffer_queue_fileio - FileIO state for the DMA buffer
 * @blocks: Buffer blocks used for fileio (double-buffered)
 * @active_block: Block being used in read()
 * @pos: Read offset in the active block
 * @block_size: Size of each block
 */
struct iio_dma_buffer_queue_fileio {
	struct iio_dma_buffer_block *blocks[2];
	struct iio_dma_buffer_block *active_block;
	size_t pos;
	size_t block_size;
};
88
/**
 * struct iio_dma_buffer_queue - DMA buffer base structure
 * @buffer: IIO buffer base structure
 * @dev: Device the buffer is allocated for
 * @ops: DMA buffer callbacks
 * @lock: Protects the incoming list, @active and the fields in the fileio
 *	substruct
 * @list_lock: Protects lists that contain blocks which can be modified in
 *	atomic context as well as blocks on those lists. This is the outgoing
 *	queue list and typically also a list of active blocks in the part
 *	driver
 * @incoming: List of buffers on the incoming queue
 * @outgoing: List of buffers on the outgoing queue
 * @active: Whether the buffer is currently active
 * @fileio: FileIO state
 */
struct iio_dma_buffer_queue {
	struct iio_buffer buffer;
	struct device *dev;
	const struct iio_dma_buffer_ops *ops;

	struct mutex lock;
	spinlock_t list_lock;
	struct list_head incoming;
	struct list_head outgoing;

	bool active;

	struct iio_dma_buffer_queue_fileio fileio;
};
119
/**
 * struct iio_dma_buffer_ops - DMA buffer callback operations
 * @submit: Called when a block is submitted to the DMA controller
 * @abort: Should abort all pending transfers
 */
struct iio_dma_buffer_ops {
	int (*submit)(struct iio_dma_buffer_queue *queue,
		struct iio_dma_buffer_block *block);
	void (*abort)(struct iio_dma_buffer_queue *queue);
};
130
/*
 * Called by the DMA controller driver: signal completion of a single block,
 * or abort a whole list of in-flight blocks (pairs with ops->abort).
 */
void iio_dma_buffer_block_done(struct iio_dma_buffer_block *block);
void iio_dma_buffer_block_list_abort(struct iio_dma_buffer_queue *queue,
	struct list_head *list);

/*
 * Generic implementations of the iio_buffer_access_funcs callbacks; drivers
 * building on this infrastructure wire these into their access-funcs table.
 */
int iio_dma_buffer_enable(struct iio_buffer *buffer,
	struct iio_dev *indio_dev);
int iio_dma_buffer_disable(struct iio_buffer *buffer,
	struct iio_dev *indio_dev);
int iio_dma_buffer_read(struct iio_buffer *buffer, size_t n,
	char __user *user_buffer);
size_t iio_dma_buffer_data_available(struct iio_buffer *buffer);
int iio_dma_buffer_set_bytes_per_datum(struct iio_buffer *buffer, size_t bpd);
int iio_dma_buffer_set_length(struct iio_buffer *buffer, unsigned int length);
int iio_dma_buffer_request_update(struct iio_buffer *buffer);

/* Queue lifecycle: init on probe, exit on remove, release on last reference. */
int iio_dma_buffer_init(struct iio_dma_buffer_queue *queue,
	struct device *dma_dev, const struct iio_dma_buffer_ops *ops);
void iio_dma_buffer_exit(struct iio_dma_buffer_queue *queue);
void iio_dma_buffer_release(struct iio_dma_buffer_queue *queue);
150
151 #endif