Searched refs:chan (Results 1 - 200 of 1127) sorted by relevance


/linux-4.1.27/drivers/video/fbdev/savage/
savagefb-i2c.c
47 struct savagefb_i2c_chan *chan = data; savage4_gpio_setscl() local
50 r = readl(chan->ioaddr + chan->reg); savage4_gpio_setscl()
55 writel(r, chan->ioaddr + chan->reg); savage4_gpio_setscl()
56 readl(chan->ioaddr + chan->reg); /* flush posted write */ savage4_gpio_setscl()
61 struct savagefb_i2c_chan *chan = data; savage4_gpio_setsda() local
64 r = readl(chan->ioaddr + chan->reg); savage4_gpio_setsda()
69 writel(r, chan->ioaddr + chan->reg); savage4_gpio_setsda()
70 readl(chan->ioaddr + chan->reg); /* flush posted write */ savage4_gpio_setsda()
75 struct savagefb_i2c_chan *chan = data; savage4_gpio_getscl() local
77 return (0 != (readl(chan->ioaddr + chan->reg) & SAVAGE4_I2C_SCL_IN)); savage4_gpio_getscl()
82 struct savagefb_i2c_chan *chan = data; savage4_gpio_getsda() local
84 return (0 != (readl(chan->ioaddr + chan->reg) & SAVAGE4_I2C_SDA_IN)); savage4_gpio_getsda()
89 struct savagefb_i2c_chan *chan = data; prosavage_gpio_setscl() local
92 r = VGArCR(chan->reg, chan->par); prosavage_gpio_setscl()
100 VGAwCR(chan->reg, r, chan->par); prosavage_gpio_setscl()
105 struct savagefb_i2c_chan *chan = data; prosavage_gpio_setsda() local
108 r = VGArCR(chan->reg, chan->par); prosavage_gpio_setsda()
116 VGAwCR(chan->reg, r, chan->par); prosavage_gpio_setsda()
121 struct savagefb_i2c_chan *chan = data; prosavage_gpio_getscl() local
123 return (VGArCR(chan->reg, chan->par) & PROSAVAGE_I2C_SCL_IN) ? 1 : 0; prosavage_gpio_getscl()
128 struct savagefb_i2c_chan *chan = data; prosavage_gpio_getsda() local
130 return (VGArCR(chan->reg, chan->par) & PROSAVAGE_I2C_SDA_IN) ? 1 : 0; prosavage_gpio_getsda()
133 static int savage_setup_i2c_bus(struct savagefb_i2c_chan *chan, savage_setup_i2c_bus() argument
138 if (chan->par) { savage_setup_i2c_bus()
139 strcpy(chan->adapter.name, name); savage_setup_i2c_bus()
140 chan->adapter.owner = THIS_MODULE; savage_setup_i2c_bus()
141 chan->adapter.algo_data = &chan->algo; savage_setup_i2c_bus()
142 chan->adapter.dev.parent = &chan->par->pcidev->dev; savage_setup_i2c_bus()
143 chan->algo.udelay = 10; savage_setup_i2c_bus()
144 chan->algo.timeout = 20; savage_setup_i2c_bus()
145 chan->algo.data = chan; savage_setup_i2c_bus()
147 i2c_set_adapdata(&chan->adapter, chan); savage_setup_i2c_bus()
150 chan->algo.setsda(chan, 1); savage_setup_i2c_bus()
151 chan->algo.setscl(chan, 1); savage_setup_i2c_bus()
154 rc = i2c_bit_add_bus(&chan->adapter); savage_setup_i2c_bus()
157 dev_dbg(&chan->par->pcidev->dev, savage_setup_i2c_bus()
160 dev_warn(&chan->par->pcidev->dev, savage_setup_i2c_bus()
170 par->chan.par = par; savagefb_create_i2c_busses()
176 par->chan.reg = CR_SERIAL2; savagefb_create_i2c_busses()
177 par->chan.ioaddr = par->mmio.vbase; savagefb_create_i2c_busses()
178 par->chan.algo.setsda = prosavage_gpio_setsda; savagefb_create_i2c_busses()
179 par->chan.algo.setscl = prosavage_gpio_setscl; savagefb_create_i2c_busses()
180 par->chan.algo.getsda = prosavage_gpio_getsda; savagefb_create_i2c_busses()
181 par->chan.algo.getscl = prosavage_gpio_getscl; savagefb_create_i2c_busses()
184 par->chan.reg = CR_SERIAL1; savagefb_create_i2c_busses()
186 par->chan.reg = CR_SERIAL2; savagefb_create_i2c_busses()
187 par->chan.ioaddr = par->mmio.vbase; savagefb_create_i2c_busses()
188 par->chan.algo.setsda = prosavage_gpio_setsda; savagefb_create_i2c_busses()
189 par->chan.algo.setscl = prosavage_gpio_setscl; savagefb_create_i2c_busses()
190 par->chan.algo.getsda = prosavage_gpio_getsda; savagefb_create_i2c_busses()
191 par->chan.algo.getscl = prosavage_gpio_getscl; savagefb_create_i2c_busses()
194 par->chan.reg = MM_SERIAL1; savagefb_create_i2c_busses()
195 par->chan.ioaddr = par->mmio.vbase; savagefb_create_i2c_busses()
196 par->chan.algo.setsda = savage4_gpio_setsda; savagefb_create_i2c_busses()
197 par->chan.algo.setscl = savage4_gpio_setscl; savagefb_create_i2c_busses()
198 par->chan.algo.getsda = savage4_gpio_getsda; savagefb_create_i2c_busses()
199 par->chan.algo.getscl = savage4_gpio_getscl; savagefb_create_i2c_busses()
202 par->chan.par = NULL; savagefb_create_i2c_busses()
205 savage_setup_i2c_bus(&par->chan, "SAVAGE DDC2"); savagefb_create_i2c_busses()
212 if (par->chan.par) savagefb_delete_i2c_busses()
213 i2c_del_adapter(&par->chan.adapter); savagefb_delete_i2c_busses()
215 par->chan.par = NULL; savagefb_delete_i2c_busses()
223 if (par->chan.par) savagefb_probe_i2c_connector()
224 edid = fb_ddc_read(&par->chan.adapter); savagefb_probe_i2c_connector()
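The savagefb-i2c.c hits above all follow the kernel's standard bit-banged I2C pattern: the driver supplies four GPIO callbacks plus bus timing in a struct i2c_algo_bit_data, points the adapter's algo_data at it, and registers the adapter with i2c_bit_add_bus(). A minimal sketch of that registration, with the device-specific register twiddling stubbed out (all demo_* names are illustrative, not from the driver):

#include <linux/delay.h>
#include <linux/i2c.h>
#include <linux/i2c-algo-bit.h>
#include <linux/jiffies.h>
#include <linux/module.h>

struct demo_i2c_chan {
	struct i2c_adapter adapter;
	struct i2c_algo_bit_data algo;
	/* device state (MMIO base, register offset, ...) would go here */
};

/* The real callbacks read-modify-write a GPIO register, as in
 * savage4_gpio_setscl() above; stubbed here because the layout is
 * per-chip. */
static void demo_setscl(void *data, int state) { /* drive SCL */ }
static void demo_setsda(void *data, int state) { /* drive SDA */ }
static int demo_getscl(void *data) { return 1; /* sample SCL */ }
static int demo_getsda(void *data) { return 1; /* sample SDA */ }

static int demo_setup_i2c_bus(struct demo_i2c_chan *chan,
			      struct device *parent, const char *name)
{
	strlcpy(chan->adapter.name, name, sizeof(chan->adapter.name));
	chan->adapter.owner = THIS_MODULE;
	chan->adapter.algo_data = &chan->algo;
	chan->adapter.dev.parent = parent;
	chan->algo.setsda = demo_setsda;
	chan->algo.setscl = demo_setscl;
	chan->algo.getsda = demo_getsda;
	chan->algo.getscl = demo_getscl;
	chan->algo.udelay = 10;			/* half-period, microseconds */
	chan->algo.timeout = msecs_to_jiffies(2);
	chan->algo.data = chan;			/* handed back to the callbacks */

	i2c_set_adapdata(&chan->adapter, chan);

	/* raise both lines so the bus starts idle, as the driver does */
	chan->algo.setsda(chan, 1);
	chan->algo.setscl(chan, 1);
	udelay(20);

	return i2c_bit_add_bus(&chan->adapter);
}

On success the adapter behaves like any other I2C bus, and fb_ddc_read() (used at line 224 above) can probe a monitor's EDID over it.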
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/gr/
nv2a.c
15 struct nv20_gr_chan *chan; nv2a_gr_context_ctor() local
19 16, NVOBJ_FLAG_ZERO_ALLOC, &chan); nv2a_gr_context_ctor()
20 *pobject = nv_object(chan); nv2a_gr_context_ctor()
24 chan->chid = nvkm_fifo_chan(parent)->chid; nv2a_gr_context_ctor()
26 nv_wo32(chan, 0x0000, 0x00000001 | (chan->chid << 24)); nv2a_gr_context_ctor()
27 nv_wo32(chan, 0x033c, 0xffff0000); nv2a_gr_context_ctor()
28 nv_wo32(chan, 0x03a0, 0x0fff0000); nv2a_gr_context_ctor()
29 nv_wo32(chan, 0x03a4, 0x0fff0000); nv2a_gr_context_ctor()
30 nv_wo32(chan, 0x047c, 0x00000101); nv2a_gr_context_ctor()
31 nv_wo32(chan, 0x0490, 0x00000111); nv2a_gr_context_ctor()
32 nv_wo32(chan, 0x04a8, 0x44400000); nv2a_gr_context_ctor()
34 nv_wo32(chan, i, 0x00030303); nv2a_gr_context_ctor()
36 nv_wo32(chan, i, 0x00080000); nv2a_gr_context_ctor()
38 nv_wo32(chan, i, 0x01012000); nv2a_gr_context_ctor()
40 nv_wo32(chan, i, 0x000105b8); nv2a_gr_context_ctor()
42 nv_wo32(chan, i, 0x00080008); nv2a_gr_context_ctor()
44 nv_wo32(chan, i, 0x07ff0000); nv2a_gr_context_ctor()
45 nv_wo32(chan, 0x05a4, 0x4b7fffff); nv2a_gr_context_ctor()
46 nv_wo32(chan, 0x05fc, 0x00000001); nv2a_gr_context_ctor()
47 nv_wo32(chan, 0x0604, 0x00004000); nv2a_gr_context_ctor()
48 nv_wo32(chan, 0x0610, 0x00000001); nv2a_gr_context_ctor()
49 nv_wo32(chan, 0x0618, 0x00040000); nv2a_gr_context_ctor()
50 nv_wo32(chan, 0x061c, 0x00010000); nv2a_gr_context_ctor()
52 nv_wo32(chan, (i + 0), 0x10700ff9); nv2a_gr_context_ctor()
53 nv_wo32(chan, (i + 4), 0x0436086c); nv2a_gr_context_ctor()
54 nv_wo32(chan, (i + 8), 0x000c001b); nv2a_gr_context_ctor()
56 nv_wo32(chan, 0x269c, 0x3f800000); nv2a_gr_context_ctor()
57 nv_wo32(chan, 0x26b0, 0x3f800000); nv2a_gr_context_ctor()
58 nv_wo32(chan, 0x26dc, 0x40000000); nv2a_gr_context_ctor()
59 nv_wo32(chan, 0x26e0, 0x3f800000); nv2a_gr_context_ctor()
60 nv_wo32(chan, 0x26e4, 0x3f000000); nv2a_gr_context_ctor()
61 nv_wo32(chan, 0x26ec, 0x40000000); nv2a_gr_context_ctor()
62 nv_wo32(chan, 0x26f0, 0x3f800000); nv2a_gr_context_ctor()
63 nv_wo32(chan, 0x26f8, 0xbf800000); nv2a_gr_context_ctor()
64 nv_wo32(chan, 0x2700, 0xbf800000); nv2a_gr_context_ctor()
65 nv_wo32(chan, 0x3024, 0x000fe000); nv2a_gr_context_ctor()
66 nv_wo32(chan, 0x30a0, 0x000003f8); nv2a_gr_context_ctor()
67 nv_wo32(chan, 0x33fc, 0x002fe000); nv2a_gr_context_ctor()
69 nv_wo32(chan, i, 0x001c527c); nv2a_gr_context_ctor()
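The bare nv_wo32(chan, i, ...) hits in these nvXX.c constructors are loop bodies; the enclosing for headers do not contain "chan" and so were elided by the search. Each one sweeps a range of offsets in the zero-allocated context image, writing the same word every 4 bytes, roughly as below (the bounds here are placeholders, not copied from nv2a.c):

/* Illustrative shape of the elided loops: fill a span of the
 * per-channel context image with one 32-bit value, 4 bytes apart. */
for (i = 0x0454; i <= 0x04b0; i += 4)
	nv_wo32(chan, i, 0x00030303);
for (i = 0x04d4; i <= 0x04e0; i += 4)
	nv_wo32(chan, i, 0x00080000);

The surrounding ctor allocates the image with NVOBJ_FLAG_ZERO_ALLOC, stamps the channel id into the header word (chid << 24), and then seeds these defaults so a fresh channel starts from sane graphics state.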
nv25.c
39 struct nv20_gr_chan *chan; nv25_gr_context_ctor() local
43 16, NVOBJ_FLAG_ZERO_ALLOC, &chan); nv25_gr_context_ctor()
44 *pobject = nv_object(chan); nv25_gr_context_ctor()
48 chan->chid = nvkm_fifo_chan(parent)->chid; nv25_gr_context_ctor()
50 nv_wo32(chan, 0x0028, 0x00000001 | (chan->chid << 24)); nv25_gr_context_ctor()
51 nv_wo32(chan, 0x035c, 0xffff0000); nv25_gr_context_ctor()
52 nv_wo32(chan, 0x03c0, 0x0fff0000); nv25_gr_context_ctor()
53 nv_wo32(chan, 0x03c4, 0x0fff0000); nv25_gr_context_ctor()
54 nv_wo32(chan, 0x049c, 0x00000101); nv25_gr_context_ctor()
55 nv_wo32(chan, 0x04b0, 0x00000111); nv25_gr_context_ctor()
56 nv_wo32(chan, 0x04c8, 0x00000080); nv25_gr_context_ctor()
57 nv_wo32(chan, 0x04cc, 0xffff0000); nv25_gr_context_ctor()
58 nv_wo32(chan, 0x04d0, 0x00000001); nv25_gr_context_ctor()
59 nv_wo32(chan, 0x04e4, 0x44400000); nv25_gr_context_ctor()
60 nv_wo32(chan, 0x04fc, 0x4b800000); nv25_gr_context_ctor()
62 nv_wo32(chan, i, 0x00030303); nv25_gr_context_ctor()
64 nv_wo32(chan, i, 0x00080000); nv25_gr_context_ctor()
66 nv_wo32(chan, i, 0x01012000); nv25_gr_context_ctor()
68 nv_wo32(chan, i, 0x000105b8); nv25_gr_context_ctor()
70 nv_wo32(chan, i, 0x00080008); nv25_gr_context_ctor()
72 nv_wo32(chan, i, 0x07ff0000); nv25_gr_context_ctor()
73 nv_wo32(chan, 0x05e0, 0x4b7fffff); nv25_gr_context_ctor()
74 nv_wo32(chan, 0x0620, 0x00000080); nv25_gr_context_ctor()
75 nv_wo32(chan, 0x0624, 0x30201000); nv25_gr_context_ctor()
76 nv_wo32(chan, 0x0628, 0x70605040); nv25_gr_context_ctor()
77 nv_wo32(chan, 0x062c, 0xb0a09080); nv25_gr_context_ctor()
78 nv_wo32(chan, 0x0630, 0xf0e0d0c0); nv25_gr_context_ctor()
79 nv_wo32(chan, 0x0664, 0x00000001); nv25_gr_context_ctor()
80 nv_wo32(chan, 0x066c, 0x00004000); nv25_gr_context_ctor()
81 nv_wo32(chan, 0x0678, 0x00000001); nv25_gr_context_ctor()
82 nv_wo32(chan, 0x0680, 0x00040000); nv25_gr_context_ctor()
83 nv_wo32(chan, 0x0684, 0x00010000); nv25_gr_context_ctor()
85 nv_wo32(chan, (i + 0), 0x10700ff9); nv25_gr_context_ctor()
86 nv_wo32(chan, (i + 4), 0x0436086c); nv25_gr_context_ctor()
87 nv_wo32(chan, (i + 8), 0x000c001b); nv25_gr_context_ctor()
89 nv_wo32(chan, 0x2704, 0x3f800000); nv25_gr_context_ctor()
90 nv_wo32(chan, 0x2718, 0x3f800000); nv25_gr_context_ctor()
91 nv_wo32(chan, 0x2744, 0x40000000); nv25_gr_context_ctor()
92 nv_wo32(chan, 0x2748, 0x3f800000); nv25_gr_context_ctor()
93 nv_wo32(chan, 0x274c, 0x3f000000); nv25_gr_context_ctor()
94 nv_wo32(chan, 0x2754, 0x40000000); nv25_gr_context_ctor()
95 nv_wo32(chan, 0x2758, 0x3f800000); nv25_gr_context_ctor()
96 nv_wo32(chan, 0x2760, 0xbf800000); nv25_gr_context_ctor()
97 nv_wo32(chan, 0x2768, 0xbf800000); nv25_gr_context_ctor()
98 nv_wo32(chan, 0x308c, 0x000fe000); nv25_gr_context_ctor()
99 nv_wo32(chan, 0x3108, 0x000003f8); nv25_gr_context_ctor()
100 nv_wo32(chan, 0x3468, 0x002fe000); nv25_gr_context_ctor()
102 nv_wo32(chan, i, 0x001c527c); nv25_gr_context_ctor()
nv34.c
41 struct nv20_gr_chan *chan; nv34_gr_context_ctor() local
45 16, NVOBJ_FLAG_ZERO_ALLOC, &chan); nv34_gr_context_ctor()
46 *pobject = nv_object(chan); nv34_gr_context_ctor()
50 chan->chid = nvkm_fifo_chan(parent)->chid; nv34_gr_context_ctor()
52 nv_wo32(chan, 0x0028, 0x00000001 | (chan->chid << 24)); nv34_gr_context_ctor()
53 nv_wo32(chan, 0x040c, 0x01000101); nv34_gr_context_ctor()
54 nv_wo32(chan, 0x0420, 0x00000111); nv34_gr_context_ctor()
55 nv_wo32(chan, 0x0424, 0x00000060); nv34_gr_context_ctor()
56 nv_wo32(chan, 0x0440, 0x00000080); nv34_gr_context_ctor()
57 nv_wo32(chan, 0x0444, 0xffff0000); nv34_gr_context_ctor()
58 nv_wo32(chan, 0x0448, 0x00000001); nv34_gr_context_ctor()
59 nv_wo32(chan, 0x045c, 0x44400000); nv34_gr_context_ctor()
60 nv_wo32(chan, 0x0480, 0xffff0000); nv34_gr_context_ctor()
62 nv_wo32(chan, i, 0x0fff0000); nv34_gr_context_ctor()
63 nv_wo32(chan, 0x04e0, 0x00011100); nv34_gr_context_ctor()
65 nv_wo32(chan, i, 0x07ff0000); nv34_gr_context_ctor()
66 nv_wo32(chan, 0x0544, 0x4b7fffff); nv34_gr_context_ctor()
67 nv_wo32(chan, 0x057c, 0x00000080); nv34_gr_context_ctor()
68 nv_wo32(chan, 0x0580, 0x30201000); nv34_gr_context_ctor()
69 nv_wo32(chan, 0x0584, 0x70605040); nv34_gr_context_ctor()
70 nv_wo32(chan, 0x0588, 0xb8a89888); nv34_gr_context_ctor()
71 nv_wo32(chan, 0x058c, 0xf8e8d8c8); nv34_gr_context_ctor()
72 nv_wo32(chan, 0x05a0, 0xb0000000); nv34_gr_context_ctor()
74 nv_wo32(chan, i, 0x00010588); nv34_gr_context_ctor()
76 nv_wo32(chan, i, 0x00030303); nv34_gr_context_ctor()
78 nv_wo32(chan, i, 0x0008aae4); nv34_gr_context_ctor()
80 nv_wo32(chan, i, 0x01012000); nv34_gr_context_ctor()
82 nv_wo32(chan, i, 0x00080008); nv34_gr_context_ctor()
83 nv_wo32(chan, 0x0850, 0x00040000); nv34_gr_context_ctor()
84 nv_wo32(chan, 0x0854, 0x00010000); nv34_gr_context_ctor()
86 nv_wo32(chan, i, 0x00040004); nv34_gr_context_ctor()
88 nv_wo32(chan, i + 0, 0x10700ff9); nv34_gr_context_ctor()
89 nv_wo32(chan, i + 1, 0x0436086c); nv34_gr_context_ctor()
90 nv_wo32(chan, i + 2, 0x000c001b); nv34_gr_context_ctor()
93 nv_wo32(chan, i, 0x0000ffff); nv34_gr_context_ctor()
94 nv_wo32(chan, 0x2ae0, 0x3f800000); nv34_gr_context_ctor()
95 nv_wo32(chan, 0x2e9c, 0x3f800000); nv34_gr_context_ctor()
96 nv_wo32(chan, 0x2eb0, 0x3f800000); nv34_gr_context_ctor()
97 nv_wo32(chan, 0x2edc, 0x40000000); nv34_gr_context_ctor()
98 nv_wo32(chan, 0x2ee0, 0x3f800000); nv34_gr_context_ctor()
99 nv_wo32(chan, 0x2ee4, 0x3f000000); nv34_gr_context_ctor()
100 nv_wo32(chan, 0x2eec, 0x40000000); nv34_gr_context_ctor()
101 nv_wo32(chan, 0x2ef0, 0x3f800000); nv34_gr_context_ctor()
102 nv_wo32(chan, 0x2ef8, 0xbf800000); nv34_gr_context_ctor()
103 nv_wo32(chan, 0x2f00, 0xbf800000); nv34_gr_context_ctor()
nv35.c
41 struct nv20_gr_chan *chan; nv35_gr_context_ctor() local
45 16, NVOBJ_FLAG_ZERO_ALLOC, &chan); nv35_gr_context_ctor()
46 *pobject = nv_object(chan); nv35_gr_context_ctor()
50 chan->chid = nvkm_fifo_chan(parent)->chid; nv35_gr_context_ctor()
52 nv_wo32(chan, 0x0028, 0x00000001 | (chan->chid << 24)); nv35_gr_context_ctor()
53 nv_wo32(chan, 0x040c, 0x00000101); nv35_gr_context_ctor()
54 nv_wo32(chan, 0x0420, 0x00000111); nv35_gr_context_ctor()
55 nv_wo32(chan, 0x0424, 0x00000060); nv35_gr_context_ctor()
56 nv_wo32(chan, 0x0440, 0x00000080); nv35_gr_context_ctor()
57 nv_wo32(chan, 0x0444, 0xffff0000); nv35_gr_context_ctor()
58 nv_wo32(chan, 0x0448, 0x00000001); nv35_gr_context_ctor()
59 nv_wo32(chan, 0x045c, 0x44400000); nv35_gr_context_ctor()
60 nv_wo32(chan, 0x0488, 0xffff0000); nv35_gr_context_ctor()
62 nv_wo32(chan, i, 0x0fff0000); nv35_gr_context_ctor()
63 nv_wo32(chan, 0x04e8, 0x00011100); nv35_gr_context_ctor()
65 nv_wo32(chan, i, 0x07ff0000); nv35_gr_context_ctor()
66 nv_wo32(chan, 0x054c, 0x4b7fffff); nv35_gr_context_ctor()
67 nv_wo32(chan, 0x0588, 0x00000080); nv35_gr_context_ctor()
68 nv_wo32(chan, 0x058c, 0x30201000); nv35_gr_context_ctor()
69 nv_wo32(chan, 0x0590, 0x70605040); nv35_gr_context_ctor()
70 nv_wo32(chan, 0x0594, 0xb8a89888); nv35_gr_context_ctor()
71 nv_wo32(chan, 0x0598, 0xf8e8d8c8); nv35_gr_context_ctor()
72 nv_wo32(chan, 0x05ac, 0xb0000000); nv35_gr_context_ctor()
74 nv_wo32(chan, i, 0x00010588); nv35_gr_context_ctor()
76 nv_wo32(chan, i, 0x00030303); nv35_gr_context_ctor()
78 nv_wo32(chan, i, 0x0008aae4); nv35_gr_context_ctor()
80 nv_wo32(chan, i, 0x01012000); nv35_gr_context_ctor()
82 nv_wo32(chan, i, 0x00080008); nv35_gr_context_ctor()
83 nv_wo32(chan, 0x0860, 0x00040000); nv35_gr_context_ctor()
84 nv_wo32(chan, 0x0864, 0x00010000); nv35_gr_context_ctor()
86 nv_wo32(chan, i, 0x00040004); nv35_gr_context_ctor()
88 nv_wo32(chan, i + 0, 0x10700ff9); nv35_gr_context_ctor()
89 nv_wo32(chan, i + 4, 0x0436086c); nv35_gr_context_ctor()
90 nv_wo32(chan, i + 8, 0x000c001b); nv35_gr_context_ctor()
93 nv_wo32(chan, i, 0x0000ffff); nv35_gr_context_ctor()
94 nv_wo32(chan, 0x3450, 0x3f800000); nv35_gr_context_ctor()
95 nv_wo32(chan, 0x380c, 0x3f800000); nv35_gr_context_ctor()
96 nv_wo32(chan, 0x3820, 0x3f800000); nv35_gr_context_ctor()
97 nv_wo32(chan, 0x384c, 0x40000000); nv35_gr_context_ctor()
98 nv_wo32(chan, 0x3850, 0x3f800000); nv35_gr_context_ctor()
99 nv_wo32(chan, 0x3854, 0x3f000000); nv35_gr_context_ctor()
100 nv_wo32(chan, 0x385c, 0x40000000); nv35_gr_context_ctor()
101 nv_wo32(chan, 0x3860, 0x3f800000); nv35_gr_context_ctor()
102 nv_wo32(chan, 0x3868, 0xbf800000); nv35_gr_context_ctor()
103 nv_wo32(chan, 0x3870, 0xbf800000); nv35_gr_context_ctor()
nv30.c
43 struct nv20_gr_chan *chan; nv30_gr_context_ctor() local
47 16, NVOBJ_FLAG_ZERO_ALLOC, &chan); nv30_gr_context_ctor()
48 *pobject = nv_object(chan); nv30_gr_context_ctor()
52 chan->chid = nvkm_fifo_chan(parent)->chid; nv30_gr_context_ctor()
54 nv_wo32(chan, 0x0028, 0x00000001 | (chan->chid << 24)); nv30_gr_context_ctor()
55 nv_wo32(chan, 0x0410, 0x00000101); nv30_gr_context_ctor()
56 nv_wo32(chan, 0x0424, 0x00000111); nv30_gr_context_ctor()
57 nv_wo32(chan, 0x0428, 0x00000060); nv30_gr_context_ctor()
58 nv_wo32(chan, 0x0444, 0x00000080); nv30_gr_context_ctor()
59 nv_wo32(chan, 0x0448, 0xffff0000); nv30_gr_context_ctor()
60 nv_wo32(chan, 0x044c, 0x00000001); nv30_gr_context_ctor()
61 nv_wo32(chan, 0x0460, 0x44400000); nv30_gr_context_ctor()
62 nv_wo32(chan, 0x048c, 0xffff0000); nv30_gr_context_ctor()
64 nv_wo32(chan, i, 0x0fff0000); nv30_gr_context_ctor()
65 nv_wo32(chan, 0x04ec, 0x00011100); nv30_gr_context_ctor()
67 nv_wo32(chan, i, 0x07ff0000); nv30_gr_context_ctor()
68 nv_wo32(chan, 0x0550, 0x4b7fffff); nv30_gr_context_ctor()
69 nv_wo32(chan, 0x058c, 0x00000080); nv30_gr_context_ctor()
70 nv_wo32(chan, 0x0590, 0x30201000); nv30_gr_context_ctor()
71 nv_wo32(chan, 0x0594, 0x70605040); nv30_gr_context_ctor()
72 nv_wo32(chan, 0x0598, 0xb8a89888); nv30_gr_context_ctor()
73 nv_wo32(chan, 0x059c, 0xf8e8d8c8); nv30_gr_context_ctor()
74 nv_wo32(chan, 0x05b0, 0xb0000000); nv30_gr_context_ctor()
76 nv_wo32(chan, i, 0x00010588); nv30_gr_context_ctor()
78 nv_wo32(chan, i, 0x00030303); nv30_gr_context_ctor()
80 nv_wo32(chan, i, 0x0008aae4); nv30_gr_context_ctor()
82 nv_wo32(chan, i, 0x01012000); nv30_gr_context_ctor()
84 nv_wo32(chan, i, 0x00080008); nv30_gr_context_ctor()
85 nv_wo32(chan, 0x085c, 0x00040000); nv30_gr_context_ctor()
86 nv_wo32(chan, 0x0860, 0x00010000); nv30_gr_context_ctor()
88 nv_wo32(chan, i, 0x00040004); nv30_gr_context_ctor()
90 nv_wo32(chan, i + 0, 0x10700ff9); nv30_gr_context_ctor()
91 nv_wo32(chan, i + 1, 0x0436086c); nv30_gr_context_ctor()
92 nv_wo32(chan, i + 2, 0x000c001b); nv30_gr_context_ctor()
95 nv_wo32(chan, i, 0x0000ffff); nv30_gr_context_ctor()
96 nv_wo32(chan, 0x344c, 0x3f800000); nv30_gr_context_ctor()
97 nv_wo32(chan, 0x3808, 0x3f800000); nv30_gr_context_ctor()
98 nv_wo32(chan, 0x381c, 0x3f800000); nv30_gr_context_ctor()
99 nv_wo32(chan, 0x3848, 0x40000000); nv30_gr_context_ctor()
100 nv_wo32(chan, 0x384c, 0x3f800000); nv30_gr_context_ctor()
101 nv_wo32(chan, 0x3850, 0x3f000000); nv30_gr_context_ctor()
102 nv_wo32(chan, 0x3858, 0x40000000); nv30_gr_context_ctor()
103 nv_wo32(chan, 0x385c, 0x3f800000); nv30_gr_context_ctor()
104 nv_wo32(chan, 0x3864, 0xbf800000); nv30_gr_context_ctor()
105 nv_wo32(chan, 0x386c, 0xbf800000); nv30_gr_context_ctor()
nv20.c
44 struct nv20_gr_chan *chan; nv20_gr_context_ctor() local
48 16, NVOBJ_FLAG_ZERO_ALLOC, &chan); nv20_gr_context_ctor()
49 *pobject = nv_object(chan); nv20_gr_context_ctor()
53 chan->chid = nvkm_fifo_chan(parent)->chid; nv20_gr_context_ctor()
55 nv_wo32(chan, 0x0000, 0x00000001 | (chan->chid << 24)); nv20_gr_context_ctor()
56 nv_wo32(chan, 0x033c, 0xffff0000); nv20_gr_context_ctor()
57 nv_wo32(chan, 0x03a0, 0x0fff0000); nv20_gr_context_ctor()
58 nv_wo32(chan, 0x03a4, 0x0fff0000); nv20_gr_context_ctor()
59 nv_wo32(chan, 0x047c, 0x00000101); nv20_gr_context_ctor()
60 nv_wo32(chan, 0x0490, 0x00000111); nv20_gr_context_ctor()
61 nv_wo32(chan, 0x04a8, 0x44400000); nv20_gr_context_ctor()
63 nv_wo32(chan, i, 0x00030303); nv20_gr_context_ctor()
65 nv_wo32(chan, i, 0x00080000); nv20_gr_context_ctor()
67 nv_wo32(chan, i, 0x01012000); nv20_gr_context_ctor()
69 nv_wo32(chan, i, 0x000105b8); nv20_gr_context_ctor()
71 nv_wo32(chan, i, 0x00080008); nv20_gr_context_ctor()
73 nv_wo32(chan, i, 0x07ff0000); nv20_gr_context_ctor()
74 nv_wo32(chan, 0x05a4, 0x4b7fffff); nv20_gr_context_ctor()
75 nv_wo32(chan, 0x05fc, 0x00000001); nv20_gr_context_ctor()
76 nv_wo32(chan, 0x0604, 0x00004000); nv20_gr_context_ctor()
77 nv_wo32(chan, 0x0610, 0x00000001); nv20_gr_context_ctor()
78 nv_wo32(chan, 0x0618, 0x00040000); nv20_gr_context_ctor()
79 nv_wo32(chan, 0x061c, 0x00010000); nv20_gr_context_ctor()
81 nv_wo32(chan, (i + 0), 0x10700ff9); nv20_gr_context_ctor()
82 nv_wo32(chan, (i + 4), 0x0436086c); nv20_gr_context_ctor()
83 nv_wo32(chan, (i + 8), 0x000c001b); nv20_gr_context_ctor()
85 nv_wo32(chan, 0x281c, 0x3f800000); nv20_gr_context_ctor()
86 nv_wo32(chan, 0x2830, 0x3f800000); nv20_gr_context_ctor()
87 nv_wo32(chan, 0x285c, 0x40000000); nv20_gr_context_ctor()
88 nv_wo32(chan, 0x2860, 0x3f800000); nv20_gr_context_ctor()
89 nv_wo32(chan, 0x2864, 0x3f000000); nv20_gr_context_ctor()
90 nv_wo32(chan, 0x286c, 0x40000000); nv20_gr_context_ctor()
91 nv_wo32(chan, 0x2870, 0x3f800000); nv20_gr_context_ctor()
92 nv_wo32(chan, 0x2878, 0xbf800000); nv20_gr_context_ctor()
93 nv_wo32(chan, 0x2880, 0xbf800000); nv20_gr_context_ctor()
94 nv_wo32(chan, 0x34a4, 0x000fe000); nv20_gr_context_ctor()
95 nv_wo32(chan, 0x3530, 0x000003f8); nv20_gr_context_ctor()
96 nv_wo32(chan, 0x3540, 0x002fe000); nv20_gr_context_ctor()
98 nv_wo32(chan, i, 0x001c527c); nv20_gr_context_ctor()
106 struct nv20_gr_chan *chan = (void *)object; nv20_gr_context_init() local
109 ret = nvkm_gr_context_init(&chan->base); nv20_gr_context_init()
113 nv_wo32(priv->ctxtab, chan->chid * 4, nv_gpuobj(chan)->addr >> 4); nv20_gr_context_init()
121 struct nv20_gr_chan *chan = (void *)object; nv20_gr_context_fini() local
127 if (chan->chid == chid) { nv20_gr_context_fini()
128 nv_wr32(priv, 0x400784, nv_gpuobj(chan)->addr >> 4); nv20_gr_context_fini()
136 nv_wo32(priv->ctxtab, chan->chid * 4, 0x00000000); nv20_gr_context_fini()
137 return nvkm_gr_context_fini(&chan->base, suspend); nv20_gr_context_fini()
nv10.c
391 struct nv10_gr_chan *chan[32]; member in struct:nv10_gr_priv
406 nv10_gr_priv(struct nv10_gr_chan *chan) nv10_gr_priv() argument
408 return (void *)nv_object(chan)->engine; nv10_gr_priv()
481 struct nv10_gr_chan *chan = (void *)object->parent; nv17_gr_mthd_lma_window() local
482 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv17_gr_mthd_lma_window()
483 struct pipe_state *pipe = &chan->pipe_state; nv17_gr_mthd_lma_window()
489 chan->lma_window[(mthd - 0x1638) / 4] = data; nv17_gr_mthd_lma_window()
499 PIPE_RESTORE(priv, chan->lma_window, 0x6790); nv17_gr_mthd_lma_window()
558 struct nv10_gr_chan *chan = (void *)object->parent; nv17_gr_mthd_lma_enable() local
559 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv17_gr_mthd_lma_enable()
608 struct nv10_gr_chan *chan = NULL; nv10_gr_channel() local
611 if (chid < ARRAY_SIZE(priv->chan)) nv10_gr_channel()
612 chan = priv->chan[chid]; nv10_gr_channel()
614 return chan; nv10_gr_channel()
618 nv10_gr_save_pipe(struct nv10_gr_chan *chan) nv10_gr_save_pipe() argument
620 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv10_gr_save_pipe()
621 struct pipe_state *pipe = &chan->pipe_state; nv10_gr_save_pipe()
636 nv10_gr_load_pipe(struct nv10_gr_chan *chan) nv10_gr_load_pipe() argument
638 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv10_gr_load_pipe()
639 struct pipe_state *pipe = &chan->pipe_state; nv10_gr_load_pipe()
686 nv10_gr_create_pipe(struct nv10_gr_chan *chan) nv10_gr_create_pipe() argument
688 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv10_gr_create_pipe()
689 struct pipe_state *pipe_state = &chan->pipe_state; nv10_gr_create_pipe()
865 nv10_gr_load_dma_vtxbuf(struct nv10_gr_chan *chan, int chid, u32 inst) nv10_gr_load_dma_vtxbuf() argument
867 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv10_gr_load_dma_vtxbuf()
935 nv10_gr_load_context(struct nv10_gr_chan *chan, int chid) nv10_gr_load_context() argument
937 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv10_gr_load_context()
942 nv_wr32(priv, nv10_gr_ctx_regs[i], chan->nv10[i]); nv10_gr_load_context()
947 nv_wr32(priv, nv17_gr_ctx_regs[i], chan->nv17[i]); nv10_gr_load_context()
950 nv10_gr_load_pipe(chan); nv10_gr_load_context()
953 nv10_gr_load_dma_vtxbuf(chan, chid, inst); nv10_gr_load_context()
962 nv10_gr_unload_context(struct nv10_gr_chan *chan) nv10_gr_unload_context() argument
964 struct nv10_gr_priv *priv = nv10_gr_priv(chan); nv10_gr_unload_context()
968 chan->nv10[i] = nv_rd32(priv, nv10_gr_ctx_regs[i]); nv10_gr_unload_context()
973 chan->nv17[i] = nv_rd32(priv, nv17_gr_ctx_regs[i]); nv10_gr_unload_context()
976 nv10_gr_save_pipe(chan); nv10_gr_unload_context()
1001 next = priv->chan[chid]; nv10_gr_context_switch()
1011 chan->nv10[offset] = val; \
1017 chan->nv17[offset] = val; \
1027 struct nv10_gr_chan *chan; nv10_gr_context_ctor() local
1031 ret = nvkm_object_create(parent, engine, oclass, 0, &chan); nv10_gr_context_ctor()
1032 *pobject = nv_object(chan); nv10_gr_context_ctor()
1037 if (priv->chan[fifo->chid]) { nv10_gr_context_ctor()
1038 *pobject = nv_object(priv->chan[fifo->chid]); nv10_gr_context_ctor()
1041 nvkm_object_destroy(&chan->base); nv10_gr_context_ctor()
1063 NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->chid << 24); nv10_gr_context_ctor()
1065 nv10_gr_create_pipe(chan); nv10_gr_context_ctor()
1067 priv->chan[fifo->chid] = chan; nv10_gr_context_ctor()
1068 chan->chid = fifo->chid; nv10_gr_context_ctor()
1077 struct nv10_gr_chan *chan = (void *)object; nv10_gr_context_dtor() local
1081 priv->chan[chan->chid] = NULL; nv10_gr_context_dtor()
1084 nvkm_object_destroy(&chan->base); nv10_gr_context_dtor()
1091 struct nv10_gr_chan *chan = (void *)object; nv10_gr_context_fini() local
1096 if (nv10_gr_channel(priv) == chan) nv10_gr_context_fini()
1097 nv10_gr_unload_context(chan); nv10_gr_context_fini()
1101 return nvkm_object_fini(&chan->base, suspend); nv10_gr_context_fini()
1155 struct nv10_gr_chan *chan = NULL; nv10_gr_intr() local
1171 chan = priv->chan[chid]; nv10_gr_intr()
1172 if (chan) nv10_gr_intr()
1173 namedb = (void *)nv_pclass(nv_object(chan), NV_NAMEDB_CLASS); nv10_gr_intr()
1177 if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) { nv10_gr_intr()
1204 chid, nvkm_client_name(chan), subc, class, mthd, nv10_gr_intr()
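Unlike the nv20+ parts above, nv10 keeps graphics context purely in software: nv10_gr_priv holds a chan[32] array indexed by fifo channel id, the ctor claims a slot (returning the existing object if the chid is already taken), and the interrupt and context-switch paths look channels back up by chid. The bookkeeping reduces to this standalone sketch (invented names, not the driver's API):

#include <stddef.h>

#define DEMO_NR_CHAN 32

struct demo_chan {
	int chid;
	/* saved PGRAPH register state would live here */
};

struct demo_priv {
	struct demo_chan *chan[DEMO_NR_CHAN];	/* indexed by fifo chid */
};

/* Claim the slot for a new channel; reject a duplicate chid. */
static int demo_chan_ctor(struct demo_priv *priv,
			  struct demo_chan *ch, int chid)
{
	if (chid < 0 || chid >= DEMO_NR_CHAN || priv->chan[chid])
		return -1;
	ch->chid = chid;
	priv->chan[chid] = ch;
	return 0;
}

/* Look a channel up again from the IRQ or context-switch path. */
static struct demo_chan *demo_chan_lookup(struct demo_priv *priv, int chid)
{
	if (chid < 0 || chid >= DEMO_NR_CHAN)
		return NULL;
	return priv->chan[chid];
}

The dtor clears the slot (priv->chan[chan->chid] = NULL, line 1081 above), and fini unloads the hardware context first if this channel is the one currently live on PGRAPH.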
/linux-4.1.27/drivers/video/fbdev/nvidia/
nv_i2c.c
30 struct nvidia_i2c_chan *chan = data; nvidia_gpio_setscl() local
31 struct nvidia_par *par = chan->par; nvidia_gpio_setscl()
34 val = NVReadCrtc(par, chan->ddc_base + 1) & 0xf0; nvidia_gpio_setscl()
41 NVWriteCrtc(par, chan->ddc_base + 1, val | 0x01); nvidia_gpio_setscl()
46 struct nvidia_i2c_chan *chan = data; nvidia_gpio_setsda() local
47 struct nvidia_par *par = chan->par; nvidia_gpio_setsda()
50 val = NVReadCrtc(par, chan->ddc_base + 1) & 0xf0; nvidia_gpio_setsda()
57 NVWriteCrtc(par, chan->ddc_base + 1, val | 0x01); nvidia_gpio_setsda()
62 struct nvidia_i2c_chan *chan = data; nvidia_gpio_getscl() local
63 struct nvidia_par *par = chan->par; nvidia_gpio_getscl()
66 if (NVReadCrtc(par, chan->ddc_base) & 0x04) nvidia_gpio_getscl()
74 struct nvidia_i2c_chan *chan = data; nvidia_gpio_getsda() local
75 struct nvidia_par *par = chan->par; nvidia_gpio_getsda()
78 if (NVReadCrtc(par, chan->ddc_base) & 0x08) nvidia_gpio_getsda()
84 static int nvidia_setup_i2c_bus(struct nvidia_i2c_chan *chan, const char *name, nvidia_setup_i2c_bus() argument
89 strcpy(chan->adapter.name, name); nvidia_setup_i2c_bus()
90 chan->adapter.owner = THIS_MODULE; nvidia_setup_i2c_bus()
91 chan->adapter.class = i2c_class; nvidia_setup_i2c_bus()
92 chan->adapter.algo_data = &chan->algo; nvidia_setup_i2c_bus()
93 chan->adapter.dev.parent = &chan->par->pci_dev->dev; nvidia_setup_i2c_bus()
94 chan->algo.setsda = nvidia_gpio_setsda; nvidia_setup_i2c_bus()
95 chan->algo.setscl = nvidia_gpio_setscl; nvidia_setup_i2c_bus()
96 chan->algo.getsda = nvidia_gpio_getsda; nvidia_setup_i2c_bus()
97 chan->algo.getscl = nvidia_gpio_getscl; nvidia_setup_i2c_bus()
98 chan->algo.udelay = 40; nvidia_setup_i2c_bus()
99 chan->algo.timeout = msecs_to_jiffies(2); nvidia_setup_i2c_bus()
100 chan->algo.data = chan; nvidia_setup_i2c_bus()
102 i2c_set_adapdata(&chan->adapter, chan); nvidia_setup_i2c_bus()
105 nvidia_gpio_setsda(chan, 1); nvidia_setup_i2c_bus()
106 nvidia_gpio_setscl(chan, 1); nvidia_setup_i2c_bus()
109 rc = i2c_bit_add_bus(&chan->adapter); nvidia_setup_i2c_bus()
111 dev_dbg(&chan->par->pci_dev->dev, nvidia_setup_i2c_bus()
114 dev_warn(&chan->par->pci_dev->dev, nvidia_setup_i2c_bus()
116 chan->par = NULL; nvidia_setup_i2c_bus()
124 par->chan[0].par = par; nvidia_create_i2c_busses()
125 par->chan[1].par = par; nvidia_create_i2c_busses()
126 par->chan[2].par = par; nvidia_create_i2c_busses()
128 par->chan[0].ddc_base = (par->reverse_i2c) ? 0x36 : 0x3e; nvidia_create_i2c_busses()
129 nvidia_setup_i2c_bus(&par->chan[0], "nvidia #0", nvidia_create_i2c_busses()
132 par->chan[1].ddc_base = (par->reverse_i2c) ? 0x3e : 0x36; nvidia_create_i2c_busses()
133 nvidia_setup_i2c_bus(&par->chan[1], "nvidia #1", nvidia_create_i2c_busses()
136 par->chan[2].ddc_base = 0x50; nvidia_create_i2c_busses()
137 nvidia_setup_i2c_bus(&par->chan[2], "nvidia #2", 0); nvidia_create_i2c_busses()
145 if (!par->chan[i].par) nvidia_delete_i2c_busses()
147 i2c_del_adapter(&par->chan[i].adapter); nvidia_delete_i2c_busses()
148 par->chan[i].par = NULL; nvidia_delete_i2c_busses()
157 if (par->chan[conn - 1].par) nvidia_probe_i2c_connector()
158 edid = fb_ddc_read(&par->chan[conn - 1].adapter); nvidia_probe_i2c_connector()
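As with savagefb, probing a connector ends in fb_ddc_read(), which drives a DDC transaction over the registered adapter and returns a freshly allocated EDID block, or NULL if nothing answered. A hedged usage sketch (demo_probe_edid is illustrative; fb_ddc_read() is the fbdev core helper the hits above call):

#include <linux/errno.h>
#include <linux/fb.h>
#include <linux/i2c.h>
#include <linux/slab.h>

/* The caller owns (and must kfree) the buffer fb_ddc_read() returns. */
static int demo_probe_edid(struct i2c_adapter *adapter)
{
	u8 *edid = fb_ddc_read(adapter);

	if (!edid)
		return -ENODEV;		/* no monitor on this bus */

	/* ... hand the block to fb_edid_to_monspecs() or similar ... */
	kfree(edid);
	return 0;
}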
/linux-4.1.27/drivers/gpu/drm/nouveau/
nv50_fbcon.c
34 struct nouveau_channel *chan = drm->channel; nv50_fbcon_fillrect() local
37 ret = RING_SPACE(chan, rect->rop == ROP_COPY ? 7 : 11); nv50_fbcon_fillrect()
42 BEGIN_NV04(chan, NvSub2D, 0x02ac, 1); nv50_fbcon_fillrect()
43 OUT_RING(chan, 1); nv50_fbcon_fillrect()
45 BEGIN_NV04(chan, NvSub2D, 0x0588, 1); nv50_fbcon_fillrect()
48 OUT_RING(chan, ((uint32_t *)info->pseudo_palette)[rect->color]); nv50_fbcon_fillrect()
50 OUT_RING(chan, rect->color); nv50_fbcon_fillrect()
51 BEGIN_NV04(chan, NvSub2D, 0x0600, 4); nv50_fbcon_fillrect()
52 OUT_RING(chan, rect->dx); nv50_fbcon_fillrect()
53 OUT_RING(chan, rect->dy); nv50_fbcon_fillrect()
54 OUT_RING(chan, rect->dx + rect->width); nv50_fbcon_fillrect()
55 OUT_RING(chan, rect->dy + rect->height); nv50_fbcon_fillrect()
57 BEGIN_NV04(chan, NvSub2D, 0x02ac, 1); nv50_fbcon_fillrect()
58 OUT_RING(chan, 3); nv50_fbcon_fillrect()
60 FIRE_RING(chan); nv50_fbcon_fillrect()
69 struct nouveau_channel *chan = drm->channel; nv50_fbcon_copyarea() local
72 ret = RING_SPACE(chan, 12); nv50_fbcon_copyarea()
76 BEGIN_NV04(chan, NvSub2D, 0x0110, 1); nv50_fbcon_copyarea()
77 OUT_RING(chan, 0); nv50_fbcon_copyarea()
78 BEGIN_NV04(chan, NvSub2D, 0x08b0, 4); nv50_fbcon_copyarea()
79 OUT_RING(chan, region->dx); nv50_fbcon_copyarea()
80 OUT_RING(chan, region->dy); nv50_fbcon_copyarea()
81 OUT_RING(chan, region->width); nv50_fbcon_copyarea()
82 OUT_RING(chan, region->height); nv50_fbcon_copyarea()
83 BEGIN_NV04(chan, NvSub2D, 0x08d0, 4); nv50_fbcon_copyarea()
84 OUT_RING(chan, 0); nv50_fbcon_copyarea()
85 OUT_RING(chan, region->sx); nv50_fbcon_copyarea()
86 OUT_RING(chan, 0); nv50_fbcon_copyarea()
87 OUT_RING(chan, region->sy); nv50_fbcon_copyarea()
88 FIRE_RING(chan); nv50_fbcon_copyarea()
97 struct nouveau_channel *chan = drm->channel; nv50_fbcon_imageblit() local
106 ret = RING_SPACE(chan, 11); nv50_fbcon_imageblit()
110 BEGIN_NV04(chan, NvSub2D, 0x0814, 2); nv50_fbcon_imageblit()
113 OUT_RING(chan, palette[image->bg_color] | mask); nv50_fbcon_imageblit()
114 OUT_RING(chan, palette[image->fg_color] | mask); nv50_fbcon_imageblit()
116 OUT_RING(chan, image->bg_color); nv50_fbcon_imageblit()
117 OUT_RING(chan, image->fg_color); nv50_fbcon_imageblit()
119 BEGIN_NV04(chan, NvSub2D, 0x0838, 2); nv50_fbcon_imageblit()
120 OUT_RING(chan, image->width); nv50_fbcon_imageblit()
121 OUT_RING(chan, image->height); nv50_fbcon_imageblit()
122 BEGIN_NV04(chan, NvSub2D, 0x0850, 4); nv50_fbcon_imageblit()
123 OUT_RING(chan, 0); nv50_fbcon_imageblit()
124 OUT_RING(chan, image->dx); nv50_fbcon_imageblit()
125 OUT_RING(chan, 0); nv50_fbcon_imageblit()
126 OUT_RING(chan, image->dy); nv50_fbcon_imageblit()
132 ret = RING_SPACE(chan, push + 1); nv50_fbcon_imageblit()
138 BEGIN_NI04(chan, NvSub2D, 0x0860, push); nv50_fbcon_imageblit()
139 OUT_RINGp(chan, data, push); nv50_fbcon_imageblit()
143 FIRE_RING(chan); nv50_fbcon_imageblit()
154 struct nouveau_channel *chan = drm->channel; nv50_fbcon_accel_init() local
184 ret = nvif_object_init(chan->object, NULL, 0x502d, 0x502d, NULL, 0, nv50_fbcon_accel_init()
189 ret = RING_SPACE(chan, 59); nv50_fbcon_accel_init()
195 BEGIN_NV04(chan, NvSub2D, 0x0000, 1); nv50_fbcon_accel_init()
196 OUT_RING(chan, nfbdev->twod.handle); nv50_fbcon_accel_init()
197 BEGIN_NV04(chan, NvSub2D, 0x0184, 3); nv50_fbcon_accel_init()
198 OUT_RING(chan, chan->vram.handle); nv50_fbcon_accel_init()
199 OUT_RING(chan, chan->vram.handle); nv50_fbcon_accel_init()
200 OUT_RING(chan, chan->vram.handle); nv50_fbcon_accel_init()
201 BEGIN_NV04(chan, NvSub2D, 0x0290, 1); nv50_fbcon_accel_init()
202 OUT_RING(chan, 0); nv50_fbcon_accel_init()
203 BEGIN_NV04(chan, NvSub2D, 0x0888, 1); nv50_fbcon_accel_init()
204 OUT_RING(chan, 1); nv50_fbcon_accel_init()
205 BEGIN_NV04(chan, NvSub2D, 0x02ac, 1); nv50_fbcon_accel_init()
206 OUT_RING(chan, 3); nv50_fbcon_accel_init()
207 BEGIN_NV04(chan, NvSub2D, 0x02a0, 1); nv50_fbcon_accel_init()
208 OUT_RING(chan, 0x55); nv50_fbcon_accel_init()
209 BEGIN_NV04(chan, NvSub2D, 0x08c0, 4); nv50_fbcon_accel_init()
210 OUT_RING(chan, 0); nv50_fbcon_accel_init()
211 OUT_RING(chan, 1); nv50_fbcon_accel_init()
212 OUT_RING(chan, 0); nv50_fbcon_accel_init()
213 OUT_RING(chan, 1); nv50_fbcon_accel_init()
214 BEGIN_NV04(chan, NvSub2D, 0x0580, 2); nv50_fbcon_accel_init()
215 OUT_RING(chan, 4); nv50_fbcon_accel_init()
216 OUT_RING(chan, format); nv50_fbcon_accel_init()
217 BEGIN_NV04(chan, NvSub2D, 0x02e8, 2); nv50_fbcon_accel_init()
218 OUT_RING(chan, 2); nv50_fbcon_accel_init()
219 OUT_RING(chan, 1); nv50_fbcon_accel_init()
220 BEGIN_NV04(chan, NvSub2D, 0x0804, 1); nv50_fbcon_accel_init()
221 OUT_RING(chan, format); nv50_fbcon_accel_init()
222 BEGIN_NV04(chan, NvSub2D, 0x0800, 1); nv50_fbcon_accel_init()
223 OUT_RING(chan, 1); nv50_fbcon_accel_init()
224 BEGIN_NV04(chan, NvSub2D, 0x0808, 3); nv50_fbcon_accel_init()
225 OUT_RING(chan, 0); nv50_fbcon_accel_init()
226 OUT_RING(chan, 0); nv50_fbcon_accel_init()
227 OUT_RING(chan, 1); nv50_fbcon_accel_init()
228 BEGIN_NV04(chan, NvSub2D, 0x081c, 1); nv50_fbcon_accel_init()
229 OUT_RING(chan, 1); nv50_fbcon_accel_init()
230 BEGIN_NV04(chan, NvSub2D, 0x0840, 4); nv50_fbcon_accel_init()
231 OUT_RING(chan, 0); nv50_fbcon_accel_init()
232 OUT_RING(chan, 1); nv50_fbcon_accel_init()
233 OUT_RING(chan, 0); nv50_fbcon_accel_init()
234 OUT_RING(chan, 1); nv50_fbcon_accel_init()
235 BEGIN_NV04(chan, NvSub2D, 0x0200, 2); nv50_fbcon_accel_init()
236 OUT_RING(chan, format); nv50_fbcon_accel_init()
237 OUT_RING(chan, 1); nv50_fbcon_accel_init()
238 BEGIN_NV04(chan, NvSub2D, 0x0214, 5); nv50_fbcon_accel_init()
239 OUT_RING(chan, info->fix.line_length); nv50_fbcon_accel_init()
240 OUT_RING(chan, info->var.xres_virtual); nv50_fbcon_accel_init()
241 OUT_RING(chan, info->var.yres_virtual); nv50_fbcon_accel_init()
242 OUT_RING(chan, upper_32_bits(fb->vma.offset)); nv50_fbcon_accel_init()
243 OUT_RING(chan, lower_32_bits(fb->vma.offset)); nv50_fbcon_accel_init()
244 BEGIN_NV04(chan, NvSub2D, 0x0230, 2); nv50_fbcon_accel_init()
245 OUT_RING(chan, format); nv50_fbcon_accel_init()
246 OUT_RING(chan, 1); nv50_fbcon_accel_init()
247 BEGIN_NV04(chan, NvSub2D, 0x0244, 5); nv50_fbcon_accel_init()
248 OUT_RING(chan, info->fix.line_length); nv50_fbcon_accel_init()
249 OUT_RING(chan, info->var.xres_virtual); nv50_fbcon_accel_init()
250 OUT_RING(chan, info->var.yres_virtual); nv50_fbcon_accel_init()
251 OUT_RING(chan, upper_32_bits(fb->vma.offset)); nv50_fbcon_accel_init()
252 OUT_RING(chan, lower_32_bits(fb->vma.offset)); nv50_fbcon_accel_init()
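Every fbcon hit above follows nouveau's pushbuf discipline: reserve ring space with RING_SPACE(), emit a method header via BEGIN_NV04()/BEGIN_NVC0() naming a subchannel, a method offset and a word count, follow it with exactly that many OUT_RING() words, then kick the GPU with FIRE_RING(). One such submission, condensed (0x0600 is the rectangle method the nv50 code above uses; the helper itself is illustrative):

/* Sketch: emit one solid rectangle to the 2D engine.
 * 1 header word + 4 data words = 5 ring slots. */
static int demo_fillrect(struct nouveau_channel *chan,
			 u32 x, u32 y, u32 w, u32 h)
{
	int ret = RING_SPACE(chan, 5);
	if (ret)
		return ret;		/* ring stayed full; give up */

	BEGIN_NV04(chan, NvSub2D, 0x0600, 4);
	OUT_RING(chan, x);		/* left   */
	OUT_RING(chan, y);		/* top    */
	OUT_RING(chan, x + w);		/* right  */
	OUT_RING(chan, y + h);		/* bottom */
	FIRE_RING(chan);		/* publish the new put pointer */
	return 0;
}

The accel_init routines are the same pattern writ large: one long reserved stretch that binds object handles and seeds dozens of state methods before the first blit.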
nvc0_fbcon.c
34 struct nouveau_channel *chan = drm->channel; nvc0_fbcon_fillrect() local
37 ret = RING_SPACE(chan, rect->rop == ROP_COPY ? 7 : 11); nvc0_fbcon_fillrect()
42 BEGIN_NVC0(chan, NvSub2D, 0x02ac, 1); nvc0_fbcon_fillrect()
43 OUT_RING (chan, 1); nvc0_fbcon_fillrect()
45 BEGIN_NVC0(chan, NvSub2D, 0x0588, 1); nvc0_fbcon_fillrect()
48 OUT_RING (chan, ((uint32_t *)info->pseudo_palette)[rect->color]); nvc0_fbcon_fillrect()
50 OUT_RING (chan, rect->color); nvc0_fbcon_fillrect()
51 BEGIN_NVC0(chan, NvSub2D, 0x0600, 4); nvc0_fbcon_fillrect()
52 OUT_RING (chan, rect->dx); nvc0_fbcon_fillrect()
53 OUT_RING (chan, rect->dy); nvc0_fbcon_fillrect()
54 OUT_RING (chan, rect->dx + rect->width); nvc0_fbcon_fillrect()
55 OUT_RING (chan, rect->dy + rect->height); nvc0_fbcon_fillrect()
57 BEGIN_NVC0(chan, NvSub2D, 0x02ac, 1); nvc0_fbcon_fillrect()
58 OUT_RING (chan, 3); nvc0_fbcon_fillrect()
60 FIRE_RING(chan); nvc0_fbcon_fillrect()
69 struct nouveau_channel *chan = drm->channel; nvc0_fbcon_copyarea() local
72 ret = RING_SPACE(chan, 12); nvc0_fbcon_copyarea()
76 BEGIN_NVC0(chan, NvSub2D, 0x0110, 1); nvc0_fbcon_copyarea()
77 OUT_RING (chan, 0); nvc0_fbcon_copyarea()
78 BEGIN_NVC0(chan, NvSub2D, 0x08b0, 4); nvc0_fbcon_copyarea()
79 OUT_RING (chan, region->dx); nvc0_fbcon_copyarea()
80 OUT_RING (chan, region->dy); nvc0_fbcon_copyarea()
81 OUT_RING (chan, region->width); nvc0_fbcon_copyarea()
82 OUT_RING (chan, region->height); nvc0_fbcon_copyarea()
83 BEGIN_NVC0(chan, NvSub2D, 0x08d0, 4); nvc0_fbcon_copyarea()
84 OUT_RING (chan, 0); nvc0_fbcon_copyarea()
85 OUT_RING (chan, region->sx); nvc0_fbcon_copyarea()
86 OUT_RING (chan, 0); nvc0_fbcon_copyarea()
87 OUT_RING (chan, region->sy); nvc0_fbcon_copyarea()
88 FIRE_RING(chan); nvc0_fbcon_copyarea()
97 struct nouveau_channel *chan = drm->channel; nvc0_fbcon_imageblit() local
106 ret = RING_SPACE(chan, 11); nvc0_fbcon_imageblit()
110 BEGIN_NVC0(chan, NvSub2D, 0x0814, 2); nvc0_fbcon_imageblit()
113 OUT_RING (chan, palette[image->bg_color] | mask); nvc0_fbcon_imageblit()
114 OUT_RING (chan, palette[image->fg_color] | mask); nvc0_fbcon_imageblit()
116 OUT_RING (chan, image->bg_color); nvc0_fbcon_imageblit()
117 OUT_RING (chan, image->fg_color); nvc0_fbcon_imageblit()
119 BEGIN_NVC0(chan, NvSub2D, 0x0838, 2); nvc0_fbcon_imageblit()
120 OUT_RING (chan, image->width); nvc0_fbcon_imageblit()
121 OUT_RING (chan, image->height); nvc0_fbcon_imageblit()
122 BEGIN_NVC0(chan, NvSub2D, 0x0850, 4); nvc0_fbcon_imageblit()
123 OUT_RING (chan, 0); nvc0_fbcon_imageblit()
124 OUT_RING (chan, image->dx); nvc0_fbcon_imageblit()
125 OUT_RING (chan, 0); nvc0_fbcon_imageblit()
126 OUT_RING (chan, image->dy); nvc0_fbcon_imageblit()
132 ret = RING_SPACE(chan, push + 1); nvc0_fbcon_imageblit()
138 BEGIN_NIC0(chan, NvSub2D, 0x0860, push); nvc0_fbcon_imageblit()
139 OUT_RINGp(chan, data, push); nvc0_fbcon_imageblit()
143 FIRE_RING(chan); nvc0_fbcon_imageblit()
154 struct nouveau_channel *chan = drm->channel; nvc0_fbcon_accel_init() local
157 ret = nvif_object_init(chan->object, NULL, 0x902d, 0x902d, NULL, 0, nvc0_fbcon_accel_init()
189 ret = RING_SPACE(chan, 60); nvc0_fbcon_accel_init()
196 BEGIN_NVC0(chan, NvSub2D, 0x0000, 1); nvc0_fbcon_accel_init()
197 OUT_RING (chan, nfbdev->twod.handle); nvc0_fbcon_accel_init()
198 BEGIN_NVC0(chan, NvSub2D, 0x0290, 1); nvc0_fbcon_accel_init()
199 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
200 BEGIN_NVC0(chan, NvSub2D, 0x0888, 1); nvc0_fbcon_accel_init()
201 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
202 BEGIN_NVC0(chan, NvSub2D, 0x02ac, 1); nvc0_fbcon_accel_init()
203 OUT_RING (chan, 3); nvc0_fbcon_accel_init()
204 BEGIN_NVC0(chan, NvSub2D, 0x02a0, 1); nvc0_fbcon_accel_init()
205 OUT_RING (chan, 0x55); nvc0_fbcon_accel_init()
206 BEGIN_NVC0(chan, NvSub2D, 0x08c0, 4); nvc0_fbcon_accel_init()
207 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
208 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
209 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
210 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
211 BEGIN_NVC0(chan, NvSub2D, 0x0580, 2); nvc0_fbcon_accel_init()
212 OUT_RING (chan, 4); nvc0_fbcon_accel_init()
213 OUT_RING (chan, format); nvc0_fbcon_accel_init()
214 BEGIN_NVC0(chan, NvSub2D, 0x02e8, 2); nvc0_fbcon_accel_init()
215 OUT_RING (chan, 2); nvc0_fbcon_accel_init()
216 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
218 BEGIN_NVC0(chan, NvSub2D, 0x0804, 1); nvc0_fbcon_accel_init()
219 OUT_RING (chan, format); nvc0_fbcon_accel_init()
220 BEGIN_NVC0(chan, NvSub2D, 0x0800, 1); nvc0_fbcon_accel_init()
221 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
222 BEGIN_NVC0(chan, NvSub2D, 0x0808, 3); nvc0_fbcon_accel_init()
223 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
224 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
225 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
226 BEGIN_NVC0(chan, NvSub2D, 0x081c, 1); nvc0_fbcon_accel_init()
227 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
228 BEGIN_NVC0(chan, NvSub2D, 0x0840, 4); nvc0_fbcon_accel_init()
229 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
230 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
231 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
232 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
233 BEGIN_NVC0(chan, NvSub2D, 0x0200, 10); nvc0_fbcon_accel_init()
234 OUT_RING (chan, format); nvc0_fbcon_accel_init()
235 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
236 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
237 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
238 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
239 OUT_RING (chan, info->fix.line_length); nvc0_fbcon_accel_init()
240 OUT_RING (chan, info->var.xres_virtual); nvc0_fbcon_accel_init()
241 OUT_RING (chan, info->var.yres_virtual); nvc0_fbcon_accel_init()
242 OUT_RING (chan, upper_32_bits(fb->vma.offset)); nvc0_fbcon_accel_init()
243 OUT_RING (chan, lower_32_bits(fb->vma.offset)); nvc0_fbcon_accel_init()
244 BEGIN_NVC0(chan, NvSub2D, 0x0230, 10); nvc0_fbcon_accel_init()
245 OUT_RING (chan, format); nvc0_fbcon_accel_init()
246 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
247 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
248 OUT_RING (chan, 1); nvc0_fbcon_accel_init()
249 OUT_RING (chan, 0); nvc0_fbcon_accel_init()
250 OUT_RING (chan, info->fix.line_length); nvc0_fbcon_accel_init()
251 OUT_RING (chan, info->var.xres_virtual); nvc0_fbcon_accel_init()
252 OUT_RING (chan, info->var.yres_virtual); nvc0_fbcon_accel_init()
253 OUT_RING (chan, upper_32_bits(fb->vma.offset)); nvc0_fbcon_accel_init()
254 OUT_RING (chan, lower_32_bits(fb->vma.offset)); nvc0_fbcon_accel_init()
255 FIRE_RING (chan); nvc0_fbcon_accel_init()
nv04_fbcon.c
34 struct nouveau_channel *chan = drm->channel; nv04_fbcon_copyarea() local
37 ret = RING_SPACE(chan, 4); nv04_fbcon_copyarea()
41 BEGIN_NV04(chan, NvSubImageBlit, 0x0300, 3); nv04_fbcon_copyarea()
42 OUT_RING(chan, (region->sy << 16) | region->sx); nv04_fbcon_copyarea()
43 OUT_RING(chan, (region->dy << 16) | region->dx); nv04_fbcon_copyarea()
44 OUT_RING(chan, (region->height << 16) | region->width); nv04_fbcon_copyarea()
45 FIRE_RING(chan); nv04_fbcon_copyarea()
54 struct nouveau_channel *chan = drm->channel; nv04_fbcon_fillrect() local
57 ret = RING_SPACE(chan, 7); nv04_fbcon_fillrect()
61 BEGIN_NV04(chan, NvSubGdiRect, 0x02fc, 1); nv04_fbcon_fillrect()
62 OUT_RING(chan, (rect->rop != ROP_COPY) ? 1 : 3); nv04_fbcon_fillrect()
63 BEGIN_NV04(chan, NvSubGdiRect, 0x03fc, 1); nv04_fbcon_fillrect()
66 OUT_RING(chan, ((uint32_t *)info->pseudo_palette)[rect->color]); nv04_fbcon_fillrect()
68 OUT_RING(chan, rect->color); nv04_fbcon_fillrect()
69 BEGIN_NV04(chan, NvSubGdiRect, 0x0400, 2); nv04_fbcon_fillrect()
70 OUT_RING(chan, (rect->dx << 16) | rect->dy); nv04_fbcon_fillrect()
71 OUT_RING(chan, (rect->width << 16) | rect->height); nv04_fbcon_fillrect()
72 FIRE_RING(chan); nv04_fbcon_fillrect()
81 struct nouveau_channel *chan = drm->channel; nv04_fbcon_imageblit() local
91 ret = RING_SPACE(chan, 8); nv04_fbcon_imageblit()
104 BEGIN_NV04(chan, NvSubGdiRect, 0x0be4, 7); nv04_fbcon_imageblit()
105 OUT_RING(chan, (image->dy << 16) | (image->dx & 0xffff)); nv04_fbcon_imageblit()
106 OUT_RING(chan, ((image->dy + image->height) << 16) | nv04_fbcon_imageblit()
108 OUT_RING(chan, bg); nv04_fbcon_imageblit()
109 OUT_RING(chan, fg); nv04_fbcon_imageblit()
110 OUT_RING(chan, (image->height << 16) | image->width); nv04_fbcon_imageblit()
111 OUT_RING(chan, (image->height << 16) | image->width); nv04_fbcon_imageblit()
112 OUT_RING(chan, (image->dy << 16) | (image->dx & 0xffff)); nv04_fbcon_imageblit()
118 ret = RING_SPACE(chan, iter_len + 1); nv04_fbcon_imageblit()
122 BEGIN_NV04(chan, NvSubGdiRect, 0x0c00, iter_len); nv04_fbcon_imageblit()
123 OUT_RINGp(chan, data, iter_len); nv04_fbcon_imageblit()
128 FIRE_RING(chan); nv04_fbcon_imageblit()
138 struct nouveau_channel *chan = drm->channel; nv04_fbcon_accel_init() local
171 ret = nvif_object_init(chan->object, NULL, 0x0062, nv04_fbcon_accel_init()
177 ret = nvif_object_init(chan->object, NULL, 0x0019, 0x0019, NULL, 0, nv04_fbcon_accel_init()
182 ret = nvif_object_init(chan->object, NULL, 0x0043, 0x0043, NULL, 0, nv04_fbcon_accel_init()
187 ret = nvif_object_init(chan->object, NULL, 0x0044, 0x0044, NULL, 0, nv04_fbcon_accel_init()
192 ret = nvif_object_init(chan->object, NULL, 0x004a, 0x004a, NULL, 0, nv04_fbcon_accel_init()
197 ret = nvif_object_init(chan->object, NULL, 0x005f, nv04_fbcon_accel_init()
203 if (RING_SPACE(chan, 49 + (device->info.chipset >= 0x11 ? 4 : 0))) { nv04_fbcon_accel_init()
208 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0000, 1); nv04_fbcon_accel_init()
209 OUT_RING(chan, nfbdev->surf2d.handle); nv04_fbcon_accel_init()
210 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0184, 2); nv04_fbcon_accel_init()
211 OUT_RING(chan, chan->vram.handle); nv04_fbcon_accel_init()
212 OUT_RING(chan, chan->vram.handle); nv04_fbcon_accel_init()
213 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0300, 4); nv04_fbcon_accel_init()
214 OUT_RING(chan, surface_fmt); nv04_fbcon_accel_init()
215 OUT_RING(chan, info->fix.line_length | (info->fix.line_length << 16)); nv04_fbcon_accel_init()
216 OUT_RING(chan, info->fix.smem_start - dev->mode_config.fb_base); nv04_fbcon_accel_init()
217 OUT_RING(chan, info->fix.smem_start - dev->mode_config.fb_base); nv04_fbcon_accel_init()
219 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0000, 1); nv04_fbcon_accel_init()
220 OUT_RING(chan, nfbdev->rop.handle); nv04_fbcon_accel_init()
221 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0300, 1); nv04_fbcon_accel_init()
222 OUT_RING(chan, 0x55); nv04_fbcon_accel_init()
224 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0000, 1); nv04_fbcon_accel_init()
225 OUT_RING(chan, nfbdev->patt.handle); nv04_fbcon_accel_init()
226 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0300, 8); nv04_fbcon_accel_init()
227 OUT_RING(chan, pattern_fmt); nv04_fbcon_accel_init()
229 OUT_RING(chan, 2); nv04_fbcon_accel_init()
231 OUT_RING(chan, 1); nv04_fbcon_accel_init()
233 OUT_RING(chan, 0); nv04_fbcon_accel_init()
234 OUT_RING(chan, 1); nv04_fbcon_accel_init()
235 OUT_RING(chan, ~0); nv04_fbcon_accel_init()
236 OUT_RING(chan, ~0); nv04_fbcon_accel_init()
237 OUT_RING(chan, ~0); nv04_fbcon_accel_init()
238 OUT_RING(chan, ~0); nv04_fbcon_accel_init()
240 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0000, 1); nv04_fbcon_accel_init()
241 OUT_RING(chan, nfbdev->clip.handle); nv04_fbcon_accel_init()
242 BEGIN_NV04(chan, NvSubCtxSurf2D, 0x0300, 2); nv04_fbcon_accel_init()
243 OUT_RING(chan, 0); nv04_fbcon_accel_init()
244 OUT_RING(chan, (info->var.yres_virtual << 16) | info->var.xres_virtual); nv04_fbcon_accel_init()
246 BEGIN_NV04(chan, NvSubImageBlit, 0x0000, 1); nv04_fbcon_accel_init()
247 OUT_RING(chan, nfbdev->blit.handle); nv04_fbcon_accel_init()
248 BEGIN_NV04(chan, NvSubImageBlit, 0x019c, 1); nv04_fbcon_accel_init()
249 OUT_RING(chan, nfbdev->surf2d.handle); nv04_fbcon_accel_init()
250 BEGIN_NV04(chan, NvSubImageBlit, 0x02fc, 1); nv04_fbcon_accel_init()
251 OUT_RING(chan, 3); nv04_fbcon_accel_init()
253 BEGIN_NV04(chan, NvSubImageBlit, 0x0120, 3); nv04_fbcon_accel_init()
254 OUT_RING(chan, 0); nv04_fbcon_accel_init()
255 OUT_RING(chan, 1); nv04_fbcon_accel_init()
256 OUT_RING(chan, 2); nv04_fbcon_accel_init()
259 BEGIN_NV04(chan, NvSubGdiRect, 0x0000, 1); nv04_fbcon_accel_init()
260 OUT_RING(chan, nfbdev->gdi.handle); nv04_fbcon_accel_init()
261 BEGIN_NV04(chan, NvSubGdiRect, 0x0198, 1); nv04_fbcon_accel_init()
262 OUT_RING(chan, nfbdev->surf2d.handle); nv04_fbcon_accel_init()
263 BEGIN_NV04(chan, NvSubGdiRect, 0x0188, 2); nv04_fbcon_accel_init()
264 OUT_RING(chan, nfbdev->patt.handle); nv04_fbcon_accel_init()
265 OUT_RING(chan, nfbdev->rop.handle); nv04_fbcon_accel_init()
266 BEGIN_NV04(chan, NvSubGdiRect, 0x0304, 1); nv04_fbcon_accel_init()
267 OUT_RING(chan, 1); nv04_fbcon_accel_init()
268 BEGIN_NV04(chan, NvSubGdiRect, 0x0300, 1); nv04_fbcon_accel_init()
269 OUT_RING(chan, rect_fmt); nv04_fbcon_accel_init()
270 BEGIN_NV04(chan, NvSubGdiRect, 0x02fc, 1); nv04_fbcon_accel_init()
271 OUT_RING(chan, 3); nv04_fbcon_accel_init()
273 FIRE_RING(chan); nv04_fbcon_accel_init()
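nv04_fbcon_imageblit() (and its nv50/nvc0 twins above) shows the streaming variant: after a fixed header, the bitmap data is pushed through the ring in chunks, each chunk re-reserving space and using a non-incrementing method so every word lands on the same register. A hedged sketch of that inner loop (the chunk size is illustrative, not the driver's value):

/* Stream 'count' words of image data through the ring in pieces. */
static int demo_push_words(struct nouveau_channel *chan,
			   const u32 *data, u32 count)
{
	while (count) {
		u32 push = min_t(u32, count, 128);	/* chunk size: illustrative */
		int ret = RING_SPACE(chan, push + 1);	/* +1 for the header */
		if (ret)
			return ret;

		BEGIN_NI04(chan, NvSubGdiRect, 0x0c00, push);
		OUT_RINGp(chan, data, push);	/* bulk copy into the pushbuf */
		data += push;
		count -= push;
	}
	FIRE_RING(chan);
	return 0;
}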
nouveau_dma.c
31 OUT_RINGp(struct nouveau_channel *chan, const void *data, unsigned nr_dwords) OUT_RINGp() argument
34 u32 *mem = ttm_kmap_obj_virtual(&chan->push.buffer->kmap, &is_iomem); OUT_RINGp()
35 mem = &mem[chan->dma.cur]; OUT_RINGp()
40 chan->dma.cur += nr_dwords; OUT_RINGp()
51 READ_GET(struct nouveau_channel *chan, uint64_t *prev_get, int *timeout) READ_GET() argument
55 val = nvif_rd32(chan, chan->user_get); READ_GET()
56 if (chan->user_get_hi) READ_GET()
57 val |= (uint64_t)nvif_rd32(chan, chan->user_get_hi) << 32; READ_GET()
74 if (val < chan->push.vma.offset || READ_GET()
75 val > chan->push.vma.offset + (chan->dma.max << 2)) READ_GET()
78 return (val - chan->push.vma.offset) >> 2; READ_GET()
82 nv50_dma_push(struct nouveau_channel *chan, struct nouveau_bo *bo, nv50_dma_push() argument
85 struct nouveau_cli *cli = (void *)nvif_client(&chan->device->base); nv50_dma_push()
86 struct nouveau_bo *pb = chan->push.buffer; nv50_dma_push()
88 int ip = (chan->dma.ib_put * 2) + chan->dma.ib_base; nv50_dma_push()
95 BUG_ON(chan->dma.ib_free < 1); nv50_dma_push()
100 chan->dma.ib_put = (chan->dma.ib_put + 1) & chan->dma.ib_max; nv50_dma_push()
106 nvif_wr32(chan, 0x8c, chan->dma.ib_put); nv50_dma_push()
107 chan->dma.ib_free--; nv50_dma_push()
111 nv50_dma_push_wait(struct nouveau_channel *chan, int count) nv50_dma_push_wait() argument
115 while (chan->dma.ib_free < count) { nv50_dma_push_wait()
116 uint32_t get = nvif_rd32(chan, 0x88); nv50_dma_push_wait()
128 chan->dma.ib_free = get - chan->dma.ib_put; nv50_dma_push_wait()
129 if (chan->dma.ib_free <= 0) nv50_dma_push_wait()
130 chan->dma.ib_free += chan->dma.ib_max; nv50_dma_push_wait()
137 nv50_dma_wait(struct nouveau_channel *chan, int slots, int count) nv50_dma_wait() argument
142 ret = nv50_dma_push_wait(chan, slots + 1); nv50_dma_wait()
146 while (chan->dma.free < count) { nv50_dma_wait()
147 int get = READ_GET(chan, &prev_get, &cnt); nv50_dma_wait()
155 if (get <= chan->dma.cur) { nv50_dma_wait()
156 chan->dma.free = chan->dma.max - chan->dma.cur; nv50_dma_wait()
157 if (chan->dma.free >= count) nv50_dma_wait()
160 FIRE_RING(chan); nv50_dma_wait()
162 get = READ_GET(chan, &prev_get, &cnt); nv50_dma_wait()
169 chan->dma.cur = 0; nv50_dma_wait()
170 chan->dma.put = 0; nv50_dma_wait()
173 chan->dma.free = get - chan->dma.cur - 1; nv50_dma_wait()
180 nouveau_dma_wait(struct nouveau_channel *chan, int slots, int size) nouveau_dma_wait() argument
185 if (chan->dma.ib_max) nouveau_dma_wait()
186 return nv50_dma_wait(chan, slots, size); nouveau_dma_wait()
188 while (chan->dma.free < size) { nouveau_dma_wait()
189 get = READ_GET(chan, &prev_get, &cnt); nouveau_dma_wait()
205 if (get <= chan->dma.cur) { nouveau_dma_wait()
219 chan->dma.free = chan->dma.max - chan->dma.cur; nouveau_dma_wait()
220 if (chan->dma.free >= size) nouveau_dma_wait()
227 OUT_RING(chan, chan->push.vma.offset | 0x20000000); nouveau_dma_wait()
235 get = READ_GET(chan, &prev_get, &cnt); nouveau_dma_wait()
246 chan->dma.cur = nouveau_dma_wait()
247 chan->dma.put = NOUVEAU_DMA_SKIPS; nouveau_dma_wait()
256 chan->dma.free = get - chan->dma.cur - 1; nouveau_dma_wait()
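The nouveau_dma.c hits implement the classic single-producer ring arithmetic: cur is where the CPU is writing, get is how far the GPU has consumed, and the free count is the gap between them minus one slot, so that put can never catch get (which would make "full" indistinguishable from "empty"). The two cases visible at lines 156/219 and 173/256 above reduce to this standalone restatement:

#include <assert.h>

/* Free slots left for the writer, mirroring nouveau_dma.c:
 * reader at/behind writer -> run to the end of the ring (wrap later);
 * reader ahead of writer  -> run up to, but not onto, the reader. */
static int ring_free(int get, int cur, int max)
{
	if (get <= cur)
		return max - cur;
	return get - cur - 1;
}

int main(void)
{
	assert(ring_free(0, 0, 1024) == 1024);	/* empty ring */
	assert(ring_free(10, 9, 1024) == 0);	/* writer right behind reader */
	assert(ring_free(5, 100, 1024) == 924);	/* writer ahead: fill to end */
	return 0;
}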
nvc0_fence.c
32 nvc0_fence_emit32(struct nouveau_channel *chan, u64 virtual, u32 sequence) nvc0_fence_emit32() argument
34 int ret = RING_SPACE(chan, 6); nvc0_fence_emit32()
36 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 5); nvc0_fence_emit32()
37 OUT_RING (chan, upper_32_bits(virtual)); nvc0_fence_emit32()
38 OUT_RING (chan, lower_32_bits(virtual)); nvc0_fence_emit32()
39 OUT_RING (chan, sequence); nvc0_fence_emit32()
40 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG); nvc0_fence_emit32()
41 OUT_RING (chan, 0x00000000); nvc0_fence_emit32()
42 FIRE_RING (chan); nvc0_fence_emit32()
48 nvc0_fence_sync32(struct nouveau_channel *chan, u64 virtual, u32 sequence) nvc0_fence_sync32() argument
50 int ret = RING_SPACE(chan, 5); nvc0_fence_sync32()
52 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); nvc0_fence_sync32()
53 OUT_RING (chan, upper_32_bits(virtual)); nvc0_fence_sync32()
54 OUT_RING (chan, lower_32_bits(virtual)); nvc0_fence_sync32()
55 OUT_RING (chan, sequence); nvc0_fence_sync32()
56 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_GEQUAL | nvc0_fence_sync32()
58 FIRE_RING (chan); nvc0_fence_sync32()
64 nvc0_fence_context_new(struct nouveau_channel *chan) nvc0_fence_context_new() argument
66 int ret = nv84_fence_context_new(chan); nvc0_fence_context_new()
68 struct nv84_fence_chan *fctx = chan->fence; nvc0_fence_context_new()
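The fence hits are two halves of one semaphore primitive: emit32 makes the channel write a monotonically increasing sequence number to a 64-bit virtual address (TRIGGER_WRITE_LONG), and sync32 makes another channel stall until the value there is >= a target (TRIGGER_ACQUIRE_GEQUAL). On the host, the acquire side amounts to this wait with a wrap-safe comparison (an illustrative analogue, not the GPU's mechanism):

#include <stdatomic.h>
#include <stdint.h>

/* Host-side analogue of SEMAPHORE_TRIGGER_ACQUIRE_GEQUAL: wait until
 * the 32-bit semaphore reaches seq, treating wraparound correctly. */
static void sem_acquire_gequal(const _Atomic uint32_t *sem, uint32_t seq)
{
	while ((int32_t)(atomic_load(sem) - seq) < 0)
		;	/* the GPU blocks the channel rather than spinning a CPU */
}

The nv84 variant further down first binds NV11_SUBCHAN_DMA_SEMAPHORE to chan->vram.handle before the address methods, which is why its emit reserves 8 ring slots where nvc0's needs only 6.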
nouveau_chan.c
43 nouveau_channel_idle(struct nouveau_channel *chan) nouveau_channel_idle() argument
45 struct nouveau_cli *cli = (void *)nvif_client(chan->object); nouveau_channel_idle()
49 ret = nouveau_fence_new(chan, false, &fence); nouveau_channel_idle()
57 chan->object->handle, nvxx_client(&cli->base)->name); nouveau_channel_idle()
64 struct nouveau_channel *chan = *pchan; nouveau_channel_del() local
65 if (chan) { nouveau_channel_del()
66 if (chan->fence) { nouveau_channel_del()
67 nouveau_channel_idle(chan); nouveau_channel_del()
68 nouveau_fence(chan->drm)->context_del(chan); nouveau_channel_del()
70 nvif_object_fini(&chan->nvsw); nouveau_channel_del()
71 nvif_object_fini(&chan->gart); nouveau_channel_del()
72 nvif_object_fini(&chan->vram); nouveau_channel_del()
73 nvif_object_ref(NULL, &chan->object); nouveau_channel_del()
74 nvif_object_fini(&chan->push.ctxdma); nouveau_channel_del()
75 nouveau_bo_vma_del(chan->push.buffer, &chan->push.vma); nouveau_channel_del()
76 nouveau_bo_unmap(chan->push.buffer); nouveau_channel_del()
77 if (chan->push.buffer && chan->push.buffer->pin_refcnt) nouveau_channel_del()
78 nouveau_bo_unpin(chan->push.buffer); nouveau_channel_del()
79 nouveau_bo_ref(NULL, &chan->push.buffer); nouveau_channel_del()
80 nvif_device_ref(NULL, &chan->device); nouveau_channel_del()
81 kfree(chan); nouveau_channel_del()
93 struct nouveau_channel *chan; nouveau_channel_prep() local
97 chan = *pchan = kzalloc(sizeof(*chan), GFP_KERNEL); nouveau_channel_prep()
98 if (!chan) nouveau_channel_prep()
101 nvif_device_ref(device, &chan->device); nouveau_channel_prep()
102 chan->drm = drm; nouveau_channel_prep()
110 &chan->push.buffer); nouveau_channel_prep()
112 ret = nouveau_bo_pin(chan->push.buffer, target, false); nouveau_channel_prep()
114 ret = nouveau_bo_map(chan->push.buffer); nouveau_channel_prep()
126 chan->push.vma.offset = chan->push.buffer->bo.offset; nouveau_channel_prep()
129 ret = nouveau_bo_vma_add(chan->push.buffer, cli->vm, nouveau_channel_prep()
130 &chan->push.vma); nouveau_channel_prep()
141 if (chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) { nouveau_channel_prep()
158 if (chan->drm->agp.stat == ENABLED) { nouveau_channel_prep()
161 args.start = chan->drm->agp.base; nouveau_channel_prep()
162 args.limit = chan->drm->agp.base + nouveau_channel_prep()
163 chan->drm->agp.size - 1; nouveau_channel_prep()
174 &args, sizeof(args), &chan->push.ctxdma); nouveau_channel_prep()
198 struct nouveau_channel *chan; nouveau_channel_ind() local
203 ret = nouveau_channel_prep(drm, device, handle, 0x12000, &chan); nouveau_channel_ind()
204 *pchan = chan; nouveau_channel_ind()
213 args.kepler.pushbuf = chan->push.ctxdma.handle; nouveau_channel_ind()
215 args.kepler.ioffset = 0x10000 + chan->push.vma.offset; nouveau_channel_ind()
219 args.nv50.pushbuf = chan->push.ctxdma.handle; nouveau_channel_ind()
221 args.nv50.ioffset = 0x10000 + chan->push.vma.offset; nouveau_channel_ind()
226 &args, size, &chan->object); nouveau_channel_ind()
228 retn = chan->object->data; nouveau_channel_ind()
229 if (chan->object->oclass >= KEPLER_CHANNEL_GPFIFO_A) nouveau_channel_ind()
230 chan->chid = retn->kepler.chid; nouveau_channel_ind()
232 chan->chid = retn->nv50.chid; nouveau_channel_ind()
252 struct nouveau_channel *chan; nouveau_channel_dma() local
256 ret = nouveau_channel_prep(drm, device, handle, 0x10000, &chan); nouveau_channel_dma()
257 *pchan = chan; nouveau_channel_dma()
263 args.pushbuf = chan->push.ctxdma.handle; nouveau_channel_dma()
264 args.offset = chan->push.vma.offset; nouveau_channel_dma()
268 &args, sizeof(args), &chan->object); nouveau_channel_dma()
270 retn = chan->object->data; nouveau_channel_dma()
271 chan->chid = retn->chid; nouveau_channel_dma()
281 nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart) nouveau_channel_init() argument
283 struct nvif_device *device = chan->device; nouveau_channel_init()
290 nvif_object_map(chan->object); nouveau_channel_init()
306 ret = nvif_object_init(chan->object, NULL, vram, nouveau_channel_init()
308 sizeof(args), &chan->vram); nouveau_channel_init()
318 if (chan->drm->agp.stat == ENABLED) { nouveau_channel_init()
321 args.start = chan->drm->agp.base; nouveau_channel_init()
322 args.limit = chan->drm->agp.base + nouveau_channel_init()
323 chan->drm->agp.size - 1; nouveau_channel_init()
331 ret = nvif_object_init(chan->object, NULL, gart, nouveau_channel_init()
333 sizeof(args), &chan->gart); nouveau_channel_init()
339 switch (chan->object->oclass & 0x00ff) { nouveau_channel_init()
342 chan->user_put = 0x40; nouveau_channel_init()
343 chan->user_get = 0x44; nouveau_channel_init()
344 chan->dma.max = (0x10000 / 4) - 2; nouveau_channel_init()
347 chan->user_put = 0x40; nouveau_channel_init()
348 chan->user_get = 0x44; nouveau_channel_init()
349 chan->user_get_hi = 0x60; nouveau_channel_init()
350 chan->dma.ib_base = 0x10000 / 4; nouveau_channel_init()
351 chan->dma.ib_max = (0x02000 / 8) - 1; nouveau_channel_init()
352 chan->dma.ib_put = 0; nouveau_channel_init()
353 chan->dma.ib_free = chan->dma.ib_max - chan->dma.ib_put; nouveau_channel_init()
354 chan->dma.max = chan->dma.ib_base; nouveau_channel_init()
358 chan->dma.put = 0; nouveau_channel_init()
359 chan->dma.cur = chan->dma.put; nouveau_channel_init()
360 chan->dma.free = chan->dma.max - chan->dma.cur; nouveau_channel_init()
362 ret = RING_SPACE(chan, NOUVEAU_DMA_SKIPS); nouveau_channel_init()
367 OUT_RING(chan, 0x00000000); nouveau_channel_init()
371 ret = nvif_object_init(chan->object, NULL, 0x006e, 0x006e, nouveau_channel_init()
372 NULL, 0, &chan->nvsw); nouveau_channel_init()
376 swch = (void *)nvxx_object(&chan->nvsw)->parent; nouveau_channel_init()
378 swch->flip_data = chan; nouveau_channel_init()
380 ret = RING_SPACE(chan, 2); nouveau_channel_init()
384 BEGIN_NV04(chan, NvSubSw, 0x0000, 1); nouveau_channel_init()
385 OUT_RING (chan, chan->nvsw.handle); nouveau_channel_init()
386 FIRE_RING (chan); nouveau_channel_init()
390 return nouveau_fence(chan->drm)->context_new(chan); nouveau_channel_init()
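A minimal standalone sketch (illustrative, not from the tree) of the pushbuf layout arithmetic visible in nouveau_channel_ind() and nouveau_channel_init() above: the 0x12000-byte allocation holds 64 KiB of command data followed by a ring of 8-byte indirect-buffer entries, and dma.max stops where the ring begins.

/* Sketch: the layout arithmetic used by nouveau_channel_ind() and
 * nouveau_channel_init() above, assuming the 0x12000-byte pushbuf
 * allocation shown in nouveau_channel_ind(). */
#include <stdio.h>

int main(void)
{
	unsigned buf_size = 0x12000;            /* total pushbuf allocation */
	unsigned ib_off   = 0x10000;            /* IB ring starts after 64 KiB of data */
	unsigned ib_base  = ib_off / 4;         /* dword index of the IB ring */
	unsigned ib_max   = (buf_size - ib_off) / 8 - 1; /* 8-byte IB entries, one kept free */
	unsigned dma_max  = ib_base;            /* data area ends where the ring begins */

	printf("ib_base=0x%x dwords, ib_max=%u entries, dma.max=0x%x dwords\n",
	       ib_base, ib_max, dma_max);
	return 0;
}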
nv84_fence.c
32 nv84_fence_crtc(struct nouveau_channel *chan, int crtc) nv84_fence_crtc() argument
34 struct nv84_fence_chan *fctx = chan->fence; nv84_fence_crtc()
39 nv84_fence_emit32(struct nouveau_channel *chan, u64 virtual, u32 sequence) nv84_fence_emit32() argument
41 int ret = RING_SPACE(chan, 8); nv84_fence_emit32()
43 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1); nv84_fence_emit32()
44 OUT_RING (chan, chan->vram.handle); nv84_fence_emit32()
45 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 5); nv84_fence_emit32()
46 OUT_RING (chan, upper_32_bits(virtual)); nv84_fence_emit32()
47 OUT_RING (chan, lower_32_bits(virtual)); nv84_fence_emit32()
48 OUT_RING (chan, sequence); nv84_fence_emit32()
49 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG); nv84_fence_emit32()
50 OUT_RING (chan, 0x00000000); nv84_fence_emit32()
51 FIRE_RING (chan); nv84_fence_emit32()
57 nv84_fence_sync32(struct nouveau_channel *chan, u64 virtual, u32 sequence) nv84_fence_sync32() argument
59 int ret = RING_SPACE(chan, 7); nv84_fence_sync32()
61 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1); nv84_fence_sync32()
62 OUT_RING (chan, chan->vram.handle); nv84_fence_sync32()
63 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); nv84_fence_sync32()
64 OUT_RING (chan, upper_32_bits(virtual)); nv84_fence_sync32()
65 OUT_RING (chan, lower_32_bits(virtual)); nv84_fence_sync32()
66 OUT_RING (chan, sequence); nv84_fence_sync32()
67 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_GEQUAL); nv84_fence_sync32()
68 FIRE_RING (chan); nv84_fence_sync32()
76 struct nouveau_channel *chan = fence->channel; nv84_fence_emit() local
77 struct nv84_fence_chan *fctx = chan->fence; nv84_fence_emit()
78 u64 addr = chan->chid * 16; nv84_fence_emit()
85 return fctx->base.emit32(chan, addr, fence->base.seqno); nv84_fence_emit()
90 struct nouveau_channel *prev, struct nouveau_channel *chan) nv84_fence_sync()
92 struct nv84_fence_chan *fctx = chan->fence; nv84_fence_sync()
100 return fctx->base.sync32(chan, addr, fence->base.seqno); nv84_fence_sync()
104 nv84_fence_read(struct nouveau_channel *chan) nv84_fence_read() argument
106 struct nv84_fence_priv *priv = chan->drm->fence; nv84_fence_read()
107 return nouveau_bo_rd32(priv->bo, chan->chid * 16/4); nv84_fence_read()
111 nv84_fence_context_del(struct nouveau_channel *chan) nv84_fence_context_del() argument
113 struct drm_device *dev = chan->drm->dev; nv84_fence_context_del()
114 struct nv84_fence_priv *priv = chan->drm->fence; nv84_fence_context_del()
115 struct nv84_fence_chan *fctx = chan->fence; nv84_fence_context_del()
123 nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence); nv84_fence_context_del()
127 chan->fence = NULL; nv84_fence_context_del()
132 nv84_fence_context_new(struct nouveau_channel *chan) nv84_fence_context_new() argument
134 struct nouveau_cli *cli = (void *)nvif_client(&chan->device->base); nv84_fence_context_new()
135 struct nv84_fence_priv *priv = chan->drm->fence; nv84_fence_context_new()
139 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); nv84_fence_context_new()
143 nouveau_fence_context_new(chan, &fctx->base); nv84_fence_context_new()
149 fctx->base.sequence = nv84_fence_read(chan); nv84_fence_context_new()
158 for (i = 0; !ret && i < chan->drm->dev->mode_config.num_crtc; i++) { nv84_fence_context_new()
159 struct nouveau_bo *bo = nv50_display_crtc_sema(chan->drm->dev, i); nv84_fence_context_new()
164 nv84_fence_context_del(chan); nv84_fence_context_new()
89 nv84_fence_sync(struct nouveau_fence *fence, struct nouveau_channel *prev, struct nouveau_channel *chan) nv84_fence_sync() argument
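A small sketch of the per-channel slot addressing used by nv84_fence_emit() and nv84_fence_read() above: each channel owns a 16-byte slot in the shared fence buffer, addressed in bytes on the GPU side and in 32-bit words on the CPU side.

/* Sketch: the per-channel slot math from nv84_fence_emit() and
 * nv84_fence_read() above. */
#include <stdio.h>

int main(void)
{
	for (int chid = 0; chid < 4; chid++) {
		unsigned long byte_addr = chid * 16;     /* GPU semaphore address offset */
		unsigned dword_index    = chid * 16 / 4; /* CPU-side nouveau_bo_rd32() index */
		printf("chid %d: byte offset 0x%lx, dword index %u\n",
		       chid, byte_addr, dword_index);
	}
	return 0;
}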
nouveau_abi16.c
100 nouveau_abi16_ntfy_fini(struct nouveau_abi16_chan *chan, nouveau_abi16_ntfy_fini() argument
103 nvkm_mm_free(&chan->heap, &ntfy->node); nouveau_abi16_ntfy_fini()
110 struct nouveau_abi16_chan *chan) nouveau_abi16_chan_fini()
116 if (chan->chan && chan->ntfy) nouveau_abi16_chan_fini()
117 nouveau_channel_idle(chan->chan); nouveau_abi16_chan_fini()
120 list_for_each_entry_safe(ntfy, temp, &chan->notifiers, head) { nouveau_abi16_chan_fini()
121 nouveau_abi16_ntfy_fini(chan, ntfy); nouveau_abi16_chan_fini()
124 if (chan->ntfy) { nouveau_abi16_chan_fini()
125 nouveau_bo_vma_del(chan->ntfy, &chan->ntfy_vma); nouveau_abi16_chan_fini()
126 nouveau_bo_unpin(chan->ntfy); nouveau_abi16_chan_fini()
127 drm_gem_object_unreference_unlocked(&chan->ntfy->gem); nouveau_abi16_chan_fini()
130 if (chan->heap.block_size) nouveau_abi16_chan_fini()
131 nvkm_mm_fini(&chan->heap); nouveau_abi16_chan_fini()
134 if (chan->chan) { nouveau_abi16_chan_fini()
135 abi16->handles &= ~(1ULL << (chan->chan->object->handle & 0xffff)); nouveau_abi16_chan_fini()
136 nouveau_channel_del(&chan->chan); nouveau_abi16_chan_fini()
139 list_del(&chan->head); nouveau_abi16_chan_fini()
140 kfree(chan); nouveau_abi16_chan_fini()
147 struct nouveau_abi16_chan *chan, *temp; nouveau_abi16_fini() local
150 list_for_each_entry_safe(chan, temp, &abi16->channels, head) { nouveau_abi16_fini()
151 nouveau_abi16_chan_fini(abi16, chan); nouveau_abi16_fini()
241 struct nouveau_abi16_chan *chan; nouveau_abi16_ioctl_channel_alloc() local
274 chan = kzalloc(sizeof(*chan), GFP_KERNEL); nouveau_abi16_ioctl_channel_alloc()
275 if (!chan) nouveau_abi16_ioctl_channel_alloc()
278 INIT_LIST_HEAD(&chan->notifiers); nouveau_abi16_ioctl_channel_alloc()
279 list_add(&chan->head, &abi16->channels); nouveau_abi16_ioctl_channel_alloc()
286 init->tt_ctxdma_handle, &chan->chan); nouveau_abi16_ioctl_channel_alloc()
294 if (chan->chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) nouveau_abi16_ioctl_channel_alloc()
302 init->subchan[1].handle = chan->chan->nvsw.handle; nouveau_abi16_ioctl_channel_alloc()
309 0, 0, &chan->ntfy); nouveau_abi16_ioctl_channel_alloc()
311 ret = nouveau_bo_pin(chan->ntfy, TTM_PL_FLAG_TT, false); nouveau_abi16_ioctl_channel_alloc()
316 ret = nouveau_bo_vma_add(chan->ntfy, cli->vm, nouveau_abi16_ioctl_channel_alloc()
317 &chan->ntfy_vma); nouveau_abi16_ioctl_channel_alloc()
322 ret = drm_gem_handle_create(file_priv, &chan->ntfy->gem, nouveau_abi16_ioctl_channel_alloc()
327 ret = nvkm_mm_init(&chan->heap, 0, PAGE_SIZE, 1); nouveau_abi16_ioctl_channel_alloc()
330 nouveau_abi16_chan_fini(abi16, chan); nouveau_abi16_ioctl_channel_alloc()
337 struct nouveau_abi16_chan *chan; nouveau_abi16_chan() local
339 list_for_each_entry(chan, &abi16->channels, head) { nouveau_abi16_chan()
340 if (chan->chan->object->handle == NOUVEAU_ABI16_CHAN(channel)) nouveau_abi16_chan()
341 return chan; nouveau_abi16_chan()
352 struct nouveau_abi16_chan *chan; nouveau_abi16_ioctl_channel_free() local
357 chan = nouveau_abi16_chan(abi16, req->channel); nouveau_abi16_ioctl_channel_free()
358 if (!chan) nouveau_abi16_ioctl_channel_free()
360 nouveau_abi16_chan_fini(abi16, chan); nouveau_abi16_ioctl_channel_free()
426 struct nouveau_abi16_chan *chan; nouveau_abi16_ioctl_notifierobj_alloc() local
440 chan = nouveau_abi16_chan(abi16, info->channel); nouveau_abi16_ioctl_notifierobj_alloc()
441 if (!chan) nouveau_abi16_ioctl_notifierobj_alloc()
448 list_add(&ntfy->head, &chan->notifiers); nouveau_abi16_ioctl_notifierobj_alloc()
451 ret = nvkm_mm_head(&chan->heap, 0, 1, info->size, info->size, 1, nouveau_abi16_ioctl_notifierobj_alloc()
461 args.ctxdma.start += chan->ntfy_vma.offset; nouveau_abi16_ioctl_notifierobj_alloc()
462 args.ctxdma.limit += chan->ntfy_vma.offset; nouveau_abi16_ioctl_notifierobj_alloc()
467 args.ctxdma.start += drm->agp.base + chan->ntfy->bo.offset; nouveau_abi16_ioctl_notifierobj_alloc()
468 args.ctxdma.limit += drm->agp.base + chan->ntfy->bo.offset; nouveau_abi16_ioctl_notifierobj_alloc()
473 args.ctxdma.start += chan->ntfy->bo.offset; nouveau_abi16_ioctl_notifierobj_alloc()
474 args.ctxdma.limit += chan->ntfy->bo.offset; nouveau_abi16_ioctl_notifierobj_alloc()
486 nouveau_abi16_ntfy_fini(chan, ntfy); nouveau_abi16_ioctl_notifierobj_alloc()
507 struct nouveau_abi16_chan *chan; nouveau_abi16_ioctl_gpuobj_free() local
515 chan = nouveau_abi16_chan(abi16, fini->channel); nouveau_abi16_ioctl_gpuobj_free()
516 if (!chan) nouveau_abi16_ioctl_gpuobj_free()
521 nouveau_channel_idle(chan->chan); nouveau_abi16_ioctl_gpuobj_free()
528 list_for_each_entry(ntfy, &chan->notifiers, head) { nouveau_abi16_ioctl_gpuobj_free()
530 nvkm_mm_free(&chan->heap, &ntfy->node); nouveau_abi16_ioctl_gpuobj_free()
109 nouveau_abi16_chan_fini(struct nouveau_abi16 *abi16, struct nouveau_abi16_chan *chan) nouveau_abi16_chan_fini() argument
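A hedged sketch of the 64-bit channel-handle bitmap implied by the handles mask in nouveau_abi16_chan_fini() above; the allocation loop here is an assumption for illustration, and only the clear-on-free line mirrors the code shown.

/* Sketch (illustrative, not the kernel code): a 64-slot handle bitmap. */
#include <stdio.h>
#include <stdint.h>

static int handle_alloc(uint64_t *map)
{
	for (int i = 0; i < 64; i++) {
		if (!(*map & (1ULL << i))) {
			*map |= 1ULL << i;   /* claim the slot (assumed alloc path) */
			return i;
		}
	}
	return -1;                           /* all 64 channels in use */
}

static void handle_free(uint64_t *map, int i)
{
	*map &= ~(1ULL << i);                /* matches the fini path above */
}

int main(void)
{
	uint64_t handles = 0;
	int a = handle_alloc(&handles), b = handle_alloc(&handles);
	handle_free(&handles, a);
	printf("allocated %d,%d then freed %d -> map=%#llx\n",
	       a, b, a, (unsigned long long)handles);
	return 0;
}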
nouveau_dma.h
89 RING_SPACE(struct nouveau_channel *chan, int size) RING_SPACE() argument
93 ret = nouveau_dma_wait(chan, 1, size); RING_SPACE()
97 chan->dma.free -= size; RING_SPACE()
102 OUT_RING(struct nouveau_channel *chan, int data) OUT_RING() argument
104 nouveau_bo_wr32(chan->push.buffer, chan->dma.cur++, data); OUT_RING()
108 OUT_RINGp(struct nouveau_channel *chan, const void *data, unsigned nr_dwords);
111 BEGIN_NV04(struct nouveau_channel *chan, int subc, int mthd, int size) BEGIN_NV04() argument
113 OUT_RING(chan, 0x00000000 | (subc << 13) | (size << 18) | mthd); BEGIN_NV04()
117 BEGIN_NI04(struct nouveau_channel *chan, int subc, int mthd, int size) BEGIN_NI04() argument
119 OUT_RING(chan, 0x40000000 | (subc << 13) | (size << 18) | mthd); BEGIN_NI04()
123 BEGIN_NVC0(struct nouveau_channel *chan, int subc, int mthd, int size) BEGIN_NVC0() argument
125 OUT_RING(chan, 0x20000000 | (size << 16) | (subc << 13) | (mthd >> 2)); BEGIN_NVC0()
129 BEGIN_NIC0(struct nouveau_channel *chan, int subc, int mthd, int size) BEGIN_NIC0() argument
131 OUT_RING(chan, 0x60000000 | (size << 16) | (subc << 13) | (mthd >> 2)); BEGIN_NIC0()
135 BEGIN_IMC0(struct nouveau_channel *chan, int subc, int mthd, u16 data) BEGIN_IMC0() argument
137 OUT_RING(chan, 0x80000000 | (data << 16) | (subc << 13) | (mthd >> 2)); BEGIN_IMC0()
142 nouveau_bo_rd32(chan->push.buffer, 0); \
143 nvif_wr32(chan, chan->user_put, ((val) << 2) + chan->push.vma.offset); \
147 FIRE_RING(struct nouveau_channel *chan) FIRE_RING() argument
149 if (chan->dma.cur == chan->dma.put) FIRE_RING()
151 chan->accel_done = true; FIRE_RING()
153 if (chan->dma.ib_max) { FIRE_RING()
154 nv50_dma_push(chan, chan->push.buffer, chan->dma.put << 2, FIRE_RING()
155 (chan->dma.cur - chan->dma.put) << 2); FIRE_RING()
157 WRITE_PUT(chan->dma.cur); FIRE_RING()
160 chan->dma.put = chan->dma.cur; FIRE_RING()
164 WIND_RING(struct nouveau_channel *chan) WIND_RING() argument
166 chan->dma.cur = chan->dma.put; WIND_RING()
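A self-contained sketch of the command-header packing performed by the BEGIN_* helpers above: NV04-style headers carry the method address directly, while NVC0-style headers encode the method in dwords (mthd >> 2) with the opcode in the top bits.

/* Sketch: the header encodings from BEGIN_NV04() and BEGIN_NVC0() above. */
#include <stdio.h>
#include <stdint.h>

static uint32_t hdr_nv04(int subc, int mthd, int size)
{
	return 0x00000000 | (subc << 13) | (size << 18) | mthd;
}

static uint32_t hdr_nvc0(int subc, int mthd, int size)
{
	return 0x20000000 | (size << 16) | (subc << 13) | (mthd >> 2);
}

int main(void)
{
	/* one method with one data word, on subchannel 0 at method 0x0150 */
	printf("NV04 header: 0x%08x\n", hdr_nv04(0, 0x0150, 1));
	printf("NVC0 header: 0x%08x\n", hdr_nvc0(0, 0x0150, 1));
	return 0;
}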
nouveau_bo.c
683 nve0_bo_move_init(struct nouveau_channel *chan, u32 handle) nve0_bo_move_init() argument
685 int ret = RING_SPACE(chan, 2); nve0_bo_move_init()
687 BEGIN_NVC0(chan, NvSubCopy, 0x0000, 1); nve0_bo_move_init()
688 OUT_RING (chan, handle & 0x0000ffff); nve0_bo_move_init()
689 FIRE_RING (chan); nve0_bo_move_init()
695 nve0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nve0_bo_move_copy() argument
699 int ret = RING_SPACE(chan, 10); nve0_bo_move_copy()
701 BEGIN_NVC0(chan, NvSubCopy, 0x0400, 8); nve0_bo_move_copy()
702 OUT_RING (chan, upper_32_bits(node->vma[0].offset)); nve0_bo_move_copy()
703 OUT_RING (chan, lower_32_bits(node->vma[0].offset)); nve0_bo_move_copy()
704 OUT_RING (chan, upper_32_bits(node->vma[1].offset)); nve0_bo_move_copy()
705 OUT_RING (chan, lower_32_bits(node->vma[1].offset)); nve0_bo_move_copy()
706 OUT_RING (chan, PAGE_SIZE); nve0_bo_move_copy()
707 OUT_RING (chan, PAGE_SIZE); nve0_bo_move_copy()
708 OUT_RING (chan, PAGE_SIZE); nve0_bo_move_copy()
709 OUT_RING (chan, new_mem->num_pages); nve0_bo_move_copy()
710 BEGIN_IMC0(chan, NvSubCopy, 0x0300, 0x0386); nve0_bo_move_copy()
716 nvc0_bo_move_init(struct nouveau_channel *chan, u32 handle) nvc0_bo_move_init() argument
718 int ret = RING_SPACE(chan, 2); nvc0_bo_move_init()
720 BEGIN_NVC0(chan, NvSubCopy, 0x0000, 1); nvc0_bo_move_init()
721 OUT_RING (chan, handle); nvc0_bo_move_init()
727 nvc0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nvc0_bo_move_copy() argument
740 ret = RING_SPACE(chan, 11); nvc0_bo_move_copy()
744 BEGIN_NVC0(chan, NvSubCopy, 0x030c, 8); nvc0_bo_move_copy()
745 OUT_RING (chan, upper_32_bits(src_offset)); nvc0_bo_move_copy()
746 OUT_RING (chan, lower_32_bits(src_offset)); nvc0_bo_move_copy()
747 OUT_RING (chan, upper_32_bits(dst_offset)); nvc0_bo_move_copy()
748 OUT_RING (chan, lower_32_bits(dst_offset)); nvc0_bo_move_copy()
749 OUT_RING (chan, PAGE_SIZE); nvc0_bo_move_copy()
750 OUT_RING (chan, PAGE_SIZE); nvc0_bo_move_copy()
751 OUT_RING (chan, PAGE_SIZE); nvc0_bo_move_copy()
752 OUT_RING (chan, line_count); nvc0_bo_move_copy()
753 BEGIN_NVC0(chan, NvSubCopy, 0x0300, 1); nvc0_bo_move_copy()
754 OUT_RING (chan, 0x00000110); nvc0_bo_move_copy()
765 nvc0_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nvc0_bo_move_m2mf() argument
778 ret = RING_SPACE(chan, 12); nvc0_bo_move_m2mf()
782 BEGIN_NVC0(chan, NvSubCopy, 0x0238, 2); nvc0_bo_move_m2mf()
783 OUT_RING (chan, upper_32_bits(dst_offset)); nvc0_bo_move_m2mf()
784 OUT_RING (chan, lower_32_bits(dst_offset)); nvc0_bo_move_m2mf()
785 BEGIN_NVC0(chan, NvSubCopy, 0x030c, 6); nvc0_bo_move_m2mf()
786 OUT_RING (chan, upper_32_bits(src_offset)); nvc0_bo_move_m2mf()
787 OUT_RING (chan, lower_32_bits(src_offset)); nvc0_bo_move_m2mf()
788 OUT_RING (chan, PAGE_SIZE); /* src_pitch */ nvc0_bo_move_m2mf()
789 OUT_RING (chan, PAGE_SIZE); /* dst_pitch */ nvc0_bo_move_m2mf()
790 OUT_RING (chan, PAGE_SIZE); /* line_length */ nvc0_bo_move_m2mf()
791 OUT_RING (chan, line_count); nvc0_bo_move_m2mf()
792 BEGIN_NVC0(chan, NvSubCopy, 0x0300, 1); nvc0_bo_move_m2mf()
793 OUT_RING (chan, 0x00100110); nvc0_bo_move_m2mf()
804 nva3_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nva3_bo_move_copy() argument
817 ret = RING_SPACE(chan, 11); nva3_bo_move_copy()
821 BEGIN_NV04(chan, NvSubCopy, 0x030c, 8); nva3_bo_move_copy()
822 OUT_RING (chan, upper_32_bits(src_offset)); nva3_bo_move_copy()
823 OUT_RING (chan, lower_32_bits(src_offset)); nva3_bo_move_copy()
824 OUT_RING (chan, upper_32_bits(dst_offset)); nva3_bo_move_copy()
825 OUT_RING (chan, lower_32_bits(dst_offset)); nva3_bo_move_copy()
826 OUT_RING (chan, PAGE_SIZE); nva3_bo_move_copy()
827 OUT_RING (chan, PAGE_SIZE); nva3_bo_move_copy()
828 OUT_RING (chan, PAGE_SIZE); nva3_bo_move_copy()
829 OUT_RING (chan, line_count); nva3_bo_move_copy()
830 BEGIN_NV04(chan, NvSubCopy, 0x0300, 1); nva3_bo_move_copy()
831 OUT_RING (chan, 0x00000110); nva3_bo_move_copy()
842 nv98_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nv98_bo_move_exec() argument
846 int ret = RING_SPACE(chan, 7); nv98_bo_move_exec()
848 BEGIN_NV04(chan, NvSubCopy, 0x0320, 6); nv98_bo_move_exec()
849 OUT_RING (chan, upper_32_bits(node->vma[0].offset)); nv98_bo_move_exec()
850 OUT_RING (chan, lower_32_bits(node->vma[0].offset)); nv98_bo_move_exec()
851 OUT_RING (chan, upper_32_bits(node->vma[1].offset)); nv98_bo_move_exec()
852 OUT_RING (chan, lower_32_bits(node->vma[1].offset)); nv98_bo_move_exec()
853 OUT_RING (chan, 0x00000000 /* COPY */); nv98_bo_move_exec()
854 OUT_RING (chan, new_mem->num_pages << PAGE_SHIFT); nv98_bo_move_exec()
860 nv84_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nv84_bo_move_exec() argument
864 int ret = RING_SPACE(chan, 7); nv84_bo_move_exec()
866 BEGIN_NV04(chan, NvSubCopy, 0x0304, 6); nv84_bo_move_exec()
867 OUT_RING (chan, new_mem->num_pages << PAGE_SHIFT); nv84_bo_move_exec()
868 OUT_RING (chan, upper_32_bits(node->vma[0].offset)); nv84_bo_move_exec()
869 OUT_RING (chan, lower_32_bits(node->vma[0].offset)); nv84_bo_move_exec()
870 OUT_RING (chan, upper_32_bits(node->vma[1].offset)); nv84_bo_move_exec()
871 OUT_RING (chan, lower_32_bits(node->vma[1].offset)); nv84_bo_move_exec()
872 OUT_RING (chan, 0x00000000 /* MODE_COPY, QUERY_NONE */); nv84_bo_move_exec()
878 nv50_bo_move_init(struct nouveau_channel *chan, u32 handle) nv50_bo_move_init() argument
880 int ret = RING_SPACE(chan, 6); nv50_bo_move_init()
882 BEGIN_NV04(chan, NvSubCopy, 0x0000, 1); nv50_bo_move_init()
883 OUT_RING (chan, handle); nv50_bo_move_init()
884 BEGIN_NV04(chan, NvSubCopy, 0x0180, 3); nv50_bo_move_init()
885 OUT_RING (chan, chan->drm->ntfy.handle); nv50_bo_move_init()
886 OUT_RING (chan, chan->vram.handle); nv50_bo_move_init()
887 OUT_RING (chan, chan->vram.handle); nv50_bo_move_init()
894 nv50_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nv50_bo_move_m2mf() argument
908 ret = RING_SPACE(chan, 18 + 6 * (src_tiled + dst_tiled)); nv50_bo_move_m2mf()
917 BEGIN_NV04(chan, NvSubCopy, 0x0200, 7); nv50_bo_move_m2mf()
918 OUT_RING (chan, 0); nv50_bo_move_m2mf()
919 OUT_RING (chan, 0); nv50_bo_move_m2mf()
920 OUT_RING (chan, stride); nv50_bo_move_m2mf()
921 OUT_RING (chan, height); nv50_bo_move_m2mf()
922 OUT_RING (chan, 1); nv50_bo_move_m2mf()
923 OUT_RING (chan, 0); nv50_bo_move_m2mf()
924 OUT_RING (chan, 0); nv50_bo_move_m2mf()
926 BEGIN_NV04(chan, NvSubCopy, 0x0200, 1); nv50_bo_move_m2mf()
927 OUT_RING (chan, 1); nv50_bo_move_m2mf()
930 BEGIN_NV04(chan, NvSubCopy, 0x021c, 7); nv50_bo_move_m2mf()
931 OUT_RING (chan, 0); nv50_bo_move_m2mf()
932 OUT_RING (chan, 0); nv50_bo_move_m2mf()
933 OUT_RING (chan, stride); nv50_bo_move_m2mf()
934 OUT_RING (chan, height); nv50_bo_move_m2mf()
935 OUT_RING (chan, 1); nv50_bo_move_m2mf()
936 OUT_RING (chan, 0); nv50_bo_move_m2mf()
937 OUT_RING (chan, 0); nv50_bo_move_m2mf()
939 BEGIN_NV04(chan, NvSubCopy, 0x021c, 1); nv50_bo_move_m2mf()
940 OUT_RING (chan, 1); nv50_bo_move_m2mf()
943 BEGIN_NV04(chan, NvSubCopy, 0x0238, 2); nv50_bo_move_m2mf()
944 OUT_RING (chan, upper_32_bits(src_offset)); nv50_bo_move_m2mf()
945 OUT_RING (chan, upper_32_bits(dst_offset)); nv50_bo_move_m2mf()
946 BEGIN_NV04(chan, NvSubCopy, 0x030c, 8); nv50_bo_move_m2mf()
947 OUT_RING (chan, lower_32_bits(src_offset)); nv50_bo_move_m2mf()
948 OUT_RING (chan, lower_32_bits(dst_offset)); nv50_bo_move_m2mf()
949 OUT_RING (chan, stride); nv50_bo_move_m2mf()
950 OUT_RING (chan, stride); nv50_bo_move_m2mf()
951 OUT_RING (chan, stride); nv50_bo_move_m2mf()
952 OUT_RING (chan, height); nv50_bo_move_m2mf()
953 OUT_RING (chan, 0x00000101); nv50_bo_move_m2mf()
954 OUT_RING (chan, 0x00000000); nv50_bo_move_m2mf()
955 BEGIN_NV04(chan, NvSubCopy, NV_MEMORY_TO_MEMORY_FORMAT_NOP, 1); nv50_bo_move_m2mf()
956 OUT_RING (chan, 0); nv50_bo_move_m2mf()
967 nv04_bo_move_init(struct nouveau_channel *chan, u32 handle) nv04_bo_move_init() argument
969 int ret = RING_SPACE(chan, 4); nv04_bo_move_init()
971 BEGIN_NV04(chan, NvSubCopy, 0x0000, 1); nv04_bo_move_init()
972 OUT_RING (chan, handle); nv04_bo_move_init()
973 BEGIN_NV04(chan, NvSubCopy, 0x0180, 1); nv04_bo_move_init()
974 OUT_RING (chan, chan->drm->ntfy.handle); nv04_bo_move_init()
982 struct nouveau_channel *chan, struct ttm_mem_reg *mem) nouveau_bo_mem_ctxdma()
986 return chan->vram.handle; nouveau_bo_mem_ctxdma()
990 nv04_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, nv04_bo_move_m2mf() argument
998 ret = RING_SPACE(chan, 3); nv04_bo_move_m2mf()
1002 BEGIN_NV04(chan, NvSubCopy, NV_MEMORY_TO_MEMORY_FORMAT_DMA_SOURCE, 2); nv04_bo_move_m2mf()
1003 OUT_RING (chan, nouveau_bo_mem_ctxdma(bo, chan, old_mem)); nv04_bo_move_m2mf()
1004 OUT_RING (chan, nouveau_bo_mem_ctxdma(bo, chan, new_mem)); nv04_bo_move_m2mf()
1010 ret = RING_SPACE(chan, 11); nv04_bo_move_m2mf()
1014 BEGIN_NV04(chan, NvSubCopy, nv04_bo_move_m2mf()
1016 OUT_RING (chan, src_offset); nv04_bo_move_m2mf()
1017 OUT_RING (chan, dst_offset); nv04_bo_move_m2mf()
1018 OUT_RING (chan, PAGE_SIZE); /* src_pitch */ nv04_bo_move_m2mf()
1019 OUT_RING (chan, PAGE_SIZE); /* dst_pitch */ nv04_bo_move_m2mf()
1020 OUT_RING (chan, PAGE_SIZE); /* line_length */ nv04_bo_move_m2mf()
1021 OUT_RING (chan, line_count); nv04_bo_move_m2mf()
1022 OUT_RING (chan, 0x00000101); nv04_bo_move_m2mf()
1023 OUT_RING (chan, 0x00000000); nv04_bo_move_m2mf()
1024 BEGIN_NV04(chan, NvSubCopy, NV_MEMORY_TO_MEMORY_FORMAT_NOP, 1); nv04_bo_move_m2mf()
1025 OUT_RING (chan, 0); nv04_bo_move_m2mf()
1066 struct nouveau_channel *chan = drm->ttm.chan; nouveau_bo_move_m2mf() local
1067 struct nouveau_cli *cli = (void *)nvif_client(&chan->device->base); nouveau_bo_move_m2mf()
1082 ret = nouveau_fence_sync(nouveau_bo(bo), chan, true, intr); nouveau_bo_move_m2mf()
1084 ret = drm->ttm.move(chan, bo, &bo->mem, new_mem); nouveau_bo_move_m2mf()
1086 ret = nouveau_fence_new(chan, false, &fence); nouveau_bo_move_m2mf()
1131 struct nouveau_channel *chan; nouveau_bo_move_init() local
1134 chan = drm->cechan; nouveau_bo_move_init()
1136 chan = drm->channel; nouveau_bo_move_init()
1137 if (chan == NULL) nouveau_bo_move_init()
1140 ret = nvif_object_init(chan->object, NULL, nouveau_bo_move_init()
1145 ret = mthd->init(chan, drm->ttm.copy.handle); nouveau_bo_move_init()
1152 drm->ttm.chan = chan; nouveau_bo_move_init()
981 nouveau_bo_mem_ctxdma(struct ttm_buffer_object *bo, struct nouveau_channel *chan, struct ttm_mem_reg *mem) nouveau_bo_mem_ctxdma() argument
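A sketch, under stated assumptions, of how the *_bo_move_copy()/*_bo_move_m2mf() hooks above are typically driven: they program PAGE_SIZE as pitch and line length with a page count per burst, so a full move is a chunking loop. The per-burst cap used here is illustrative, not a value from the tree.

/* Sketch: chunked page copies with PAGE_SIZE pitch (assumed burst cap). */
#include <stdio.h>

#define PAGE_SIZE 4096u
#define BURST_MAX 2047u   /* assumed per-submission line_count limit */

int main(void)
{
	unsigned long src = 0, dst = 0x100000; /* illustrative VMA offsets */
	unsigned pages = 5000;

	while (pages) {
		unsigned line_count = pages > BURST_MAX ? BURST_MAX : pages;
		printf("copy %u pages: src=0x%lx dst=0x%lx pitch=%u\n",
		       line_count, src, dst, PAGE_SIZE);
		src   += (unsigned long)line_count * PAGE_SIZE;
		dst   += (unsigned long)line_count * PAGE_SIZE;
		pages -= line_count;
	}
	return 0;
}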
nv04_fence.c
40 struct nouveau_channel *chan = fence->channel; nv04_fence_emit() local
41 int ret = RING_SPACE(chan, 2); nv04_fence_emit()
43 BEGIN_NV04(chan, NvSubSw, 0x0150, 1); nv04_fence_emit()
44 OUT_RING (chan, fence->base.seqno); nv04_fence_emit()
45 FIRE_RING (chan); nv04_fence_emit()
52 struct nouveau_channel *prev, struct nouveau_channel *chan) nv04_fence_sync()
58 nv04_fence_read(struct nouveau_channel *chan) nv04_fence_read() argument
60 struct nvkm_fifo_chan *fifo = nvxx_fifo_chan(chan); nv04_fence_read()
65 nv04_fence_context_del(struct nouveau_channel *chan) nv04_fence_context_del() argument
67 struct nv04_fence_chan *fctx = chan->fence; nv04_fence_context_del()
69 chan->fence = NULL; nv04_fence_context_del()
74 nv04_fence_context_new(struct nouveau_channel *chan) nv04_fence_context_new() argument
78 nouveau_fence_context_new(chan, &fctx->base); nv04_fence_context_new()
82 chan->fence = fctx; nv04_fence_context_new()
51 nv04_fence_sync(struct nouveau_fence *fence, struct nouveau_channel *prev, struct nouveau_channel *chan) nv04_fence_sync() argument
nv10_fence.c
32 struct nouveau_channel *chan = fence->channel; nv10_fence_emit() local
33 int ret = RING_SPACE(chan, 2); nv10_fence_emit()
35 BEGIN_NV04(chan, 0, NV10_SUBCHAN_REF_CNT, 1); nv10_fence_emit()
36 OUT_RING (chan, fence->base.seqno); nv10_fence_emit()
37 FIRE_RING (chan); nv10_fence_emit()
45 struct nouveau_channel *prev, struct nouveau_channel *chan) nv10_fence_sync()
51 nv10_fence_read(struct nouveau_channel *chan) nv10_fence_read() argument
53 return nvif_rd32(chan, 0x0048); nv10_fence_read()
57 nv10_fence_context_del(struct nouveau_channel *chan) nv10_fence_context_del() argument
59 struct nv10_fence_chan *fctx = chan->fence; nv10_fence_context_del()
65 chan->fence = NULL; nv10_fence_context_del()
70 nv10_fence_context_new(struct nouveau_channel *chan) nv10_fence_context_new() argument
74 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); nv10_fence_context_new()
78 nouveau_fence_context_new(chan, &fctx->base); nv10_fence_context_new()
44 nv10_fence_sync(struct nouveau_fence *fence, struct nouveau_channel *prev, struct nouveau_channel *chan) nv10_fence_sync() argument
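A simulated sketch of the reference-counter fencing pattern in nv10_fence_emit()/nv10_fence_read() above: the ring carries the sequence number as method data, the hardware latches it into a per-channel register as the method retires, and the CPU polls that register. The latch below is a software stand-in, not real hardware behavior.

/* Sketch (simulated): emit writes the seqno, reads poll the latched value. */
#include <stdio.h>
#include <stdint.h>

static uint32_t ref_cnt;                    /* stand-in for the 0x0048 register */

static void emit(uint32_t seqno)  { ref_cnt = seqno; } /* "GPU" consumes method */
static uint32_t fence_read(void)  { return ref_cnt; }

int main(void)
{
	emit(41);
	emit(42);
	printf("fence 42 done: %d\n", (int32_t)(fence_read() - 42) >= 0);
	printf("fence 43 done: %d\n", (int32_t)(fence_read() - 43) >= 0);
	return 0;
}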
nv17_fence.c
34 struct nouveau_channel *prev, struct nouveau_channel *chan) nv17_fence_sync()
37 struct nv10_fence_priv *priv = chan->drm->fence; nv17_fence_sync()
38 struct nv10_fence_chan *fctx = chan->fence; nv17_fence_sync()
60 if (!ret && !(ret = RING_SPACE(chan, 5))) { nv17_fence_sync()
61 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 4); nv17_fence_sync()
62 OUT_RING (chan, fctx->sema.handle); nv17_fence_sync()
63 OUT_RING (chan, 0); nv17_fence_sync()
64 OUT_RING (chan, value + 1); nv17_fence_sync()
65 OUT_RING (chan, value + 2); nv17_fence_sync()
66 FIRE_RING (chan); nv17_fence_sync()
74 nv17_fence_context_new(struct nouveau_channel *chan) nv17_fence_context_new() argument
76 struct nv10_fence_priv *priv = chan->drm->fence; nv17_fence_context_new()
83 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); nv17_fence_context_new()
87 nouveau_fence_context_new(chan, &fctx->base); nv17_fence_context_new()
92 ret = nvif_object_init(chan->object, NULL, NvSema, NV_DMA_FROM_MEMORY, nv17_fence_context_new()
101 nv10_fence_context_del(chan); nv17_fence_context_new()
33 nv17_fence_sync(struct nouveau_fence *fence, struct nouveau_channel *prev, struct nouveau_channel *chan) nv17_fence_sync() argument
nouveau_fence.c
127 nouveau_fence_update(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx) nouveau_fence_update() argument
131 u32 seq = fctx->read(chan); nouveau_fence_update()
156 struct nouveau_channel *chan; nouveau_fence_wait_uevent_handler() local
159 chan = rcu_dereference_protected(fence->channel, lockdep_is_held(&fctx->lock)); nouveau_fence_wait_uevent_handler()
169 nouveau_fence_context_new(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx) nouveau_fence_context_new() argument
171 struct nouveau_fence_priv *priv = (void*)chan->drm->fence; nouveau_fence_context_new()
172 struct nouveau_cli *cli = (void *)nvif_client(chan->object); nouveau_fence_context_new()
178 fctx->context = priv->context_base + chan->chid; nouveau_fence_context_new()
180 if (chan == chan->drm->cechan) nouveau_fence_context_new()
182 else if (chan == chan->drm->channel) nouveau_fence_context_new()
191 ret = nvif_notify_init(chan->object, NULL, nouveau_fence_context_new()
259 nouveau_fence_emit(struct nouveau_fence *fence, struct nouveau_channel *chan) nouveau_fence_emit() argument
261 struct nouveau_fence_chan *fctx = chan->fence; nouveau_fence_emit()
262 struct nouveau_fence_priv *priv = (void*)chan->drm->fence; nouveau_fence_emit()
265 fence->channel = chan; nouveau_fence_emit()
282 if (nouveau_fence_update(chan, fctx)) nouveau_fence_emit()
298 struct nouveau_channel *chan; nouveau_fence_done() local
305 chan = rcu_dereference_protected(fence->channel, lockdep_is_held(&fctx->lock)); nouveau_fence_done()
306 if (chan && nouveau_fence_update(chan, fctx)) nouveau_fence_done()
391 nouveau_fence_sync(struct nouveau_bo *nvbo, struct nouveau_channel *chan, bool exclusive, bool intr) nouveau_fence_sync() argument
393 struct nouveau_fence_chan *fctx = chan->fence; nouveau_fence_sync()
414 f = nouveau_local_fence(fence, chan->drm); nouveau_fence_sync()
418 if (prev && (prev == chan || fctx->sync(f, prev, chan) == 0)) nouveau_fence_sync()
439 f = nouveau_local_fence(fence, chan->drm); nouveau_fence_sync()
443 if (prev && (prev == chan || fctx->sync(f, prev, chan) == 0)) nouveau_fence_sync()
464 nouveau_fence_new(struct nouveau_channel *chan, bool sysmem, nouveau_fence_new() argument
470 if (unlikely(!chan->fence)) nouveau_fence_new()
479 ret = nouveau_fence_emit(fence, chan); nouveau_fence_new()
510 struct nouveau_channel *chan; nouveau_fence_is_signaled() local
514 chan = rcu_dereference(fence->channel); nouveau_fence_is_signaled()
515 if (chan) nouveau_fence_is_signaled()
516 ret = (int)(fctx->read(chan) - fence->base.seqno) >= 0; nouveau_fence_is_signaled()
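A minimal demonstration of the wrap-safe sequence test used by nouveau_fence_is_signaled() above: a signed difference of unsigned 32-bit counters stays correct across the 0xffffffff to 0 rollover.

/* Sketch: the signed-difference completion test from the line above. */
#include <stdio.h>
#include <stdint.h>

static int seq_done(uint32_t hw_read, uint32_t seqno)
{
	return (int32_t)(hw_read - seqno) >= 0;
}

int main(void)
{
	printf("%d\n", seq_done(10, 5));         /* 1: already passed    */
	printf("%d\n", seq_done(5, 10));         /* 0: still pending     */
	printf("%d\n", seq_done(3, 0xfffffffe)); /* 1: passed across wrap */
	return 0;
}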
nv50_fence.c
35 nv50_fence_context_new(struct nouveau_channel *chan) nv50_fence_context_new() argument
37 struct drm_device *dev = chan->drm->dev; nv50_fence_context_new()
38 struct nv10_fence_priv *priv = chan->drm->fence; nv50_fence_context_new()
45 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); nv50_fence_context_new()
49 nouveau_fence_context_new(chan, &fctx->base); nv50_fence_context_new()
54 ret = nvif_object_init(chan->object, NULL, NvSema, NV_DMA_IN_MEMORY, nv50_fence_context_new()
69 ret = nvif_object_init(chan->object, NULL, NvEvoSema0 + i, nv50_fence_context_new()
80 nv10_fence_context_del(chan); nv50_fence_context_new()
/linux-4.1.27/include/sound/
emu8000_reg.h
38 #define EMU8000_CMD(reg, chan) ((reg)<<5 | (chan))
44 #define EMU8000_CPF_READ(emu, chan) \
45 snd_emu8000_peek_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(0, (chan)))
46 #define EMU8000_PTRX_READ(emu, chan) \
47 snd_emu8000_peek_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(1, (chan)))
48 #define EMU8000_CVCF_READ(emu, chan) \
49 snd_emu8000_peek_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(2, (chan)))
50 #define EMU8000_VTFT_READ(emu, chan) \
51 snd_emu8000_peek_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(3, (chan)))
52 #define EMU8000_PSST_READ(emu, chan) \
53 snd_emu8000_peek_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(6, (chan)))
54 #define EMU8000_CSL_READ(emu, chan) \
55 snd_emu8000_peek_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(7, (chan)))
56 #define EMU8000_CCCA_READ(emu, chan) \
57 snd_emu8000_peek_dw((emu), EMU8000_DATA1(emu), EMU8000_CMD(0, (chan)))
84 #define EMU8000_INIT1_READ(emu, chan) \
85 snd_emu8000_peek((emu), EMU8000_DATA1(emu), EMU8000_CMD(2, (chan)))
86 #define EMU8000_INIT2_READ(emu, chan) \
87 snd_emu8000_peek((emu), EMU8000_DATA2(emu), EMU8000_CMD(2, (chan)))
88 #define EMU8000_INIT3_READ(emu, chan) \
89 snd_emu8000_peek((emu), EMU8000_DATA1(emu), EMU8000_CMD(3, (chan)))
90 #define EMU8000_INIT4_READ(emu, chan) \
91 snd_emu8000_peek((emu), EMU8000_DATA2(emu), EMU8000_CMD(3, (chan)))
92 #define EMU8000_ENVVOL_READ(emu, chan) \
93 snd_emu8000_peek((emu), EMU8000_DATA1(emu), EMU8000_CMD(4, (chan)))
94 #define EMU8000_DCYSUSV_READ(emu, chan) \
95 snd_emu8000_peek((emu), EMU8000_DATA1(emu), EMU8000_CMD(5, (chan)))
96 #define EMU8000_ENVVAL_READ(emu, chan) \
97 snd_emu8000_peek((emu), EMU8000_DATA1(emu), EMU8000_CMD(6, (chan)))
98 #define EMU8000_DCYSUS_READ(emu, chan) \
99 snd_emu8000_peek((emu), EMU8000_DATA1(emu), EMU8000_CMD(7, (chan)))
100 #define EMU8000_ATKHLDV_READ(emu, chan) \
101 snd_emu8000_peek((emu), EMU8000_DATA2(emu), EMU8000_CMD(4, (chan)))
102 #define EMU8000_LFO1VAL_READ(emu, chan) \
103 snd_emu8000_peek((emu), EMU8000_DATA2(emu), EMU8000_CMD(5, (chan)))
104 #define EMU8000_ATKHLD_READ(emu, chan) \
105 snd_emu8000_peek((emu), EMU8000_DATA2(emu), EMU8000_CMD(6, (chan)))
106 #define EMU8000_LFO2VAL_READ(emu, chan) \
107 snd_emu8000_peek((emu), EMU8000_DATA2(emu), EMU8000_CMD(7, (chan)))
108 #define EMU8000_IP_READ(emu, chan) \
109 snd_emu8000_peek((emu), EMU8000_DATA3(emu), EMU8000_CMD(0, (chan)))
110 #define EMU8000_IFATN_READ(emu, chan) \
111 snd_emu8000_peek((emu), EMU8000_DATA3(emu), EMU8000_CMD(1, (chan)))
112 #define EMU8000_PEFE_READ(emu, chan) \
113 snd_emu8000_peek((emu), EMU8000_DATA3(emu), EMU8000_CMD(2, (chan)))
114 #define EMU8000_FMMOD_READ(emu, chan) \
115 snd_emu8000_peek((emu), EMU8000_DATA3(emu), EMU8000_CMD(3, (chan)))
116 #define EMU8000_TREMFRQ_READ(emu, chan) \
117 snd_emu8000_peek((emu), EMU8000_DATA3(emu), EMU8000_CMD(4, (chan)))
118 #define EMU8000_FM2FRQ2_READ(emu, chan) \
119 snd_emu8000_peek((emu), EMU8000_DATA3(emu), EMU8000_CMD(5, (chan)))
122 #define EMU8000_CPF_WRITE(emu, chan, val) \
123 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(0, (chan)), (val))
124 #define EMU8000_PTRX_WRITE(emu, chan, val) \
125 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(1, (chan)), (val))
126 #define EMU8000_CVCF_WRITE(emu, chan, val) \
127 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(2, (chan)), (val))
128 #define EMU8000_VTFT_WRITE(emu, chan, val) \
129 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(3, (chan)), (val))
130 #define EMU8000_PSST_WRITE(emu, chan, val) \
131 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(6, (chan)), (val))
132 #define EMU8000_CSL_WRITE(emu, chan, val) \
133 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(7, (chan)), (val))
134 #define EMU8000_CCCA_WRITE(emu, chan, val) \
135 snd_emu8000_poke_dw((emu), EMU8000_DATA1(emu), EMU8000_CMD(0, (chan)), (val))
165 #define EMU8000_INIT1_WRITE(emu, chan, val) \
166 snd_emu8000_poke((emu), EMU8000_DATA1(emu), EMU8000_CMD(2, (chan)), (val))
167 #define EMU8000_INIT2_WRITE(emu, chan, val) \
168 snd_emu8000_poke((emu), EMU8000_DATA2(emu), EMU8000_CMD(2, (chan)), (val))
169 #define EMU8000_INIT3_WRITE(emu, chan, val) \
170 snd_emu8000_poke((emu), EMU8000_DATA1(emu), EMU8000_CMD(3, (chan)), (val))
171 #define EMU8000_INIT4_WRITE(emu, chan, val) \
172 snd_emu8000_poke((emu), EMU8000_DATA2(emu), EMU8000_CMD(3, (chan)), (val))
173 #define EMU8000_ENVVOL_WRITE(emu, chan, val) \
174 snd_emu8000_poke((emu), EMU8000_DATA1(emu), EMU8000_CMD(4, (chan)), (val))
175 #define EMU8000_DCYSUSV_WRITE(emu, chan, val) \
176 snd_emu8000_poke((emu), EMU8000_DATA1(emu), EMU8000_CMD(5, (chan)), (val))
177 #define EMU8000_ENVVAL_WRITE(emu, chan, val) \
178 snd_emu8000_poke((emu), EMU8000_DATA1(emu), EMU8000_CMD(6, (chan)), (val))
179 #define EMU8000_DCYSUS_WRITE(emu, chan, val) \
180 snd_emu8000_poke((emu), EMU8000_DATA1(emu), EMU8000_CMD(7, (chan)), (val))
181 #define EMU8000_ATKHLDV_WRITE(emu, chan, val) \
182 snd_emu8000_poke((emu), EMU8000_DATA2(emu), EMU8000_CMD(4, (chan)), (val))
183 #define EMU8000_LFO1VAL_WRITE(emu, chan, val) \
184 snd_emu8000_poke((emu), EMU8000_DATA2(emu), EMU8000_CMD(5, (chan)), (val))
185 #define EMU8000_ATKHLD_WRITE(emu, chan, val) \
186 snd_emu8000_poke((emu), EMU8000_DATA2(emu), EMU8000_CMD(6, (chan)), (val))
187 #define EMU8000_LFO2VAL_WRITE(emu, chan, val) \
188 snd_emu8000_poke((emu), EMU8000_DATA2(emu), EMU8000_CMD(7, (chan)), (val))
189 #define EMU8000_IP_WRITE(emu, chan, val) \
190 snd_emu8000_poke((emu), EMU8000_DATA3(emu), EMU8000_CMD(0, (chan)), (val))
191 #define EMU8000_IFATN_WRITE(emu, chan, val) \
192 snd_emu8000_poke((emu), EMU8000_DATA3(emu), EMU8000_CMD(1, (chan)), (val))
193 #define EMU8000_PEFE_WRITE(emu, chan, val) \
194 snd_emu8000_poke((emu), EMU8000_DATA3(emu), EMU8000_CMD(2, (chan)), (val))
195 #define EMU8000_FMMOD_WRITE(emu, chan, val) \
196 snd_emu8000_poke((emu), EMU8000_DATA3(emu), EMU8000_CMD(3, (chan)), (val))
197 #define EMU8000_TREMFRQ_WRITE(emu, chan, val) \
198 snd_emu8000_poke((emu), EMU8000_DATA3(emu), EMU8000_CMD(4, (chan)), (val))
199 #define EMU8000_FM2FRQ2_WRITE(emu, chan, val) \
200 snd_emu8000_poke((emu), EMU8000_DATA3(emu), EMU8000_CMD(5, (chan)), (val))
202 #define EMU8000_0080_WRITE(emu, chan, val) \
203 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(4, (chan)), (val))
204 #define EMU8000_00A0_WRITE(emu, chan, val) \
205 snd_emu8000_poke_dw((emu), EMU8000_DATA0(emu), EMU8000_CMD(5, (chan)), (val))
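A tiny standalone example of the register/voice encoding behind the EMU8000_*_READ/WRITE macros above: EMU8000_CMD packs a register index above a 5-bit channel (voice) number.

/* Sketch: the command-byte packing from EMU8000_CMD above. */
#include <stdio.h>

#define EMU8000_CMD(reg, chan) ((reg) << 5 | (chan))

int main(void)
{
	/* e.g. register index 0 of voice 7, register index 1 of voice 31 */
	printf("CMD(0,7)  = 0x%02x\n", EMU8000_CMD(0, 7));
	printf("CMD(1,31) = 0x%02x\n", EMU8000_CMD(1, 31));
	return 0;
}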
/linux-4.1.27/drivers/video/fbdev/i810/
i810-i2c.c
44 struct i810fb_i2c_chan *chan = data; i810i2c_setscl() local
45 struct i810fb_par *par = chan->par; i810i2c_setscl()
49 i810_writel(mmio, chan->ddc_base, SCL_DIR_MASK | SCL_VAL_MASK); i810i2c_setscl()
51 i810_writel(mmio, chan->ddc_base, SCL_DIR | SCL_DIR_MASK | SCL_VAL_MASK); i810i2c_setscl()
52 i810_readl(mmio, chan->ddc_base); /* flush posted write */ i810i2c_setscl()
57 struct i810fb_i2c_chan *chan = data; i810i2c_setsda() local
58 struct i810fb_par *par = chan->par; i810i2c_setsda()
62 i810_writel(mmio, chan->ddc_base, SDA_DIR_MASK | SDA_VAL_MASK); i810i2c_setsda()
64 i810_writel(mmio, chan->ddc_base, SDA_DIR | SDA_DIR_MASK | SDA_VAL_MASK); i810i2c_setsda()
65 i810_readl(mmio, chan->ddc_base); /* flush posted write */ i810i2c_setsda()
70 struct i810fb_i2c_chan *chan = data; i810i2c_getscl() local
71 struct i810fb_par *par = chan->par; i810i2c_getscl()
74 i810_writel(mmio, chan->ddc_base, SCL_DIR_MASK); i810i2c_getscl()
75 i810_writel(mmio, chan->ddc_base, 0); i810i2c_getscl()
76 return ((i810_readl(mmio, chan->ddc_base) & SCL_VAL_IN) != 0); i810i2c_getscl()
81 struct i810fb_i2c_chan *chan = data; i810i2c_getsda() local
82 struct i810fb_par *par = chan->par; i810i2c_getsda()
85 i810_writel(mmio, chan->ddc_base, SDA_DIR_MASK); i810i2c_getsda()
86 i810_writel(mmio, chan->ddc_base, 0); i810i2c_getsda()
87 return ((i810_readl(mmio, chan->ddc_base) & SDA_VAL_IN) != 0); i810i2c_getsda()
90 static int i810_setup_i2c_bus(struct i810fb_i2c_chan *chan, const char *name) i810_setup_i2c_bus() argument
94 strcpy(chan->adapter.name, name); i810_setup_i2c_bus()
95 chan->adapter.owner = THIS_MODULE; i810_setup_i2c_bus()
96 chan->adapter.algo_data = &chan->algo; i810_setup_i2c_bus()
97 chan->adapter.dev.parent = &chan->par->dev->dev; i810_setup_i2c_bus()
98 chan->algo.setsda = i810i2c_setsda; i810_setup_i2c_bus()
99 chan->algo.setscl = i810i2c_setscl; i810_setup_i2c_bus()
100 chan->algo.getsda = i810i2c_getsda; i810_setup_i2c_bus()
101 chan->algo.getscl = i810i2c_getscl; i810_setup_i2c_bus()
102 chan->algo.udelay = 10; i810_setup_i2c_bus()
103 chan->algo.timeout = (HZ/2); i810_setup_i2c_bus()
104 chan->algo.data = chan; i810_setup_i2c_bus()
106 i2c_set_adapdata(&chan->adapter, chan); i810_setup_i2c_bus()
109 chan->algo.setsda(chan, 1); i810_setup_i2c_bus()
110 chan->algo.setscl(chan, 1); i810_setup_i2c_bus()
113 rc = i2c_bit_add_bus(&chan->adapter); i810_setup_i2c_bus()
116 dev_dbg(&chan->par->dev->dev, "I2C bus %s registered.\n",name); i810_setup_i2c_bus()
118 dev_warn(&chan->par->dev->dev, "Failed to register I2C bus " i810_setup_i2c_bus()
120 chan->par = NULL; i810_setup_i2c_bus()
128 par->chan[0].par = par; i810_create_i2c_busses()
129 par->chan[1].par = par; i810_create_i2c_busses()
130 par->chan[2].par = par; i810_create_i2c_busses()
132 par->chan[0].ddc_base = GPIOA; i810_create_i2c_busses()
133 i810_setup_i2c_bus(&par->chan[0], "I810-DDC"); i810_create_i2c_busses()
134 par->chan[1].ddc_base = GPIOB; i810_create_i2c_busses()
135 i810_setup_i2c_bus(&par->chan[1], "I810-I2C"); i810_create_i2c_busses()
136 par->chan[2].ddc_base = GPIOC; i810_create_i2c_busses()
137 i810_setup_i2c_bus(&par->chan[2], "I810-GPIOC"); i810_create_i2c_busses()
142 if (par->chan[0].par) i810_delete_i2c_busses()
143 i2c_del_adapter(&par->chan[0].adapter); i810_delete_i2c_busses()
144 par->chan[0].par = NULL; i810_delete_i2c_busses()
146 if (par->chan[1].par) i810_delete_i2c_busses()
147 i2c_del_adapter(&par->chan[1].adapter); i810_delete_i2c_busses()
148 par->chan[1].par = NULL; i810_delete_i2c_busses()
150 if (par->chan[2].par) i810_delete_i2c_busses()
151 i2c_del_adapter(&par->chan[2].adapter); i810_delete_i2c_busses()
152 par->chan[2].par = NULL; i810_delete_i2c_busses()
162 edid = fb_ddc_read(&par->chan[conn].adapter); i810_probe_i2c_connector()
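A stubbed sketch of the open-drain bit-banging contract that the setsda/setscl callbacks above implement for i2c-algo-bit; for example, an I2C START condition is SDA falling while SCL is held high. The callback names and stubs here are illustrative.

/* Sketch (stubbed): driving an I2C START through an algo-bit-style
 * function-pointer table like the ones registered above. */
#include <stdio.h>

struct bit_algo {
	void (*setsda)(void *data, int state);
	void (*setscl)(void *data, int state);
};

static void stub_setsda(void *data, int s) { (void)data; printf("SDA=%d\n", s); }
static void stub_setscl(void *data, int s) { (void)data; printf("SCL=%d\n", s); }

static void i2c_start(struct bit_algo *a, void *data)
{
	a->setsda(data, 1);
	a->setscl(data, 1);  /* bus idle: both lines released high */
	a->setsda(data, 0);  /* SDA falls while SCL is high: START */
	a->setscl(data, 0);
}

int main(void)
{
	struct bit_algo a = { stub_setsda, stub_setscl };
	i2c_start(&a, NULL);
	return 0;
}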
/linux-4.1.27/arch/um/drivers/
chan_kern.c
9 #include "chan.h"
84 static int open_one_chan(struct chan *chan) open_one_chan() argument
88 if (chan->opened) open_one_chan()
91 if (chan->ops->open == NULL) open_one_chan()
93 else fd = (*chan->ops->open)(chan->input, chan->output, chan->primary, open_one_chan()
94 chan->data, &chan->dev); open_one_chan()
100 (*chan->ops->close)(fd, chan->data); open_one_chan()
104 chan->fd = fd; open_one_chan()
106 chan->opened = 1; open_one_chan()
113 struct chan *chan; open_chan() local
117 chan = list_entry(ele, struct chan, list); list_for_each()
118 ret = open_one_chan(chan); list_for_each()
119 if (chan->primary) list_for_each()
125 void chan_enable_winch(struct chan *chan, struct tty_port *port) chan_enable_winch() argument
127 if (chan && chan->primary && chan->ops->winch) chan_enable_winch()
128 register_winch(chan->fd, port); chan_enable_winch()
142 struct chan *chan; enable_chan() local
148 chan = list_entry(ele, struct chan, list); enable_chan()
149 err = open_one_chan(chan); enable_chan()
151 if (chan->primary) enable_chan()
157 if (chan->enabled) enable_chan()
159 err = line_setup_irq(chan->fd, chan->input, chan->output, line, enable_chan()
160 chan); enable_chan()
164 chan->enabled = 1; enable_chan()
185 struct chan *chan; free_irqs() local
195 chan = list_entry(ele, struct chan, free_list); free_irqs()
197 if (chan->input && chan->enabled) free_irqs()
198 um_free_irq(chan->line->driver->read_irq, chan); free_irqs()
199 if (chan->output && chan->enabled) free_irqs()
200 um_free_irq(chan->line->driver->write_irq, chan); free_irqs()
201 chan->enabled = 0; free_irqs()
205 static void close_one_chan(struct chan *chan, int delay_free_irq) close_one_chan() argument
209 if (!chan->opened) close_one_chan()
214 list_add(&chan->free_list, &irqs_to_free); close_one_chan()
218 if (chan->input && chan->enabled) close_one_chan()
219 um_free_irq(chan->line->driver->read_irq, chan); close_one_chan()
220 if (chan->output && chan->enabled) close_one_chan()
221 um_free_irq(chan->line->driver->write_irq, chan); close_one_chan()
222 chan->enabled = 0; close_one_chan()
224 if (chan->ops->close != NULL) close_one_chan()
225 (*chan->ops->close)(chan->fd, chan->data); close_one_chan()
227 chan->opened = 0; close_one_chan()
228 chan->fd = -1; close_one_chan()
233 struct chan *chan; close_chan() local
240 list_for_each_entry_reverse(chan, &line->chan_list, list) { close_chan()
241 close_one_chan(chan, 0); close_chan()
245 void deactivate_chan(struct chan *chan, int irq) deactivate_chan() argument
247 if (chan && chan->enabled) deactivate_chan()
248 deactivate_fd(chan->fd, irq); deactivate_chan()
251 void reactivate_chan(struct chan *chan, int irq) reactivate_chan() argument
253 if (chan && chan->enabled) reactivate_chan()
254 reactivate_fd(chan->fd, irq); reactivate_chan()
257 int write_chan(struct chan *chan, const char *buf, int len, write_chan() argument
262 if (len == 0 || !chan || !chan->ops->write) write_chan()
265 n = chan->ops->write(chan->fd, buf, len, chan->data); write_chan()
266 if (chan->primary) { write_chan()
269 reactivate_fd(chan->fd, write_irq); write_chan()
274 int console_write_chan(struct chan *chan, const char *buf, int len) console_write_chan() argument
278 if (!chan || !chan->ops->console_write) console_write_chan()
281 n = chan->ops->console_write(chan->fd, buf, len); console_write_chan()
282 if (chan->primary) console_write_chan()
303 struct chan *chan; chan_window_size() local
305 chan = line->chan_in; chan_window_size()
306 if (chan && chan->primary) { chan_window_size()
307 if (chan->ops->window_size == NULL) chan_window_size()
309 return chan->ops->window_size(chan->fd, chan->data, chan_window_size()
312 chan = line->chan_out; chan_window_size()
313 if (chan && chan->primary) { chan_window_size()
314 if (chan->ops->window_size == NULL) chan_window_size()
316 return chan->ops->window_size(chan->fd, chan->data, chan_window_size()
322 static void free_one_chan(struct chan *chan) free_one_chan() argument
324 list_del(&chan->list); free_one_chan()
326 close_one_chan(chan, 0); free_one_chan()
328 if (chan->ops->free != NULL) free_one_chan()
329 (*chan->ops->free)(chan->data); free_one_chan()
331 if (chan->primary && chan->output) free_one_chan()
332 ignore_sigio_fd(chan->fd); free_one_chan()
333 kfree(chan); free_one_chan()
339 struct chan *chan; free_chan() local
342 chan = list_entry(ele, struct chan, list); list_for_each_safe()
343 free_one_chan(chan); list_for_each_safe()
347 static int one_chan_config_string(struct chan *chan, char *str, int size, one_chan_config_string() argument
352 if (chan == NULL) { one_chan_config_string()
357 CONFIG_CHUNK(str, size, n, chan->ops->type, 0); one_chan_config_string()
359 if (chan->dev == NULL) { one_chan_config_string()
365 CONFIG_CHUNK(str, size, n, chan->dev, 0); one_chan_config_string()
370 static int chan_pair_config_string(struct chan *in, struct chan *out, chan_pair_config_string()
396 struct chan *in = line->chan_in, *out = line->chan_out; chan_config_string()
447 static struct chan *parse_chan(struct line *line, char *str, int device, parse_chan()
452 struct chan *chan; parse_chan() local
477 chan = kmalloc(sizeof(*chan), GFP_ATOMIC); parse_chan()
478 if (chan == NULL) { parse_chan()
482 *chan = ((struct chan) { .list = LIST_HEAD_INIT(chan->list), parse_chan()
484 LIST_HEAD_INIT(chan->free_list), parse_chan()
494 return chan; parse_chan()
501 struct chan *new; parse_chan_pair()
550 struct chan *chan = line->chan_in; chan_interrupt() local
554 if (!chan || !chan->ops->read) chan_interrupt()
562 err = chan->ops->read(chan->fd, &c, chan->data); chan_interrupt()
568 reactivate_fd(chan->fd, irq); chan_interrupt()
570 if (chan->primary) { chan_interrupt()
572 if (line->chan_out != chan) chan_interrupt()
575 close_one_chan(chan, 1); chan_interrupt()
576 if (chan->primary) chan_interrupt()
chan.h
15 struct chan { struct
33 extern int write_chan(struct chan *chan, const char *buf, int len,
35 extern int console_write_chan(struct chan *chan, const char *buf,
38 extern void deactivate_chan(struct chan *chan, int irq);
39 extern void reactivate_chan(struct chan *chan, int irq);
40 extern void chan_enable_winch(struct chan *chan, struct tty_port *port);
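A stubbed sketch of the ops-table dispatch pattern used by write_chan() and the other helpers above: every backend supplies a function-pointer ops struct, and callers guard each hook against NULL before dispatching.

/* Sketch (stubbed): a minimal chan ops table and its write dispatch. */
#include <stdio.h>

struct chan_ops {
	int (*write)(int fd, const char *buf, int len, void *data);
};

struct chan {
	int fd;
	void *data;
	const struct chan_ops *ops;
};

static int stub_write(int fd, const char *buf, int len, void *data)
{
	(void)fd; (void)data;
	return (int)fwrite(buf, 1, len, stdout); /* stand-in backend */
}

static int write_chan(struct chan *chan, const char *buf, int len)
{
	if (len == 0 || !chan || !chan->ops->write)
		return 0;                        /* same guards as above */
	return chan->ops->write(chan->fd, buf, len, chan->data);
}

int main(void)
{
	static const struct chan_ops ops = { .write = stub_write };
	struct chan c = { .fd = 1, .ops = &ops };
	write_chan(&c, "hello\n", 6);
	return 0;
}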
/linux-4.1.27/net/bluetooth/
l2cap_core.c
60 static int l2cap_build_conf_req(struct l2cap_chan *chan, void *data);
61 static void l2cap_send_disconn_req(struct l2cap_chan *chan, int err);
63 static void l2cap_tx(struct l2cap_chan *chan, struct l2cap_ctrl *control,
184 int l2cap_add_psm(struct l2cap_chan *chan, bdaddr_t *src, __le16 psm) l2cap_add_psm() argument
196 chan->psm = psm; l2cap_add_psm()
197 chan->sport = psm; l2cap_add_psm()
205 chan->psm = cpu_to_le16(p); l2cap_add_psm()
206 chan->sport = cpu_to_le16(p); l2cap_add_psm()
218 int l2cap_add_scid(struct l2cap_chan *chan, __u16 scid) l2cap_add_scid() argument
223 chan->omtu = L2CAP_DEFAULT_MTU; l2cap_add_scid()
224 chan->chan_type = L2CAP_CHAN_FIXED; l2cap_add_scid()
226 chan->scid = scid; l2cap_add_scid()
250 static void l2cap_state_change(struct l2cap_chan *chan, int state) l2cap_state_change() argument
252 BT_DBG("chan %p %s -> %s", chan, state_to_string(chan->state), l2cap_state_change()
255 chan->state = state; l2cap_state_change()
256 chan->ops->state_change(chan, state, 0); l2cap_state_change()
259 static inline void l2cap_state_change_and_error(struct l2cap_chan *chan, l2cap_state_change_and_error() argument
262 chan->state = state; l2cap_state_change_and_error()
263 chan->ops->state_change(chan, chan->state, err); l2cap_state_change_and_error()
266 static inline void l2cap_chan_set_err(struct l2cap_chan *chan, int err) l2cap_chan_set_err() argument
268 chan->ops->state_change(chan, chan->state, err); l2cap_chan_set_err()
271 static void __set_retrans_timer(struct l2cap_chan *chan) __set_retrans_timer() argument
273 if (!delayed_work_pending(&chan->monitor_timer) && __set_retrans_timer()
274 chan->retrans_timeout) { __set_retrans_timer()
275 l2cap_set_timer(chan, &chan->retrans_timer, __set_retrans_timer()
276 msecs_to_jiffies(chan->retrans_timeout)); __set_retrans_timer()
280 static void __set_monitor_timer(struct l2cap_chan *chan) __set_monitor_timer() argument
282 __clear_retrans_timer(chan); __set_monitor_timer()
283 if (chan->monitor_timeout) { __set_monitor_timer()
284 l2cap_set_timer(chan, &chan->monitor_timer, __set_monitor_timer()
285 msecs_to_jiffies(chan->monitor_timeout)); __set_monitor_timer()
398 struct l2cap_chan *chan = container_of(work, struct l2cap_chan, l2cap_chan_timeout() local
400 struct l2cap_conn *conn = chan->conn; l2cap_chan_timeout()
403 BT_DBG("chan %p state %s", chan, state_to_string(chan->state)); l2cap_chan_timeout()
406 l2cap_chan_lock(chan); l2cap_chan_timeout()
408 if (chan->state == BT_CONNECTED || chan->state == BT_CONFIG) l2cap_chan_timeout()
410 else if (chan->state == BT_CONNECT && l2cap_chan_timeout()
411 chan->sec_level != BT_SECURITY_SDP) l2cap_chan_timeout()
416 l2cap_chan_close(chan, reason); l2cap_chan_timeout()
418 l2cap_chan_unlock(chan); l2cap_chan_timeout()
420 chan->ops->close(chan); l2cap_chan_timeout()
423 l2cap_chan_put(chan); l2cap_chan_timeout()
428 struct l2cap_chan *chan; l2cap_chan_create() local
430 chan = kzalloc(sizeof(*chan), GFP_ATOMIC); l2cap_chan_create()
431 if (!chan) l2cap_chan_create()
434 mutex_init(&chan->lock); l2cap_chan_create()
437 atomic_set(&chan->nesting, L2CAP_NESTING_NORMAL); l2cap_chan_create()
440 list_add(&chan->global_l, &chan_list); l2cap_chan_create()
443 INIT_DELAYED_WORK(&chan->chan_timer, l2cap_chan_timeout); l2cap_chan_create()
445 chan->state = BT_OPEN; l2cap_chan_create()
447 kref_init(&chan->kref); l2cap_chan_create()
450 set_bit(CONF_NOT_COMPLETE, &chan->conf_state); l2cap_chan_create()
452 BT_DBG("chan %p", chan); l2cap_chan_create()
454 return chan; l2cap_chan_create()
460 struct l2cap_chan *chan = container_of(kref, struct l2cap_chan, kref); l2cap_chan_destroy() local
462 BT_DBG("chan %p", chan); l2cap_chan_destroy()
465 list_del(&chan->global_l); l2cap_chan_destroy()
468 kfree(chan); l2cap_chan_destroy()
473 BT_DBG("chan %p orig refcnt %d", c, atomic_read(&c->kref.refcount)); l2cap_chan_hold()
480 BT_DBG("chan %p orig refcnt %d", c, atomic_read(&c->kref.refcount)); l2cap_chan_put()
486 void l2cap_chan_set_defaults(struct l2cap_chan *chan) l2cap_chan_set_defaults() argument
488 chan->fcs = L2CAP_FCS_CRC16; l2cap_chan_set_defaults()
489 chan->max_tx = L2CAP_DEFAULT_MAX_TX; l2cap_chan_set_defaults()
490 chan->tx_win = L2CAP_DEFAULT_TX_WINDOW; l2cap_chan_set_defaults()
491 chan->tx_win_max = L2CAP_DEFAULT_TX_WINDOW; l2cap_chan_set_defaults()
492 chan->remote_max_tx = chan->max_tx; l2cap_chan_set_defaults()
493 chan->remote_tx_win = chan->tx_win; l2cap_chan_set_defaults()
494 chan->ack_win = L2CAP_DEFAULT_TX_WINDOW; l2cap_chan_set_defaults()
495 chan->sec_level = BT_SECURITY_LOW; l2cap_chan_set_defaults()
496 chan->flush_to = L2CAP_DEFAULT_FLUSH_TO; l2cap_chan_set_defaults()
497 chan->retrans_timeout = L2CAP_DEFAULT_RETRANS_TO; l2cap_chan_set_defaults()
498 chan->monitor_timeout = L2CAP_DEFAULT_MONITOR_TO; l2cap_chan_set_defaults()
499 chan->conf_state = 0; l2cap_chan_set_defaults()
501 set_bit(FLAG_FORCE_ACTIVE, &chan->flags); l2cap_chan_set_defaults()
505 static void l2cap_le_flowctl_init(struct l2cap_chan *chan) l2cap_le_flowctl_init() argument
507 chan->sdu = NULL; l2cap_le_flowctl_init()
508 chan->sdu_last_frag = NULL; l2cap_le_flowctl_init()
509 chan->sdu_len = 0; l2cap_le_flowctl_init()
510 chan->tx_credits = 0; l2cap_le_flowctl_init()
511 chan->rx_credits = le_max_credits; l2cap_le_flowctl_init()
512 chan->mps = min_t(u16, chan->imtu, le_default_mps); l2cap_le_flowctl_init()
514 skb_queue_head_init(&chan->tx_q); l2cap_le_flowctl_init()
517 void __l2cap_chan_add(struct l2cap_conn *conn, struct l2cap_chan *chan) __l2cap_chan_add() argument
520 __le16_to_cpu(chan->psm), chan->dcid); __l2cap_chan_add()
524 chan->conn = conn; __l2cap_chan_add()
526 switch (chan->chan_type) { __l2cap_chan_add()
529 chan->scid = l2cap_alloc_cid(conn); __l2cap_chan_add()
531 chan->omtu = L2CAP_DEFAULT_MTU; __l2cap_chan_add()
536 chan->scid = L2CAP_CID_CONN_LESS; __l2cap_chan_add()
537 chan->dcid = L2CAP_CID_CONN_LESS; __l2cap_chan_add()
538 chan->omtu = L2CAP_DEFAULT_MTU; __l2cap_chan_add()
547 chan->scid = L2CAP_CID_SIGNALING; __l2cap_chan_add()
548 chan->dcid = L2CAP_CID_SIGNALING; __l2cap_chan_add()
549 chan->omtu = L2CAP_DEFAULT_MTU; __l2cap_chan_add()
552 chan->local_id = L2CAP_BESTEFFORT_ID; __l2cap_chan_add()
553 chan->local_stype = L2CAP_SERV_BESTEFFORT; __l2cap_chan_add()
554 chan->local_msdu = L2CAP_DEFAULT_MAX_SDU_SIZE; __l2cap_chan_add()
555 chan->local_sdu_itime = L2CAP_DEFAULT_SDU_ITIME; __l2cap_chan_add()
556 chan->local_acc_lat = L2CAP_DEFAULT_ACC_LAT; __l2cap_chan_add()
557 chan->local_flush_to = L2CAP_EFS_DEFAULT_FLUSH_TO; __l2cap_chan_add()
559 l2cap_chan_hold(chan); __l2cap_chan_add()
562 if (chan->chan_type != L2CAP_CHAN_FIXED || __l2cap_chan_add()
563 test_bit(FLAG_HOLD_HCI_CONN, &chan->flags)) __l2cap_chan_add()
566 list_add(&chan->list, &conn->chan_l); __l2cap_chan_add()
569 void l2cap_chan_add(struct l2cap_conn *conn, struct l2cap_chan *chan) l2cap_chan_add() argument
572 __l2cap_chan_add(conn, chan); l2cap_chan_add()
576 void l2cap_chan_del(struct l2cap_chan *chan, int err) l2cap_chan_del() argument
578 struct l2cap_conn *conn = chan->conn; l2cap_chan_del()
580 __clear_chan_timer(chan); l2cap_chan_del()
582 BT_DBG("chan %p, conn %p, err %d, state %s", chan, conn, err, l2cap_chan_del()
583 state_to_string(chan->state)); l2cap_chan_del()
585 chan->ops->teardown(chan, err); l2cap_chan_del()
590 list_del(&chan->list); l2cap_chan_del()
592 l2cap_chan_put(chan); l2cap_chan_del()
594 chan->conn = NULL; l2cap_chan_del()
600 if (chan->chan_type != L2CAP_CHAN_FIXED || l2cap_chan_del()
601 test_bit(FLAG_HOLD_HCI_CONN, &chan->flags)) l2cap_chan_del()
604 if (mgr && mgr->bredr_chan == chan) l2cap_chan_del()
608 if (chan->hs_hchan) { l2cap_chan_del()
609 struct hci_chan *hs_hchan = chan->hs_hchan; l2cap_chan_del()
611 BT_DBG("chan %p disconnect hs_hchan %p", chan, hs_hchan); l2cap_chan_del()
615 if (test_bit(CONF_NOT_COMPLETE, &chan->conf_state)) l2cap_chan_del()
618 switch(chan->mode) { l2cap_chan_del()
623 skb_queue_purge(&chan->tx_q); l2cap_chan_del()
627 __clear_retrans_timer(chan); l2cap_chan_del()
628 __clear_monitor_timer(chan); l2cap_chan_del()
629 __clear_ack_timer(chan); l2cap_chan_del()
631 skb_queue_purge(&chan->srej_q); l2cap_chan_del()
633 l2cap_seq_list_free(&chan->srej_list); l2cap_chan_del()
634 l2cap_seq_list_free(&chan->retrans_list); l2cap_chan_del()
639 skb_queue_purge(&chan->tx_q); l2cap_chan_del()
652 struct l2cap_chan *chan; l2cap_conn_update_id_addr() local
656 list_for_each_entry(chan, &conn->chan_l, list) { l2cap_conn_update_id_addr()
657 l2cap_chan_lock(chan); l2cap_conn_update_id_addr()
658 bacpy(&chan->dst, &hcon->dst); l2cap_conn_update_id_addr()
659 chan->dst_type = bdaddr_dst_type(hcon); l2cap_conn_update_id_addr()
660 l2cap_chan_unlock(chan); l2cap_conn_update_id_addr()
666 static void l2cap_chan_le_connect_reject(struct l2cap_chan *chan) l2cap_chan_le_connect_reject() argument
668 struct l2cap_conn *conn = chan->conn; l2cap_chan_le_connect_reject()
672 if (test_bit(FLAG_DEFER_SETUP, &chan->flags)) l2cap_chan_le_connect_reject()
677 l2cap_state_change(chan, BT_DISCONN); l2cap_chan_le_connect_reject()
679 rsp.dcid = cpu_to_le16(chan->scid); l2cap_chan_le_connect_reject()
680 rsp.mtu = cpu_to_le16(chan->imtu); l2cap_chan_le_connect_reject()
681 rsp.mps = cpu_to_le16(chan->mps); l2cap_chan_le_connect_reject()
682 rsp.credits = cpu_to_le16(chan->rx_credits); l2cap_chan_le_connect_reject()
685 l2cap_send_cmd(conn, chan->ident, L2CAP_LE_CONN_RSP, sizeof(rsp), l2cap_chan_le_connect_reject()
689 static void l2cap_chan_connect_reject(struct l2cap_chan *chan) l2cap_chan_connect_reject() argument
691 struct l2cap_conn *conn = chan->conn; l2cap_chan_connect_reject()
695 if (test_bit(FLAG_DEFER_SETUP, &chan->flags)) l2cap_chan_connect_reject()
700 l2cap_state_change(chan, BT_DISCONN); l2cap_chan_connect_reject()
702 rsp.scid = cpu_to_le16(chan->dcid); l2cap_chan_connect_reject()
703 rsp.dcid = cpu_to_le16(chan->scid); l2cap_chan_connect_reject()
707 l2cap_send_cmd(conn, chan->ident, L2CAP_CONN_RSP, sizeof(rsp), &rsp); l2cap_chan_connect_reject()
710 void l2cap_chan_close(struct l2cap_chan *chan, int reason) l2cap_chan_close() argument
712 struct l2cap_conn *conn = chan->conn; l2cap_chan_close()
714 BT_DBG("chan %p state %s", chan, state_to_string(chan->state)); l2cap_chan_close()
716 switch (chan->state) { l2cap_chan_close()
718 chan->ops->teardown(chan, 0); l2cap_chan_close()
723 if (chan->chan_type == L2CAP_CHAN_CONN_ORIENTED) { l2cap_chan_close()
724 __set_chan_timer(chan, chan->ops->get_sndtimeo(chan)); l2cap_chan_close()
725 l2cap_send_disconn_req(chan, reason); l2cap_chan_close()
727 l2cap_chan_del(chan, reason); l2cap_chan_close()
731 if (chan->chan_type == L2CAP_CHAN_CONN_ORIENTED) { l2cap_chan_close()
733 l2cap_chan_connect_reject(chan); l2cap_chan_close()
735 l2cap_chan_le_connect_reject(chan); l2cap_chan_close()
738 l2cap_chan_del(chan, reason); l2cap_chan_close()
743 l2cap_chan_del(chan, reason); l2cap_chan_close()
747 chan->ops->teardown(chan, 0); l2cap_chan_close()
753 static inline u8 l2cap_get_auth_type(struct l2cap_chan *chan) l2cap_get_auth_type() argument
755 switch (chan->chan_type) { l2cap_get_auth_type()
757 switch (chan->sec_level) { l2cap_get_auth_type()
768 if (chan->psm == cpu_to_le16(L2CAP_PSM_3DSP)) { l2cap_get_auth_type()
769 if (chan->sec_level == BT_SECURITY_LOW) l2cap_get_auth_type()
770 chan->sec_level = BT_SECURITY_SDP; l2cap_get_auth_type()
772 if (chan->sec_level == BT_SECURITY_HIGH || l2cap_get_auth_type()
773 chan->sec_level == BT_SECURITY_FIPS) l2cap_get_auth_type()
779 if (chan->psm == cpu_to_le16(L2CAP_PSM_SDP)) { l2cap_get_auth_type()
780 if (chan->sec_level == BT_SECURITY_LOW) l2cap_get_auth_type()
781 chan->sec_level = BT_SECURITY_SDP; l2cap_get_auth_type()
783 if (chan->sec_level == BT_SECURITY_HIGH || l2cap_get_auth_type()
784 chan->sec_level == BT_SECURITY_FIPS) l2cap_get_auth_type()
791 switch (chan->sec_level) { l2cap_get_auth_type()
805 int l2cap_chan_check_security(struct l2cap_chan *chan, bool initiator) l2cap_chan_check_security() argument
807 struct l2cap_conn *conn = chan->conn; l2cap_chan_check_security()
811 return smp_conn_security(conn->hcon, chan->sec_level); l2cap_chan_check_security()
813 auth_type = l2cap_get_auth_type(chan); l2cap_chan_check_security()
815 return hci_conn_security(conn->hcon, chan->sec_level, auth_type, l2cap_chan_check_security()
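
l2cap_get_auth_type() collapses channel type, PSM and requested security level into one HCI authentication requirement, quietly downgrading SDP-bound channels to BT_SECURITY_SDP along the way. A compressed sketch of that decision shape, under the assumption that only two axes matter (bonding and MITM protection); the enum values are made up, not the kernel's HCI constants:

#include <stdio.h>

enum sec { SEC_SDP, SEC_LOW, SEC_MEDIUM, SEC_HIGH, SEC_FIPS };
enum auth { AUTH_NO_BONDING, AUTH_NO_BONDING_MITM,
            AUTH_GENERAL_BONDING, AUTH_GENERAL_BONDING_MITM };

/* Hypothetical reduction of the kernel's switch ladder: connection-
 * oriented channels bond, raw/connectionless ones do not, and HIGH or
 * FIPS security always demands MITM protection. */
static enum auth get_auth_type(int conn_oriented, enum sec level)
{
    int mitm = (level == SEC_HIGH || level == SEC_FIPS);

    if (!conn_oriented)
        return mitm ? AUTH_NO_BONDING_MITM : AUTH_NO_BONDING;
    return mitm ? AUTH_GENERAL_BONDING_MITM : AUTH_GENERAL_BONDING;
}

int main(void)
{
    printf("%d\n", get_auth_type(1, SEC_HIGH));  /* 3: bonding + MITM */
    printf("%d\n", get_auth_type(0, SEC_LOW));   /* 0: no bonding     */
    return 0;
}
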
866 static bool __chan_is_moving(struct l2cap_chan *chan) __chan_is_moving() argument
868 return chan->move_state != L2CAP_MOVE_STABLE && __chan_is_moving()
869 chan->move_state != L2CAP_MOVE_WAIT_PREPARE; __chan_is_moving()
872 static void l2cap_do_send(struct l2cap_chan *chan, struct sk_buff *skb) l2cap_do_send() argument
874 struct hci_conn *hcon = chan->conn->hcon; l2cap_do_send()
877 BT_DBG("chan %p, skb %p len %d priority %u", chan, skb, skb->len, l2cap_do_send()
880 if (chan->hs_hcon && !__chan_is_moving(chan)) { l2cap_do_send()
881 if (chan->hs_hchan) l2cap_do_send()
882 hci_send_acl(chan->hs_hchan, skb, ACL_COMPLETE); l2cap_do_send()
894 (!test_bit(FLAG_FLUSHABLE, &chan->flags) && l2cap_do_send()
900 bt_cb(skb)->force_active = test_bit(FLAG_FORCE_ACTIVE, &chan->flags); l2cap_do_send()
901 hci_send_acl(chan->conn->hchan, skb, flags); l2cap_do_send()
952 static inline void __unpack_control(struct l2cap_chan *chan, __unpack_control() argument
955 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) { __unpack_control()
1004 static inline void __pack_control(struct l2cap_chan *chan, __pack_control() argument
1008 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) { __pack_control()
1017 static inline unsigned int __ertm_hdr_size(struct l2cap_chan *chan) __ertm_hdr_size() argument
1019 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) __ertm_hdr_size()
1025 static struct sk_buff *l2cap_create_sframe_pdu(struct l2cap_chan *chan, l2cap_create_sframe_pdu() argument
1030 int hlen = __ertm_hdr_size(chan); l2cap_create_sframe_pdu()
1032 if (chan->fcs == L2CAP_FCS_CRC16) l2cap_create_sframe_pdu()
1042 lh->cid = cpu_to_le16(chan->dcid); l2cap_create_sframe_pdu()
1044 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) l2cap_create_sframe_pdu()
1049 if (chan->fcs == L2CAP_FCS_CRC16) { l2cap_create_sframe_pdu()
1058 static void l2cap_send_sframe(struct l2cap_chan *chan, l2cap_send_sframe() argument
1064 BT_DBG("chan %p, control %p", chan, control); l2cap_send_sframe()
1069 if (__chan_is_moving(chan)) l2cap_send_sframe()
1072 if (test_and_clear_bit(CONN_SEND_FBIT, &chan->conn_state) && l2cap_send_sframe()
1077 clear_bit(CONN_RNR_SENT, &chan->conn_state); l2cap_send_sframe()
1079 set_bit(CONN_RNR_SENT, &chan->conn_state); l2cap_send_sframe()
1082 chan->last_acked_seq = control->reqseq; l2cap_send_sframe()
1083 __clear_ack_timer(chan); l2cap_send_sframe()
1089 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) l2cap_send_sframe()
1094 skb = l2cap_create_sframe_pdu(chan, control_field); l2cap_send_sframe()
1096 l2cap_do_send(chan, skb); l2cap_send_sframe()
1099 static void l2cap_send_rr_or_rnr(struct l2cap_chan *chan, bool poll) l2cap_send_rr_or_rnr() argument
1103 BT_DBG("chan %p, poll %d", chan, poll); l2cap_send_rr_or_rnr()
1109 if (test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) l2cap_send_rr_or_rnr()
1114 control.reqseq = chan->buffer_seq; l2cap_send_rr_or_rnr()
1115 l2cap_send_sframe(chan, &control); l2cap_send_rr_or_rnr()
1118 static inline int __l2cap_no_conn_pending(struct l2cap_chan *chan) __l2cap_no_conn_pending() argument
1120 if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED) __l2cap_no_conn_pending()
1123 return !test_bit(CONF_CONNECT_PEND, &chan->conf_state); __l2cap_no_conn_pending()
1126 static bool __amp_capable(struct l2cap_chan *chan) __amp_capable() argument
1128 struct l2cap_conn *conn = chan->conn; __amp_capable()
1148 if (chan->chan_policy == BT_CHANNEL_POLICY_AMP_PREFERRED) __amp_capable()
1154 static bool l2cap_check_efs(struct l2cap_chan *chan) l2cap_check_efs() argument
1160 void l2cap_send_conn_req(struct l2cap_chan *chan) l2cap_send_conn_req() argument
1162 struct l2cap_conn *conn = chan->conn; l2cap_send_conn_req()
1165 req.scid = cpu_to_le16(chan->scid); l2cap_send_conn_req()
1166 req.psm = chan->psm; l2cap_send_conn_req()
1168 chan->ident = l2cap_get_ident(conn); l2cap_send_conn_req()
1170 set_bit(CONF_CONNECT_PEND, &chan->conf_state); l2cap_send_conn_req()
1172 l2cap_send_cmd(conn, chan->ident, L2CAP_CONN_REQ, sizeof(req), &req); l2cap_send_conn_req()
1175 static void l2cap_send_create_chan_req(struct l2cap_chan *chan, u8 amp_id) l2cap_send_create_chan_req() argument
1178 req.scid = cpu_to_le16(chan->scid); l2cap_send_create_chan_req()
1179 req.psm = chan->psm; l2cap_send_create_chan_req()
1182 chan->ident = l2cap_get_ident(chan->conn); l2cap_send_create_chan_req()
1184 l2cap_send_cmd(chan->conn, chan->ident, L2CAP_CREATE_CHAN_REQ, l2cap_send_create_chan_req()
1188 static void l2cap_move_setup(struct l2cap_chan *chan) l2cap_move_setup() argument
1192 BT_DBG("chan %p", chan); l2cap_move_setup()
1194 if (chan->mode != L2CAP_MODE_ERTM) l2cap_move_setup()
1197 __clear_retrans_timer(chan); l2cap_move_setup()
1198 __clear_monitor_timer(chan); l2cap_move_setup()
1199 __clear_ack_timer(chan); l2cap_move_setup()
1201 chan->retry_count = 0; l2cap_move_setup()
1202 skb_queue_walk(&chan->tx_q, skb) { l2cap_move_setup()
1209 chan->expected_tx_seq = chan->buffer_seq; l2cap_move_setup()
1211 clear_bit(CONN_REJ_ACT, &chan->conn_state); l2cap_move_setup()
1212 clear_bit(CONN_SREJ_ACT, &chan->conn_state); l2cap_move_setup()
1213 l2cap_seq_list_clear(&chan->retrans_list); l2cap_move_setup()
1214 l2cap_seq_list_clear(&chan->srej_list); l2cap_move_setup()
1215 skb_queue_purge(&chan->srej_q); l2cap_move_setup()
1217 chan->tx_state = L2CAP_TX_STATE_XMIT; l2cap_move_setup()
1218 chan->rx_state = L2CAP_RX_STATE_MOVE; l2cap_move_setup()
1220 set_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_move_setup()
1223 static void l2cap_move_done(struct l2cap_chan *chan) l2cap_move_done() argument
1225 u8 move_role = chan->move_role; l2cap_move_done()
1226 BT_DBG("chan %p", chan); l2cap_move_done()
1228 chan->move_state = L2CAP_MOVE_STABLE; l2cap_move_done()
1229 chan->move_role = L2CAP_MOVE_ROLE_NONE; l2cap_move_done()
1231 if (chan->mode != L2CAP_MODE_ERTM) l2cap_move_done()
1236 l2cap_tx(chan, NULL, NULL, L2CAP_EV_EXPLICIT_POLL); l2cap_move_done()
1237 chan->rx_state = L2CAP_RX_STATE_WAIT_F; l2cap_move_done()
1240 chan->rx_state = L2CAP_RX_STATE_WAIT_P; l2cap_move_done()
1245 static void l2cap_chan_ready(struct l2cap_chan *chan) l2cap_chan_ready() argument
1251 if (chan->state == BT_CONNECTED) l2cap_chan_ready()
1255 chan->conf_state = 0; l2cap_chan_ready()
1256 __clear_chan_timer(chan); l2cap_chan_ready()
1258 if (chan->mode == L2CAP_MODE_LE_FLOWCTL && !chan->tx_credits) l2cap_chan_ready()
1259 chan->ops->suspend(chan); l2cap_chan_ready()
1261 chan->state = BT_CONNECTED; l2cap_chan_ready()
1263 chan->ops->ready(chan); l2cap_chan_ready()
1266 static void l2cap_le_connect(struct l2cap_chan *chan) l2cap_le_connect() argument
1268 struct l2cap_conn *conn = chan->conn; l2cap_le_connect()
1271 if (test_and_set_bit(FLAG_LE_CONN_REQ_SENT, &chan->flags)) l2cap_le_connect()
1274 req.psm = chan->psm; l2cap_le_connect()
1275 req.scid = cpu_to_le16(chan->scid); l2cap_le_connect()
1276 req.mtu = cpu_to_le16(chan->imtu); l2cap_le_connect()
1277 req.mps = cpu_to_le16(chan->mps); l2cap_le_connect()
1278 req.credits = cpu_to_le16(chan->rx_credits); l2cap_le_connect()
1280 chan->ident = l2cap_get_ident(conn); l2cap_le_connect()
1282 l2cap_send_cmd(conn, chan->ident, L2CAP_LE_CONN_REQ, l2cap_le_connect()
1286 static void l2cap_le_start(struct l2cap_chan *chan) l2cap_le_start() argument
1288 struct l2cap_conn *conn = chan->conn; l2cap_le_start()
1290 if (!smp_conn_security(conn->hcon, chan->sec_level)) l2cap_le_start()
1293 if (!chan->psm) { l2cap_le_start()
1294 l2cap_chan_ready(chan); l2cap_le_start()
1298 if (chan->state == BT_CONNECT) l2cap_le_start()
1299 l2cap_le_connect(chan); l2cap_le_start()
1302 static void l2cap_start_connection(struct l2cap_chan *chan) l2cap_start_connection() argument
1304 if (__amp_capable(chan)) { l2cap_start_connection()
1305 BT_DBG("chan %p AMP capable: discover AMPs", chan); l2cap_start_connection()
1306 a2mp_discover_amp(chan); l2cap_start_connection()
1307 } else if (chan->conn->hcon->type == LE_LINK) { l2cap_start_connection()
1308 l2cap_le_start(chan); l2cap_start_connection()
1310 l2cap_send_conn_req(chan); l2cap_start_connection()
1332 static void l2cap_do_start(struct l2cap_chan *chan) l2cap_do_start() argument
1334 struct l2cap_conn *conn = chan->conn; l2cap_do_start()
1337 l2cap_le_start(chan); l2cap_do_start()
1349 if (l2cap_chan_check_security(chan, true) && l2cap_do_start()
1350 __l2cap_no_conn_pending(chan)) l2cap_do_start()
1351 l2cap_start_connection(chan); l2cap_do_start()
1370 static void l2cap_send_disconn_req(struct l2cap_chan *chan, int err) l2cap_send_disconn_req() argument
1372 struct l2cap_conn *conn = chan->conn; l2cap_send_disconn_req()
1378 if (chan->mode == L2CAP_MODE_ERTM && chan->state == BT_CONNECTED) { l2cap_send_disconn_req()
1379 __clear_retrans_timer(chan); l2cap_send_disconn_req()
1380 __clear_monitor_timer(chan); l2cap_send_disconn_req()
1381 __clear_ack_timer(chan); l2cap_send_disconn_req()
1384 if (chan->scid == L2CAP_CID_A2MP) { l2cap_send_disconn_req()
1385 l2cap_state_change(chan, BT_DISCONN); l2cap_send_disconn_req()
1389 req.dcid = cpu_to_le16(chan->dcid); l2cap_send_disconn_req()
1390 req.scid = cpu_to_le16(chan->scid); l2cap_send_disconn_req()
1394 l2cap_state_change_and_error(chan, BT_DISCONN, err); l2cap_send_disconn_req()
1400 struct l2cap_chan *chan, *tmp; l2cap_conn_start() local
1406 list_for_each_entry_safe(chan, tmp, &conn->chan_l, list) { l2cap_conn_start()
1407 l2cap_chan_lock(chan); l2cap_conn_start()
1409 if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED) { l2cap_conn_start()
1410 l2cap_chan_ready(chan); l2cap_conn_start()
1411 l2cap_chan_unlock(chan); l2cap_conn_start()
1415 if (chan->state == BT_CONNECT) { l2cap_conn_start()
1416 if (!l2cap_chan_check_security(chan, true) || l2cap_conn_start()
1417 !__l2cap_no_conn_pending(chan)) { l2cap_conn_start()
1418 l2cap_chan_unlock(chan); l2cap_conn_start()
1422 if (!l2cap_mode_supported(chan->mode, conn->feat_mask) l2cap_conn_start()
1424 &chan->conf_state)) { l2cap_conn_start()
1425 l2cap_chan_close(chan, ECONNRESET); l2cap_conn_start()
1426 l2cap_chan_unlock(chan); l2cap_conn_start()
1430 l2cap_start_connection(chan); l2cap_conn_start()
1432 } else if (chan->state == BT_CONNECT2) { l2cap_conn_start()
1435 rsp.scid = cpu_to_le16(chan->dcid); l2cap_conn_start()
1436 rsp.dcid = cpu_to_le16(chan->scid); l2cap_conn_start()
1438 if (l2cap_chan_check_security(chan, false)) { l2cap_conn_start()
1439 if (test_bit(FLAG_DEFER_SETUP, &chan->flags)) { l2cap_conn_start()
1442 chan->ops->defer(chan); l2cap_conn_start()
1445 l2cap_state_change(chan, BT_CONFIG); l2cap_conn_start()
1454 l2cap_send_cmd(conn, chan->ident, L2CAP_CONN_RSP, l2cap_conn_start()
1457 if (test_bit(CONF_REQ_SENT, &chan->conf_state) || l2cap_conn_start()
1459 l2cap_chan_unlock(chan); l2cap_conn_start()
1463 set_bit(CONF_REQ_SENT, &chan->conf_state); l2cap_conn_start()
1465 l2cap_build_conf_req(chan, buf), buf); l2cap_conn_start()
1466 chan->num_conf_req++; l2cap_conn_start()
1469 l2cap_chan_unlock(chan); l2cap_conn_start()
1510 struct l2cap_chan *chan; l2cap_conn_ready() local
1520 list_for_each_entry(chan, &conn->chan_l, list) { l2cap_conn_ready()
1522 l2cap_chan_lock(chan); l2cap_conn_ready()
1524 if (chan->scid == L2CAP_CID_A2MP) { l2cap_conn_ready()
1525 l2cap_chan_unlock(chan); l2cap_conn_ready()
1530 l2cap_le_start(chan); l2cap_conn_ready()
1531 } else if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED) { l2cap_conn_ready()
1533 l2cap_chan_ready(chan); l2cap_conn_ready()
1534 } else if (chan->state == BT_CONNECT) { l2cap_conn_ready()
1535 l2cap_do_start(chan); l2cap_conn_ready()
1538 l2cap_chan_unlock(chan); l2cap_conn_ready()
1552 struct l2cap_chan *chan; l2cap_conn_unreliable() local
1558 list_for_each_entry(chan, &conn->chan_l, list) { l2cap_conn_unreliable()
1559 if (test_bit(FLAG_FORCE_RELIABLE, &chan->flags)) l2cap_conn_unreliable()
1560 l2cap_chan_set_err(chan, err); l2cap_conn_unreliable()
1663 struct l2cap_chan *chan, *l; l2cap_conn_del() local
1692 list_for_each_entry_safe(chan, l, &conn->chan_l, list) { l2cap_conn_del()
1693 l2cap_chan_hold(chan); l2cap_conn_del()
1694 l2cap_chan_lock(chan); l2cap_conn_del()
1696 l2cap_chan_del(chan, err); l2cap_conn_del()
1698 l2cap_chan_unlock(chan); l2cap_conn_del()
1700 chan->ops->close(chan); l2cap_conn_del()
1701 l2cap_chan_put(chan); l2cap_conn_del()
1793 struct l2cap_chan *chan = container_of(work, struct l2cap_chan, l2cap_monitor_timeout() local
1796 BT_DBG("chan %p", chan); l2cap_monitor_timeout()
1798 l2cap_chan_lock(chan); l2cap_monitor_timeout()
1800 if (!chan->conn) { l2cap_monitor_timeout()
1801 l2cap_chan_unlock(chan); l2cap_monitor_timeout()
1802 l2cap_chan_put(chan); l2cap_monitor_timeout()
1806 l2cap_tx(chan, NULL, NULL, L2CAP_EV_MONITOR_TO); l2cap_monitor_timeout()
1808 l2cap_chan_unlock(chan); l2cap_monitor_timeout()
1809 l2cap_chan_put(chan); l2cap_monitor_timeout()
1814 struct l2cap_chan *chan = container_of(work, struct l2cap_chan, l2cap_retrans_timeout() local
1817 BT_DBG("chan %p", chan); l2cap_retrans_timeout()
1819 l2cap_chan_lock(chan); l2cap_retrans_timeout()
1821 if (!chan->conn) { l2cap_retrans_timeout()
1822 l2cap_chan_unlock(chan); l2cap_retrans_timeout()
1823 l2cap_chan_put(chan); l2cap_retrans_timeout()
1827 l2cap_tx(chan, NULL, NULL, L2CAP_EV_RETRANS_TO); l2cap_retrans_timeout()
1828 l2cap_chan_unlock(chan); l2cap_retrans_timeout()
1829 l2cap_chan_put(chan); l2cap_retrans_timeout()
1832 static void l2cap_streaming_send(struct l2cap_chan *chan, l2cap_streaming_send() argument
1838 BT_DBG("chan %p, skbs %p", chan, skbs); l2cap_streaming_send()
1840 if (__chan_is_moving(chan)) l2cap_streaming_send()
1843 skb_queue_splice_tail_init(skbs, &chan->tx_q); l2cap_streaming_send()
1845 while (!skb_queue_empty(&chan->tx_q)) { l2cap_streaming_send()
1847 skb = skb_dequeue(&chan->tx_q); l2cap_streaming_send()
1853 control->txseq = chan->next_tx_seq; l2cap_streaming_send()
1855 __pack_control(chan, control, skb); l2cap_streaming_send()
1857 if (chan->fcs == L2CAP_FCS_CRC16) { l2cap_streaming_send()
1862 l2cap_do_send(chan, skb); l2cap_streaming_send()
1866 chan->next_tx_seq = __next_seq(chan, chan->next_tx_seq); l2cap_streaming_send()
1867 chan->frames_sent++; l2cap_streaming_send()
1871 static int l2cap_ertm_send(struct l2cap_chan *chan) l2cap_ertm_send() argument
1877 BT_DBG("chan %p", chan); l2cap_ertm_send()
1879 if (chan->state != BT_CONNECTED) l2cap_ertm_send()
1882 if (test_bit(CONN_REMOTE_BUSY, &chan->conn_state)) l2cap_ertm_send()
1885 if (__chan_is_moving(chan)) l2cap_ertm_send()
1888 while (chan->tx_send_head && l2cap_ertm_send()
1889 chan->unacked_frames < chan->remote_tx_win && l2cap_ertm_send()
1890 chan->tx_state == L2CAP_TX_STATE_XMIT) { l2cap_ertm_send()
1892 skb = chan->tx_send_head; l2cap_ertm_send()
1897 if (test_and_clear_bit(CONN_SEND_FBIT, &chan->conn_state)) l2cap_ertm_send()
1900 control->reqseq = chan->buffer_seq; l2cap_ertm_send()
1901 chan->last_acked_seq = chan->buffer_seq; l2cap_ertm_send()
1902 control->txseq = chan->next_tx_seq; l2cap_ertm_send()
1904 __pack_control(chan, control, skb); l2cap_ertm_send()
1906 if (chan->fcs == L2CAP_FCS_CRC16) { l2cap_ertm_send()
1919 __set_retrans_timer(chan); l2cap_ertm_send()
1921 chan->next_tx_seq = __next_seq(chan, chan->next_tx_seq); l2cap_ertm_send()
1922 chan->unacked_frames++; l2cap_ertm_send()
1923 chan->frames_sent++; l2cap_ertm_send()
1926 if (skb_queue_is_last(&chan->tx_q, skb)) l2cap_ertm_send()
1927 chan->tx_send_head = NULL; l2cap_ertm_send()
1929 chan->tx_send_head = skb_queue_next(&chan->tx_q, skb); l2cap_ertm_send()
1931 l2cap_do_send(chan, tx_skb); l2cap_ertm_send()
1936 chan->unacked_frames, skb_queue_len(&chan->tx_q)); l2cap_ertm_send()
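
The loop in l2cap_ertm_send() is the ERTM transmit gate: I-frames leave tx_q only while the peer's advertised window still has room (unacked_frames < remote_tx_win) and the transmitter sits in its XMIT state. A toy model of that gate with made-up numbers:

#include <stdio.h>

int main(void)
{
    unsigned queued = 10;              /* frames waiting in tx_q        */
    unsigned unacked = 0;              /* frames sent but not acked yet */
    const unsigned remote_tx_win = 4;  /* peer's advertised window      */
    unsigned next_tx_seq = 0;

    /* Mirror of the while-condition in l2cap_ertm_send(): stop as
     * soon as the window fills, even though data is still queued. */
    while (queued && unacked < remote_tx_win) {
        printf("send txseq %u\n", next_tx_seq);
        next_tx_seq = (next_tx_seq + 1) % 64;   /* 6-bit seq space */
        unacked++;
        queued--;
    }
    printf("stalled: queued=%u unacked=%u\n", queued, unacked);
    return 0;
}
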
1941 static void l2cap_ertm_resend(struct l2cap_chan *chan) l2cap_ertm_resend() argument
1948 BT_DBG("chan %p", chan); l2cap_ertm_resend()
1950 if (test_bit(CONN_REMOTE_BUSY, &chan->conn_state)) l2cap_ertm_resend()
1953 if (__chan_is_moving(chan)) l2cap_ertm_resend()
1956 while (chan->retrans_list.head != L2CAP_SEQ_LIST_CLEAR) { l2cap_ertm_resend()
1957 seq = l2cap_seq_list_pop(&chan->retrans_list); l2cap_ertm_resend()
1959 skb = l2cap_ertm_seq_in_queue(&chan->tx_q, seq); l2cap_ertm_resend()
1969 if (chan->max_tx != 0 && l2cap_ertm_resend()
1970 bt_cb(skb)->l2cap.retries > chan->max_tx) { l2cap_ertm_resend()
1971 BT_DBG("Retry limit exceeded (%d)", chan->max_tx); l2cap_ertm_resend()
1972 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_ertm_resend()
1973 l2cap_seq_list_clear(&chan->retrans_list); l2cap_ertm_resend()
1977 control.reqseq = chan->buffer_seq; l2cap_ertm_resend()
1978 if (test_and_clear_bit(CONN_SEND_FBIT, &chan->conn_state)) l2cap_ertm_resend()
1993 l2cap_seq_list_clear(&chan->retrans_list); l2cap_ertm_resend()
1998 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) { l2cap_ertm_resend()
2007 if (chan->fcs == L2CAP_FCS_CRC16) { l2cap_ertm_resend()
2014 l2cap_do_send(chan, tx_skb); l2cap_ertm_resend()
2018 chan->last_acked_seq = chan->buffer_seq; l2cap_ertm_resend()
2022 static void l2cap_retransmit(struct l2cap_chan *chan, l2cap_retransmit() argument
2025 BT_DBG("chan %p, control %p", chan, control); l2cap_retransmit()
2027 l2cap_seq_list_append(&chan->retrans_list, control->reqseq); l2cap_retransmit()
2028 l2cap_ertm_resend(chan); l2cap_retransmit()
2031 static void l2cap_retransmit_all(struct l2cap_chan *chan, l2cap_retransmit_all() argument
2036 BT_DBG("chan %p, control %p", chan, control); l2cap_retransmit_all()
2039 set_bit(CONN_SEND_FBIT, &chan->conn_state); l2cap_retransmit_all()
2041 l2cap_seq_list_clear(&chan->retrans_list); l2cap_retransmit_all()
2043 if (test_bit(CONN_REMOTE_BUSY, &chan->conn_state)) l2cap_retransmit_all()
2046 if (chan->unacked_frames) { l2cap_retransmit_all()
2047 skb_queue_walk(&chan->tx_q, skb) { l2cap_retransmit_all()
2049 skb == chan->tx_send_head) l2cap_retransmit_all()
2053 skb_queue_walk_from(&chan->tx_q, skb) { l2cap_retransmit_all()
2054 if (skb == chan->tx_send_head) l2cap_retransmit_all()
2057 l2cap_seq_list_append(&chan->retrans_list, l2cap_retransmit_all()
2061 l2cap_ertm_resend(chan); l2cap_retransmit_all()
2065 static void l2cap_send_ack(struct l2cap_chan *chan) l2cap_send_ack() argument
2068 u16 frames_to_ack = __seq_offset(chan, chan->buffer_seq, l2cap_send_ack()
2069 chan->last_acked_seq); l2cap_send_ack()
2072 BT_DBG("chan %p last_acked_seq %d buffer_seq %d", l2cap_send_ack()
2073 chan, chan->last_acked_seq, chan->buffer_seq); l2cap_send_ack()
2078 if (test_bit(CONN_LOCAL_BUSY, &chan->conn_state) && l2cap_send_ack()
2079 chan->rx_state == L2CAP_RX_STATE_RECV) { l2cap_send_ack()
2080 __clear_ack_timer(chan); l2cap_send_ack()
2082 control.reqseq = chan->buffer_seq; l2cap_send_ack()
2083 l2cap_send_sframe(chan, &control); l2cap_send_ack()
2085 if (!test_bit(CONN_REMOTE_BUSY, &chan->conn_state)) { l2cap_send_ack()
2086 l2cap_ertm_send(chan); l2cap_send_ack()
2088 if (chan->buffer_seq == chan->last_acked_seq) l2cap_send_ack()
2095 threshold = chan->ack_win; l2cap_send_ack()
2103 __clear_ack_timer(chan); l2cap_send_ack()
2105 control.reqseq = chan->buffer_seq; l2cap_send_ack()
2106 l2cap_send_sframe(chan, &control); l2cap_send_ack()
2111 __set_ack_timer(chan); l2cap_send_ack()
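
l2cap_send_ack() either lets pending I-frames piggyback the acknowledgement, sends an explicit RR once enough frames are outstanding, or arms the ack timer and waits. A sketch of just the threshold decision; the kernel derives its threshold from ack_win and the current queue state, so the policy below is an assumption:

#include <stdio.h>

/* Returns 1 if an explicit RR frame should go out now, 0 if the ack
 * timer should be (re)armed instead. Hypothetical policy. */
static int should_send_rr(unsigned frames_to_ack, unsigned ack_win)
{
    unsigned threshold = ack_win / 2 + 1;   /* assumed fraction */
    return frames_to_ack >= threshold;
}

int main(void)
{
    printf("%d\n", should_send_rr(1, 8));   /* 0: wait for timer */
    printf("%d\n", should_send_rr(5, 8));   /* 1: ack now        */
    return 0;
}
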
2115 static inline int l2cap_skbuff_fromiovec(struct l2cap_chan *chan, l2cap_skbuff_fromiovec() argument
2119 struct l2cap_conn *conn = chan->conn; l2cap_skbuff_fromiovec()
2136 tmp = chan->ops->alloc_skb(chan, 0, count, l2cap_skbuff_fromiovec()
2159 static struct sk_buff *l2cap_create_connless_pdu(struct l2cap_chan *chan, l2cap_create_connless_pdu() argument
2162 struct l2cap_conn *conn = chan->conn; l2cap_create_connless_pdu()
2167 BT_DBG("chan %p psm 0x%2.2x len %zu", chan, l2cap_create_connless_pdu()
2168 __le16_to_cpu(chan->psm), len); l2cap_create_connless_pdu()
2172 skb = chan->ops->alloc_skb(chan, hlen, count, l2cap_create_connless_pdu()
2179 lh->cid = cpu_to_le16(chan->dcid); l2cap_create_connless_pdu()
2181 put_unaligned(chan->psm, (__le16 *) skb_put(skb, L2CAP_PSMLEN_SIZE)); l2cap_create_connless_pdu()
2183 err = l2cap_skbuff_fromiovec(chan, msg, len, count, skb); l2cap_create_connless_pdu()
2191 static struct sk_buff *l2cap_create_basic_pdu(struct l2cap_chan *chan, l2cap_create_basic_pdu() argument
2194 struct l2cap_conn *conn = chan->conn; l2cap_create_basic_pdu()
2199 BT_DBG("chan %p len %zu", chan, len); l2cap_create_basic_pdu()
2203 skb = chan->ops->alloc_skb(chan, L2CAP_HDR_SIZE, count, l2cap_create_basic_pdu()
2210 lh->cid = cpu_to_le16(chan->dcid); l2cap_create_basic_pdu()
2213 err = l2cap_skbuff_fromiovec(chan, msg, len, count, skb); l2cap_create_basic_pdu()
2221 static struct sk_buff *l2cap_create_iframe_pdu(struct l2cap_chan *chan, l2cap_create_iframe_pdu() argument
2225 struct l2cap_conn *conn = chan->conn; l2cap_create_iframe_pdu()
2230 BT_DBG("chan %p len %zu", chan, len); l2cap_create_iframe_pdu()
2235 hlen = __ertm_hdr_size(chan); l2cap_create_iframe_pdu()
2240 if (chan->fcs == L2CAP_FCS_CRC16) l2cap_create_iframe_pdu()
2245 skb = chan->ops->alloc_skb(chan, hlen, count, l2cap_create_iframe_pdu()
2252 lh->cid = cpu_to_le16(chan->dcid); l2cap_create_iframe_pdu()
2256 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) l2cap_create_iframe_pdu()
2264 err = l2cap_skbuff_fromiovec(chan, msg, len, count, skb); l2cap_create_iframe_pdu()
2270 bt_cb(skb)->l2cap.fcs = chan->fcs; l2cap_create_iframe_pdu()
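
When chan->fcs is L2CAP_FCS_CRC16, a 16-bit frame check sequence is appended to each I-frame and S-frame. The checksum is the spec's CRC-16 (polynomial x^16 + x^15 + x^2 + 1, zero initial value), the same parameters lib/crc16.c implements with the reflected polynomial 0xA001. A freestanding bitwise version for illustration (the sample PDU bytes are arbitrary):

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* CRC-16, reflected polynomial 0xA001, caller-supplied initial value. */
static uint16_t crc16(uint16_t crc, const uint8_t *p, size_t len)
{
    while (len--) {
        crc ^= *p++;
        for (int i = 0; i < 8; i++)
            crc = (crc & 1) ? (crc >> 1) ^ 0xA001 : crc >> 1;
    }
    return crc;
}

int main(void)
{
    const uint8_t pdu[] = { 0x04, 0x00, 0x40, 0x00, 0xde, 0xad };

    printf("fcs = 0x%04x\n", crc16(0, pdu, sizeof(pdu)));
    return 0;
}
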
2275 static int l2cap_segment_sdu(struct l2cap_chan *chan, l2cap_segment_sdu() argument
2284 BT_DBG("chan %p, msg %p, len %zu", chan, msg, len); l2cap_segment_sdu()
2292 pdu_len = chan->conn->mtu; l2cap_segment_sdu()
2295 if (!chan->hs_hcon) l2cap_segment_sdu()
2299 if (chan->fcs) l2cap_segment_sdu()
2302 pdu_len -= __ertm_hdr_size(chan); l2cap_segment_sdu()
2305 pdu_len = min_t(size_t, pdu_len, chan->remote_mps); l2cap_segment_sdu()
2317 skb = l2cap_create_iframe_pdu(chan, msg, pdu_len, sdu_len); l2cap_segment_sdu()
2342 static struct sk_buff *l2cap_create_le_flowctl_pdu(struct l2cap_chan *chan, l2cap_create_le_flowctl_pdu() argument
2346 struct l2cap_conn *conn = chan->conn; l2cap_create_le_flowctl_pdu()
2351 BT_DBG("chan %p len %zu", chan, len); l2cap_create_le_flowctl_pdu()
2363 skb = chan->ops->alloc_skb(chan, hlen, count, l2cap_create_le_flowctl_pdu()
2370 lh->cid = cpu_to_le16(chan->dcid); l2cap_create_le_flowctl_pdu()
2376 err = l2cap_skbuff_fromiovec(chan, msg, len, count, skb); l2cap_create_le_flowctl_pdu()
2385 static int l2cap_segment_le_sdu(struct l2cap_chan *chan, l2cap_segment_le_sdu() argument
2393 BT_DBG("chan %p, msg %p, len %zu", chan, msg, len); l2cap_segment_le_sdu()
2396 pdu_len = chan->remote_mps - L2CAP_SDULEN_SIZE; l2cap_segment_le_sdu()
2402 skb = l2cap_create_le_flowctl_pdu(chan, msg, pdu_len, sdu_len); l2cap_segment_le_sdu()
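
l2cap_segment_le_sdu() reserves two bytes of the first PDU for the SDU length field, and (in the full function, not all of which matches here) widens pdu_len back to the full remote MPS for every later segment, since only the first PDU carries that header. The same arithmetic, extracted into a standalone sketch with made-up sizes:

#include <stdio.h>

#define SDULEN_SIZE 2

int main(void)
{
    unsigned remote_mps = 23;   /* peer's maximum PDU payload */
    unsigned len = 100;         /* SDU to send                */
    unsigned pdu_len = remote_mps - SDULEN_SIZE;
    int first = 1, n = 0;

    while (len > 0) {
        unsigned chunk = len < pdu_len ? len : pdu_len;

        printf("segment %d: %u payload bytes%s\n", ++n, chunk,
               first ? " (+2-byte SDU length)" : "");
        len -= chunk;
        if (first) {
            /* only the first PDU carries the SDU length header */
            first = 0;
            pdu_len += SDULEN_SIZE;
        }
    }
    return 0;   /* prints segments of 21, 23, 23, 23, 10 bytes */
}
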
2421 int l2cap_chan_send(struct l2cap_chan *chan, struct msghdr *msg, size_t len) l2cap_chan_send() argument
2427 if (!chan->conn) l2cap_chan_send()
2431 if (chan->chan_type == L2CAP_CHAN_CONN_LESS) { l2cap_chan_send()
2432 skb = l2cap_create_connless_pdu(chan, msg, len); l2cap_chan_send()
2439 if (chan->state != BT_CONNECTED) { l2cap_chan_send()
2444 l2cap_do_send(chan, skb); l2cap_chan_send()
2448 switch (chan->mode) { l2cap_chan_send()
2451 if (len > chan->omtu) l2cap_chan_send()
2454 if (!chan->tx_credits) l2cap_chan_send()
2459 err = l2cap_segment_le_sdu(chan, &seg_queue, msg, len); l2cap_chan_send()
2461 if (chan->state != BT_CONNECTED) { l2cap_chan_send()
2469 skb_queue_splice_tail_init(&seg_queue, &chan->tx_q); l2cap_chan_send()
2471 while (chan->tx_credits && !skb_queue_empty(&chan->tx_q)) { l2cap_chan_send()
2472 l2cap_do_send(chan, skb_dequeue(&chan->tx_q)); l2cap_chan_send()
2473 chan->tx_credits--; l2cap_chan_send()
2476 if (!chan->tx_credits) l2cap_chan_send()
2477 chan->ops->suspend(chan); l2cap_chan_send()
2485 if (len > chan->omtu) l2cap_chan_send()
2489 skb = l2cap_create_basic_pdu(chan, msg, len); l2cap_chan_send()
2496 if (chan->state != BT_CONNECTED) { l2cap_chan_send()
2501 l2cap_do_send(chan, skb); l2cap_chan_send()
2508 if (len > chan->omtu) { l2cap_chan_send()
2519 err = l2cap_segment_sdu(chan, &seg_queue, msg, len); l2cap_chan_send()
2524 if (chan->state != BT_CONNECTED) { l2cap_chan_send()
2532 if (chan->mode == L2CAP_MODE_ERTM) l2cap_chan_send()
2533 l2cap_tx(chan, NULL, &seg_queue, L2CAP_EV_DATA_REQUEST); l2cap_chan_send()
2535 l2cap_streaming_send(chan, &seg_queue); l2cap_chan_send()
2546 BT_DBG("bad state %1.1x", chan->mode); l2cap_chan_send()
2554 static void l2cap_send_srej(struct l2cap_chan *chan, u16 txseq) l2cap_send_srej() argument
2559 BT_DBG("chan %p, txseq %u", chan, txseq); l2cap_send_srej()
2565 for (seq = chan->expected_tx_seq; seq != txseq; l2cap_send_srej()
2566 seq = __next_seq(chan, seq)) { l2cap_send_srej()
2567 if (!l2cap_ertm_seq_in_queue(&chan->srej_q, seq)) { l2cap_send_srej()
2569 l2cap_send_sframe(chan, &control); l2cap_send_srej()
2570 l2cap_seq_list_append(&chan->srej_list, seq); l2cap_send_srej()
2574 chan->expected_tx_seq = __next_seq(chan, txseq); l2cap_send_srej()
2577 static void l2cap_send_srej_tail(struct l2cap_chan *chan) l2cap_send_srej_tail() argument
2581 BT_DBG("chan %p", chan); l2cap_send_srej_tail()
2583 if (chan->srej_list.tail == L2CAP_SEQ_LIST_CLEAR) l2cap_send_srej_tail()
2589 control.reqseq = chan->srej_list.tail; l2cap_send_srej_tail()
2590 l2cap_send_sframe(chan, &control); l2cap_send_srej_tail()
2593 static void l2cap_send_srej_list(struct l2cap_chan *chan, u16 txseq) l2cap_send_srej_list() argument
2599 BT_DBG("chan %p, txseq %u", chan, txseq); l2cap_send_srej_list()
2606 initial_head = chan->srej_list.head; l2cap_send_srej_list()
2609 seq = l2cap_seq_list_pop(&chan->srej_list); l2cap_send_srej_list()
2614 l2cap_send_sframe(chan, &control); l2cap_send_srej_list()
2615 l2cap_seq_list_append(&chan->srej_list, seq); l2cap_send_srej_list()
2616 } while (chan->srej_list.head != initial_head); l2cap_send_srej_list()
2619 static void l2cap_process_reqseq(struct l2cap_chan *chan, u16 reqseq) l2cap_process_reqseq() argument
2624 BT_DBG("chan %p, reqseq %u", chan, reqseq); l2cap_process_reqseq()
2626 if (chan->unacked_frames == 0 || reqseq == chan->expected_ack_seq) l2cap_process_reqseq()
2630 chan->expected_ack_seq, chan->unacked_frames); l2cap_process_reqseq()
2632 for (ackseq = chan->expected_ack_seq; ackseq != reqseq; l2cap_process_reqseq()
2633 ackseq = __next_seq(chan, ackseq)) { l2cap_process_reqseq()
2635 acked_skb = l2cap_ertm_seq_in_queue(&chan->tx_q, ackseq); l2cap_process_reqseq()
2637 skb_unlink(acked_skb, &chan->tx_q); l2cap_process_reqseq()
2639 chan->unacked_frames--; l2cap_process_reqseq()
2643 chan->expected_ack_seq = reqseq; l2cap_process_reqseq()
2645 if (chan->unacked_frames == 0) l2cap_process_reqseq()
2646 __clear_retrans_timer(chan); l2cap_process_reqseq()
2648 BT_DBG("unacked_frames %u", chan->unacked_frames); l2cap_process_reqseq()
2651 static void l2cap_abort_rx_srej_sent(struct l2cap_chan *chan) l2cap_abort_rx_srej_sent() argument
2653 BT_DBG("chan %p", chan); l2cap_abort_rx_srej_sent()
2655 chan->expected_tx_seq = chan->buffer_seq; l2cap_abort_rx_srej_sent()
2656 l2cap_seq_list_clear(&chan->srej_list); l2cap_abort_rx_srej_sent()
2657 skb_queue_purge(&chan->srej_q); l2cap_abort_rx_srej_sent()
2658 chan->rx_state = L2CAP_RX_STATE_RECV; l2cap_abort_rx_srej_sent()
2661 static void l2cap_tx_state_xmit(struct l2cap_chan *chan, l2cap_tx_state_xmit() argument
2665 BT_DBG("chan %p, control %p, skbs %p, event %d", chan, control, skbs, l2cap_tx_state_xmit()
2670 if (chan->tx_send_head == NULL) l2cap_tx_state_xmit()
2671 chan->tx_send_head = skb_peek(skbs); l2cap_tx_state_xmit()
2673 skb_queue_splice_tail_init(skbs, &chan->tx_q); l2cap_tx_state_xmit()
2674 l2cap_ertm_send(chan); l2cap_tx_state_xmit()
2678 set_bit(CONN_LOCAL_BUSY, &chan->conn_state); l2cap_tx_state_xmit()
2680 if (chan->rx_state == L2CAP_RX_STATE_SREJ_SENT) { l2cap_tx_state_xmit()
2684 l2cap_abort_rx_srej_sent(chan); l2cap_tx_state_xmit()
2687 l2cap_send_ack(chan); l2cap_tx_state_xmit()
2692 clear_bit(CONN_LOCAL_BUSY, &chan->conn_state); l2cap_tx_state_xmit()
2694 if (test_bit(CONN_RNR_SENT, &chan->conn_state)) { l2cap_tx_state_xmit()
2701 local_control.reqseq = chan->buffer_seq; l2cap_tx_state_xmit()
2702 l2cap_send_sframe(chan, &local_control); l2cap_tx_state_xmit()
2704 chan->retry_count = 1; l2cap_tx_state_xmit()
2705 __set_monitor_timer(chan); l2cap_tx_state_xmit()
2706 chan->tx_state = L2CAP_TX_STATE_WAIT_F; l2cap_tx_state_xmit()
2710 l2cap_process_reqseq(chan, control->reqseq); l2cap_tx_state_xmit()
2713 l2cap_send_rr_or_rnr(chan, 1); l2cap_tx_state_xmit()
2714 chan->retry_count = 1; l2cap_tx_state_xmit()
2715 __set_monitor_timer(chan); l2cap_tx_state_xmit()
2716 __clear_ack_timer(chan); l2cap_tx_state_xmit()
2717 chan->tx_state = L2CAP_TX_STATE_WAIT_F; l2cap_tx_state_xmit()
2720 l2cap_send_rr_or_rnr(chan, 1); l2cap_tx_state_xmit()
2721 chan->retry_count = 1; l2cap_tx_state_xmit()
2722 __set_monitor_timer(chan); l2cap_tx_state_xmit()
2723 chan->tx_state = L2CAP_TX_STATE_WAIT_F; l2cap_tx_state_xmit()
2733 static void l2cap_tx_state_wait_f(struct l2cap_chan *chan, l2cap_tx_state_wait_f() argument
2737 BT_DBG("chan %p, control %p, skbs %p, event %d", chan, control, skbs, l2cap_tx_state_wait_f()
2742 if (chan->tx_send_head == NULL) l2cap_tx_state_wait_f()
2743 chan->tx_send_head = skb_peek(skbs); l2cap_tx_state_wait_f()
2745 skb_queue_splice_tail_init(skbs, &chan->tx_q); l2cap_tx_state_wait_f()
2749 set_bit(CONN_LOCAL_BUSY, &chan->conn_state); l2cap_tx_state_wait_f()
2751 if (chan->rx_state == L2CAP_RX_STATE_SREJ_SENT) { l2cap_tx_state_wait_f()
2755 l2cap_abort_rx_srej_sent(chan); l2cap_tx_state_wait_f()
2758 l2cap_send_ack(chan); l2cap_tx_state_wait_f()
2763 clear_bit(CONN_LOCAL_BUSY, &chan->conn_state); l2cap_tx_state_wait_f()
2765 if (test_bit(CONN_RNR_SENT, &chan->conn_state)) { l2cap_tx_state_wait_f()
2771 local_control.reqseq = chan->buffer_seq; l2cap_tx_state_wait_f()
2772 l2cap_send_sframe(chan, &local_control); l2cap_tx_state_wait_f()
2774 chan->retry_count = 1; l2cap_tx_state_wait_f()
2775 __set_monitor_timer(chan); l2cap_tx_state_wait_f()
2776 chan->tx_state = L2CAP_TX_STATE_WAIT_F; l2cap_tx_state_wait_f()
2780 l2cap_process_reqseq(chan, control->reqseq); l2cap_tx_state_wait_f()
2786 __clear_monitor_timer(chan); l2cap_tx_state_wait_f()
2787 if (chan->unacked_frames > 0) l2cap_tx_state_wait_f()
2788 __set_retrans_timer(chan); l2cap_tx_state_wait_f()
2789 chan->retry_count = 0; l2cap_tx_state_wait_f()
2790 chan->tx_state = L2CAP_TX_STATE_XMIT; l2cap_tx_state_wait_f()
2791 BT_DBG("recv fbit tx_state 0x2.2%x", chan->tx_state); l2cap_tx_state_wait_f()
2798 if (chan->max_tx == 0 || chan->retry_count < chan->max_tx) { l2cap_tx_state_wait_f()
2799 l2cap_send_rr_or_rnr(chan, 1); l2cap_tx_state_wait_f()
2800 __set_monitor_timer(chan); l2cap_tx_state_wait_f()
2801 chan->retry_count++; l2cap_tx_state_wait_f()
2803 l2cap_send_disconn_req(chan, ECONNABORTED); l2cap_tx_state_wait_f()
2811 static void l2cap_tx(struct l2cap_chan *chan, struct l2cap_ctrl *control, l2cap_tx() argument
2814 BT_DBG("chan %p, control %p, skbs %p, event %d, state %d", l2cap_tx()
2815 chan, control, skbs, event, chan->tx_state); l2cap_tx()
2817 switch (chan->tx_state) { l2cap_tx()
2819 l2cap_tx_state_xmit(chan, control, skbs, event); l2cap_tx()
2822 l2cap_tx_state_wait_f(chan, control, skbs, event); l2cap_tx()
2830 static void l2cap_pass_to_tx(struct l2cap_chan *chan, l2cap_pass_to_tx() argument
2833 BT_DBG("chan %p, control %p", chan, control); l2cap_pass_to_tx()
2834 l2cap_tx(chan, control, NULL, L2CAP_EV_RECV_REQSEQ_AND_FBIT); l2cap_pass_to_tx()
2837 static void l2cap_pass_to_tx_fbit(struct l2cap_chan *chan, l2cap_pass_to_tx_fbit() argument
2840 BT_DBG("chan %p, control %p", chan, control); l2cap_pass_to_tx_fbit()
2841 l2cap_tx(chan, control, NULL, L2CAP_EV_RECV_FBIT); l2cap_pass_to_tx_fbit()
2848 struct l2cap_chan *chan; l2cap_raw_recv() local
2854 list_for_each_entry(chan, &conn->chan_l, list) { l2cap_raw_recv()
2855 if (chan->chan_type != L2CAP_CHAN_RAW) l2cap_raw_recv()
2859 if (bt_cb(skb)->l2cap.chan == chan) l2cap_raw_recv()
2865 if (chan->ops->recv(chan, nskb)) l2cap_raw_recv()
3003 static void l2cap_add_opt_efs(void **ptr, struct l2cap_chan *chan) l2cap_add_opt_efs() argument
3007 switch (chan->mode) { l2cap_add_opt_efs()
3009 efs.id = chan->local_id; l2cap_add_opt_efs()
3010 efs.stype = chan->local_stype; l2cap_add_opt_efs()
3011 efs.msdu = cpu_to_le16(chan->local_msdu); l2cap_add_opt_efs()
3012 efs.sdu_itime = cpu_to_le32(chan->local_sdu_itime); l2cap_add_opt_efs()
3020 efs.msdu = cpu_to_le16(chan->local_msdu); l2cap_add_opt_efs()
3021 efs.sdu_itime = cpu_to_le32(chan->local_sdu_itime); l2cap_add_opt_efs()
3036 struct l2cap_chan *chan = container_of(work, struct l2cap_chan, l2cap_ack_timeout() local
3040 BT_DBG("chan %p", chan); l2cap_ack_timeout()
3042 l2cap_chan_lock(chan); l2cap_ack_timeout()
3044 frames_to_ack = __seq_offset(chan, chan->buffer_seq, l2cap_ack_timeout()
3045 chan->last_acked_seq); l2cap_ack_timeout()
3048 l2cap_send_rr_or_rnr(chan, 0); l2cap_ack_timeout()
3050 l2cap_chan_unlock(chan); l2cap_ack_timeout()
3051 l2cap_chan_put(chan); l2cap_ack_timeout()
3054 int l2cap_ertm_init(struct l2cap_chan *chan) l2cap_ertm_init() argument
3058 chan->next_tx_seq = 0; l2cap_ertm_init()
3059 chan->expected_tx_seq = 0; l2cap_ertm_init()
3060 chan->expected_ack_seq = 0; l2cap_ertm_init()
3061 chan->unacked_frames = 0; l2cap_ertm_init()
3062 chan->buffer_seq = 0; l2cap_ertm_init()
3063 chan->frames_sent = 0; l2cap_ertm_init()
3064 chan->last_acked_seq = 0; l2cap_ertm_init()
3065 chan->sdu = NULL; l2cap_ertm_init()
3066 chan->sdu_last_frag = NULL; l2cap_ertm_init()
3067 chan->sdu_len = 0; l2cap_ertm_init()
3069 skb_queue_head_init(&chan->tx_q); l2cap_ertm_init()
3071 chan->local_amp_id = AMP_ID_BREDR; l2cap_ertm_init()
3072 chan->move_id = AMP_ID_BREDR; l2cap_ertm_init()
3073 chan->move_state = L2CAP_MOVE_STABLE; l2cap_ertm_init()
3074 chan->move_role = L2CAP_MOVE_ROLE_NONE; l2cap_ertm_init()
3076 if (chan->mode != L2CAP_MODE_ERTM) l2cap_ertm_init()
3079 chan->rx_state = L2CAP_RX_STATE_RECV; l2cap_ertm_init()
3080 chan->tx_state = L2CAP_TX_STATE_XMIT; l2cap_ertm_init()
3082 INIT_DELAYED_WORK(&chan->retrans_timer, l2cap_retrans_timeout); l2cap_ertm_init()
3083 INIT_DELAYED_WORK(&chan->monitor_timer, l2cap_monitor_timeout); l2cap_ertm_init()
3084 INIT_DELAYED_WORK(&chan->ack_timer, l2cap_ack_timeout); l2cap_ertm_init()
3086 skb_queue_head_init(&chan->srej_q); l2cap_ertm_init()
3088 err = l2cap_seq_list_init(&chan->srej_list, chan->tx_win); l2cap_ertm_init()
3092 err = l2cap_seq_list_init(&chan->retrans_list, chan->remote_tx_win); l2cap_ertm_init()
3094 l2cap_seq_list_free(&chan->srej_list); l2cap_ertm_init()
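
The srej_list and retrans_list initialized above are l2cap_seq_list instances: array-indexed FIFOs of sequence numbers sized to the (power-of-two-rounded) window, where each slot holds the next sequence number in the list. A simplified userspace take on the idea, assuming a reserved CLEAR marker and a self-linked tail; the kernel's structure differs in detail:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define SEQ_CLEAR 0xFFFF    /* reserved: slot/list is empty */

struct seq_list {
    uint16_t head, tail;
    uint16_t mask;
    uint16_t *list;         /* list[seq] = next seq, or SEQ_CLEAR */
};

static int seq_list_init(struct seq_list *l, uint16_t size /* pow2 */)
{
    l->list = malloc(size * sizeof(*l->list));
    if (!l->list)
        return -1;
    for (uint16_t i = 0; i < size; i++)
        l->list[i] = SEQ_CLEAR;
    l->mask = size - 1;
    l->head = l->tail = SEQ_CLEAR;
    return 0;
}

static void seq_list_append(struct seq_list *l, uint16_t seq)
{
    if (l->list[seq & l->mask] != SEQ_CLEAR)
        return;                     /* already queued: skip duplicate */
    if (l->tail == SEQ_CLEAR)
        l->head = seq;
    else
        l->list[l->tail & l->mask] = seq;
    l->tail = seq;
    l->list[seq & l->mask] = seq;   /* self-link marks the tail */
}

static uint16_t seq_list_pop(struct seq_list *l)
{
    uint16_t seq = l->head, next;

    if (seq == SEQ_CLEAR)
        return SEQ_CLEAR;
    next = l->list[seq & l->mask];
    l->list[seq & l->mask] = SEQ_CLEAR;
    if (next == seq)                /* popped the tail */
        l->head = l->tail = SEQ_CLEAR;
    else
        l->head = next;
    return seq;
}

int main(void)
{
    struct seq_list l;

    if (seq_list_init(&l, 64))
        return 1;
    seq_list_append(&l, 5);
    seq_list_append(&l, 9);
    seq_list_append(&l, 5);         /* duplicate, ignored */
    for (uint16_t s; (s = seq_list_pop(&l)) != SEQ_CLEAR; )
        printf("pop %u\n", s);      /* 5, then 9 */
    free(l.list);
    return 0;
}
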
3124 static void __l2cap_set_ertm_timeouts(struct l2cap_chan *chan, __l2cap_set_ertm_timeouts() argument
3127 if (chan->local_amp_id != AMP_ID_BREDR && chan->hs_hcon) { __l2cap_set_ertm_timeouts()
3128 u64 ertm_to = chan->hs_hcon->hdev->amp_be_flush_to; __l2cap_set_ertm_timeouts()
3162 static inline void l2cap_txwin_setup(struct l2cap_chan *chan) l2cap_txwin_setup() argument
3164 if (chan->tx_win > L2CAP_DEFAULT_TX_WINDOW && l2cap_txwin_setup()
3165 __l2cap_ews_supported(chan->conn)) { l2cap_txwin_setup()
3167 set_bit(FLAG_EXT_CTRL, &chan->flags); l2cap_txwin_setup()
3168 chan->tx_win_max = L2CAP_DEFAULT_EXT_WINDOW; l2cap_txwin_setup()
3170 chan->tx_win = min_t(u16, chan->tx_win, l2cap_txwin_setup()
3172 chan->tx_win_max = L2CAP_DEFAULT_TX_WINDOW; l2cap_txwin_setup()
3174 chan->ack_win = chan->tx_win; l2cap_txwin_setup()
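
l2cap_txwin_setup() chooses between the 6-bit standard window and the 14-bit extended window: a request above 63 enables FLAG_EXT_CTRL when the link supports extended window size, otherwise the request is clamped. A compact restatement using the spec's default limits:

#include <stdio.h>

#define DEFAULT_TX_WINDOW  63       /* max with the standard control field */
#define DEFAULT_EXT_WINDOW 0x3FFF   /* max with the extended control field */

struct chan { unsigned tx_win, tx_win_max, ack_win; int ext_ctrl; };

static void txwin_setup(struct chan *c, int ews_supported)
{
    if (c->tx_win > DEFAULT_TX_WINDOW && ews_supported) {
        c->ext_ctrl = 1;
        c->tx_win_max = DEFAULT_EXT_WINDOW;
    } else {
        c->tx_win = c->tx_win < DEFAULT_TX_WINDOW ?
                        c->tx_win : DEFAULT_TX_WINDOW;
        c->tx_win_max = DEFAULT_TX_WINDOW;
    }
    c->ack_win = c->tx_win;
}

int main(void)
{
    struct chan c = { .tx_win = 200 };

    txwin_setup(&c, 0);
    printf("win=%u max=%u ext=%d\n", c.tx_win, c.tx_win_max, c.ext_ctrl);
    /* win=63 max=63 ext=0: clamped without extended-window support */
    return 0;
}
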
3177 static int l2cap_build_conf_req(struct l2cap_chan *chan, void *data) l2cap_build_conf_req() argument
3180 struct l2cap_conf_rfc rfc = { .mode = chan->mode }; l2cap_build_conf_req()
3184 BT_DBG("chan %p", chan); l2cap_build_conf_req()
3186 if (chan->num_conf_req || chan->num_conf_rsp) l2cap_build_conf_req()
3189 switch (chan->mode) { l2cap_build_conf_req()
3192 if (test_bit(CONF_STATE2_DEVICE, &chan->conf_state)) l2cap_build_conf_req()
3195 if (__l2cap_efs_supported(chan->conn)) l2cap_build_conf_req()
3196 set_bit(FLAG_EFS_ENABLE, &chan->flags); l2cap_build_conf_req()
3200 chan->mode = l2cap_select_mode(rfc.mode, chan->conn->feat_mask); l2cap_build_conf_req()
3205 if (chan->imtu != L2CAP_DEFAULT_MTU) l2cap_build_conf_req()
3206 l2cap_add_conf_opt(&ptr, L2CAP_CONF_MTU, 2, chan->imtu); l2cap_build_conf_req()
3208 switch (chan->mode) { l2cap_build_conf_req()
3213 if (!(chan->conn->feat_mask & L2CAP_FEAT_ERTM) && l2cap_build_conf_req()
3214 !(chan->conn->feat_mask & L2CAP_FEAT_STREAMING)) l2cap_build_conf_req()
3230 rfc.max_transmit = chan->max_tx; l2cap_build_conf_req()
3232 __l2cap_set_ertm_timeouts(chan, &rfc); l2cap_build_conf_req()
3234 size = min_t(u16, L2CAP_DEFAULT_MAX_PDU_SIZE, chan->conn->mtu - l2cap_build_conf_req()
3239 l2cap_txwin_setup(chan); l2cap_build_conf_req()
3241 rfc.txwin_size = min_t(u16, chan->tx_win, l2cap_build_conf_req()
3247 if (test_bit(FLAG_EFS_ENABLE, &chan->flags)) l2cap_build_conf_req()
3248 l2cap_add_opt_efs(&ptr, chan); l2cap_build_conf_req()
3250 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) l2cap_build_conf_req()
3252 chan->tx_win); l2cap_build_conf_req()
3254 if (chan->conn->feat_mask & L2CAP_FEAT_FCS) l2cap_build_conf_req()
3255 if (chan->fcs == L2CAP_FCS_NONE || l2cap_build_conf_req()
3256 test_bit(CONF_RECV_NO_FCS, &chan->conf_state)) { l2cap_build_conf_req()
3257 chan->fcs = L2CAP_FCS_NONE; l2cap_build_conf_req()
3259 chan->fcs); l2cap_build_conf_req()
3264 l2cap_txwin_setup(chan); l2cap_build_conf_req()
3271 size = min_t(u16, L2CAP_DEFAULT_MAX_PDU_SIZE, chan->conn->mtu - l2cap_build_conf_req()
3279 if (test_bit(FLAG_EFS_ENABLE, &chan->flags)) l2cap_build_conf_req()
3280 l2cap_add_opt_efs(&ptr, chan); l2cap_build_conf_req()
3282 if (chan->conn->feat_mask & L2CAP_FEAT_FCS) l2cap_build_conf_req()
3283 if (chan->fcs == L2CAP_FCS_NONE || l2cap_build_conf_req()
3284 test_bit(CONF_RECV_NO_FCS, &chan->conf_state)) { l2cap_build_conf_req()
3285 chan->fcs = L2CAP_FCS_NONE; l2cap_build_conf_req()
3287 chan->fcs); l2cap_build_conf_req()
3292 req->dcid = cpu_to_le16(chan->dcid); l2cap_build_conf_req()
3298 static int l2cap_parse_conf_req(struct l2cap_chan *chan, void *data) l2cap_parse_conf_req() argument
3302 void *req = chan->conf_req; l2cap_parse_conf_req()
3303 int len = chan->conf_len; l2cap_parse_conf_req()
3313 BT_DBG("chan %p", chan); l2cap_parse_conf_req()
3327 chan->flush_to = val; l2cap_parse_conf_req()
3340 set_bit(CONF_RECV_NO_FCS, &chan->conf_state); l2cap_parse_conf_req()
3350 if (!(chan->conn->local_fixed_chan & L2CAP_FC_A2MP)) l2cap_parse_conf_req()
3353 set_bit(FLAG_EXT_CTRL, &chan->flags); l2cap_parse_conf_req()
3354 set_bit(CONF_EWS_RECV, &chan->conf_state); l2cap_parse_conf_req()
3355 chan->tx_win_max = L2CAP_DEFAULT_EXT_WINDOW; l2cap_parse_conf_req()
3356 chan->remote_tx_win = val; l2cap_parse_conf_req()
3369 if (chan->num_conf_rsp || chan->num_conf_req > 1) l2cap_parse_conf_req()
3372 switch (chan->mode) { l2cap_parse_conf_req()
3375 if (!test_bit(CONF_STATE2_DEVICE, &chan->conf_state)) { l2cap_parse_conf_req()
3376 chan->mode = l2cap_select_mode(rfc.mode, l2cap_parse_conf_req()
3377 chan->conn->feat_mask); l2cap_parse_conf_req()
3382 if (__l2cap_efs_supported(chan->conn)) l2cap_parse_conf_req()
3383 set_bit(FLAG_EFS_ENABLE, &chan->flags); l2cap_parse_conf_req()
3388 if (chan->mode != rfc.mode) l2cap_parse_conf_req()
3395 if (chan->mode != rfc.mode) { l2cap_parse_conf_req()
3397 rfc.mode = chan->mode; l2cap_parse_conf_req()
3399 if (chan->num_conf_rsp == 1) l2cap_parse_conf_req()
3413 chan->omtu = mtu; l2cap_parse_conf_req()
3414 set_bit(CONF_MTU_DONE, &chan->conf_state); l2cap_parse_conf_req()
3416 l2cap_add_conf_opt(&ptr, L2CAP_CONF_MTU, 2, chan->omtu); l2cap_parse_conf_req()
3419 if (chan->local_stype != L2CAP_SERV_NOTRAFIC && l2cap_parse_conf_req()
3421 efs.stype != chan->local_stype) { l2cap_parse_conf_req()
3425 if (chan->num_conf_req >= 1) l2cap_parse_conf_req()
3434 set_bit(CONF_LOC_CONF_PEND, &chan->conf_state); l2cap_parse_conf_req()
3440 chan->fcs = L2CAP_FCS_NONE; l2cap_parse_conf_req()
3441 set_bit(CONF_MODE_DONE, &chan->conf_state); l2cap_parse_conf_req()
3445 if (!test_bit(CONF_EWS_RECV, &chan->conf_state)) l2cap_parse_conf_req()
3446 chan->remote_tx_win = rfc.txwin_size; l2cap_parse_conf_req()
3450 chan->remote_max_tx = rfc.max_transmit; l2cap_parse_conf_req()
3453 chan->conn->mtu - L2CAP_EXT_HDR_SIZE - l2cap_parse_conf_req()
3456 chan->remote_mps = size; l2cap_parse_conf_req()
3458 __l2cap_set_ertm_timeouts(chan, &rfc); l2cap_parse_conf_req()
3460 set_bit(CONF_MODE_DONE, &chan->conf_state); l2cap_parse_conf_req()
3465 if (test_bit(FLAG_EFS_ENABLE, &chan->flags)) { l2cap_parse_conf_req()
3466 chan->remote_id = efs.id; l2cap_parse_conf_req()
3467 chan->remote_stype = efs.stype; l2cap_parse_conf_req()
3468 chan->remote_msdu = le16_to_cpu(efs.msdu); l2cap_parse_conf_req()
3469 chan->remote_flush_to = l2cap_parse_conf_req()
3471 chan->remote_acc_lat = l2cap_parse_conf_req()
3473 chan->remote_sdu_itime = l2cap_parse_conf_req()
3483 chan->conn->mtu - L2CAP_EXT_HDR_SIZE - l2cap_parse_conf_req()
3486 chan->remote_mps = size; l2cap_parse_conf_req()
3488 set_bit(CONF_MODE_DONE, &chan->conf_state); l2cap_parse_conf_req()
3499 rfc.mode = chan->mode; l2cap_parse_conf_req()
3503 set_bit(CONF_OUTPUT_DONE, &chan->conf_state); l2cap_parse_conf_req()
3505 rsp->scid = cpu_to_le16(chan->dcid); l2cap_parse_conf_req()
3512 static int l2cap_parse_conf_rsp(struct l2cap_chan *chan, void *rsp, int len, l2cap_parse_conf_rsp() argument
3522 BT_DBG("chan %p, rsp %p, len %d, req %p", chan, rsp, len, data); l2cap_parse_conf_rsp()
3531 chan->imtu = L2CAP_DEFAULT_MIN_MTU; l2cap_parse_conf_rsp()
3533 chan->imtu = val; l2cap_parse_conf_rsp()
3534 l2cap_add_conf_opt(&ptr, L2CAP_CONF_MTU, 2, chan->imtu); l2cap_parse_conf_rsp()
3538 chan->flush_to = val; l2cap_parse_conf_rsp()
3540 2, chan->flush_to); l2cap_parse_conf_rsp()
3547 if (test_bit(CONF_STATE2_DEVICE, &chan->conf_state) && l2cap_parse_conf_rsp()
3548 rfc.mode != chan->mode) l2cap_parse_conf_rsp()
3551 chan->fcs = 0; l2cap_parse_conf_rsp()
3558 chan->ack_win = min_t(u16, val, chan->ack_win); l2cap_parse_conf_rsp()
3560 chan->tx_win); l2cap_parse_conf_rsp()
3567 if (chan->local_stype != L2CAP_SERV_NOTRAFIC && l2cap_parse_conf_rsp()
3569 efs.stype != chan->local_stype) l2cap_parse_conf_rsp()
3580 &chan->conf_state); l2cap_parse_conf_rsp()
3585 if (chan->mode == L2CAP_MODE_BASIC && chan->mode != rfc.mode) l2cap_parse_conf_rsp()
3588 chan->mode = rfc.mode; l2cap_parse_conf_rsp()
3593 chan->retrans_timeout = le16_to_cpu(rfc.retrans_timeout); l2cap_parse_conf_rsp()
3594 chan->monitor_timeout = le16_to_cpu(rfc.monitor_timeout); l2cap_parse_conf_rsp()
3595 chan->mps = le16_to_cpu(rfc.max_pdu_size); l2cap_parse_conf_rsp()
3596 if (!test_bit(FLAG_EXT_CTRL, &chan->flags)) l2cap_parse_conf_rsp()
3597 chan->ack_win = min_t(u16, chan->ack_win, l2cap_parse_conf_rsp()
3600 if (test_bit(FLAG_EFS_ENABLE, &chan->flags)) { l2cap_parse_conf_rsp()
3601 chan->local_msdu = le16_to_cpu(efs.msdu); l2cap_parse_conf_rsp()
3602 chan->local_sdu_itime = l2cap_parse_conf_rsp()
3604 chan->local_acc_lat = le32_to_cpu(efs.acc_lat); l2cap_parse_conf_rsp()
3605 chan->local_flush_to = l2cap_parse_conf_rsp()
3611 chan->mps = le16_to_cpu(rfc.max_pdu_size); l2cap_parse_conf_rsp()
3615 req->dcid = cpu_to_le16(chan->dcid); l2cap_parse_conf_rsp()
3621 static int l2cap_build_conf_rsp(struct l2cap_chan *chan, void *data, l2cap_build_conf_rsp() argument
3627 BT_DBG("chan %p", chan); l2cap_build_conf_rsp()
3629 rsp->scid = cpu_to_le16(chan->dcid); l2cap_build_conf_rsp()
3636 void __l2cap_le_connect_rsp_defer(struct l2cap_chan *chan) __l2cap_le_connect_rsp_defer() argument
3639 struct l2cap_conn *conn = chan->conn; __l2cap_le_connect_rsp_defer()
3641 BT_DBG("chan %p", chan); __l2cap_le_connect_rsp_defer()
3643 rsp.dcid = cpu_to_le16(chan->scid); __l2cap_le_connect_rsp_defer()
3644 rsp.mtu = cpu_to_le16(chan->imtu); __l2cap_le_connect_rsp_defer()
3645 rsp.mps = cpu_to_le16(chan->mps); __l2cap_le_connect_rsp_defer()
3646 rsp.credits = cpu_to_le16(chan->rx_credits); __l2cap_le_connect_rsp_defer()
3649 l2cap_send_cmd(conn, chan->ident, L2CAP_LE_CONN_RSP, sizeof(rsp), __l2cap_le_connect_rsp_defer()
3653 void __l2cap_connect_rsp_defer(struct l2cap_chan *chan) __l2cap_connect_rsp_defer() argument
3656 struct l2cap_conn *conn = chan->conn; __l2cap_connect_rsp_defer()
3660 rsp.scid = cpu_to_le16(chan->dcid); __l2cap_connect_rsp_defer()
3661 rsp.dcid = cpu_to_le16(chan->scid); __l2cap_connect_rsp_defer()
3665 if (chan->hs_hcon) __l2cap_connect_rsp_defer()
3670 BT_DBG("chan %p rsp_code %u", chan, rsp_code); __l2cap_connect_rsp_defer()
3672 l2cap_send_cmd(conn, chan->ident, rsp_code, sizeof(rsp), &rsp); __l2cap_connect_rsp_defer()
3674 if (test_and_set_bit(CONF_REQ_SENT, &chan->conf_state)) __l2cap_connect_rsp_defer()
3678 l2cap_build_conf_req(chan, buf), buf); __l2cap_connect_rsp_defer()
3679 chan->num_conf_req++; __l2cap_connect_rsp_defer()
3682 static void l2cap_conf_rfc_get(struct l2cap_chan *chan, void *rsp, int len) l2cap_conf_rfc_get() argument
3689 u16 txwin_ext = chan->ack_win; l2cap_conf_rfc_get()
3691 .mode = chan->mode, l2cap_conf_rfc_get()
3694 .max_pdu_size = cpu_to_le16(chan->imtu), l2cap_conf_rfc_get()
3695 .txwin_size = min_t(u16, chan->ack_win, L2CAP_DEFAULT_TX_WINDOW), l2cap_conf_rfc_get()
3698 BT_DBG("chan %p, rsp %p, len %d", chan, rsp, len); l2cap_conf_rfc_get()
3700 if ((chan->mode != L2CAP_MODE_ERTM) && (chan->mode != L2CAP_MODE_STREAMING)) l2cap_conf_rfc_get()
3719 chan->retrans_timeout = le16_to_cpu(rfc.retrans_timeout); l2cap_conf_rfc_get()
3720 chan->monitor_timeout = le16_to_cpu(rfc.monitor_timeout); l2cap_conf_rfc_get()
3721 chan->mps = le16_to_cpu(rfc.max_pdu_size); l2cap_conf_rfc_get()
3722 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) l2cap_conf_rfc_get()
3723 chan->ack_win = min_t(u16, chan->ack_win, txwin_ext); l2cap_conf_rfc_get()
3725 chan->ack_win = min_t(u16, chan->ack_win, l2cap_conf_rfc_get()
3729 chan->mps = le16_to_cpu(rfc.max_pdu_size); l2cap_conf_rfc_get()
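
The configuration parsers in this file walk a type/length/value option list (via l2cap_get_conf_opt, which does not match this search). A minimal walker over the same wire format, one type byte and one length byte per option, with the high type bit marking a skippable hint; the sample bytes encode an MTU option (672) followed by an FCS option:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Walk L2CAP-style config options: 1-byte type, 1-byte len, payload. */
static void walk_conf_opts(const uint8_t *p, size_t len)
{
    while (len >= 2) {
        uint8_t type = p[0], olen = p[1];

        if (2 + (size_t)olen > len)
            break;                      /* truncated option */
        printf("option 0x%02x, %u byte(s)\n",
               (unsigned)(type & 0x7f), (unsigned)olen);
        /* high bit set means "hint": unknown hints are skipped,
         * unknown non-hints are rejected */
        p += 2 + olen;
        len -= 2 + olen;
    }
}

int main(void)
{
    /* MTU (type 0x01, len 2, 672 LE) then FCS (type 0x05, len 1, none) */
    const uint8_t opts[] = { 0x01, 0x02, 0xa0, 0x02, 0x05, 0x01, 0x00 };

    walk_conf_opts(opts, sizeof(opts));
    return 0;
}
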
3764 struct l2cap_chan *chan = NULL, *pchan; l2cap_connect() local
3797 chan = pchan->ops->new_connection(pchan); l2cap_connect()
3798 if (!chan) l2cap_connect()
3808 bacpy(&chan->src, &conn->hcon->src); l2cap_connect()
3809 bacpy(&chan->dst, &conn->hcon->dst); l2cap_connect()
3810 chan->src_type = bdaddr_src_type(conn->hcon); l2cap_connect()
3811 chan->dst_type = bdaddr_dst_type(conn->hcon); l2cap_connect()
3812 chan->psm = psm; l2cap_connect()
3813 chan->dcid = scid; l2cap_connect()
3814 chan->local_amp_id = amp_id; l2cap_connect()
3816 __l2cap_chan_add(conn, chan); l2cap_connect()
3818 dcid = chan->scid; l2cap_connect()
3820 __set_chan_timer(chan, chan->ops->get_sndtimeo(chan)); l2cap_connect()
3822 chan->ident = cmd->ident; l2cap_connect()
3825 if (l2cap_chan_check_security(chan, false)) { l2cap_connect()
3826 if (test_bit(FLAG_DEFER_SETUP, &chan->flags)) { l2cap_connect()
3827 l2cap_state_change(chan, BT_CONNECT2); l2cap_connect()
3830 chan->ops->defer(chan); l2cap_connect()
3837 l2cap_state_change(chan, BT_CONFIG); l2cap_connect()
3840 l2cap_state_change(chan, BT_CONNECT2); l2cap_connect()
3846 l2cap_state_change(chan, BT_CONNECT2); l2cap_connect()
3851 l2cap_state_change(chan, BT_CONNECT2); l2cap_connect()
3881 if (chan && !test_bit(CONF_REQ_SENT, &chan->conf_state) && l2cap_connect()
3884 set_bit(CONF_REQ_SENT, &chan->conf_state); l2cap_connect()
3886 l2cap_build_conf_req(chan, buf), buf); l2cap_connect()
3887 chan->num_conf_req++; l2cap_connect()
3890 return chan; l2cap_connect()
3918 struct l2cap_chan *chan; l2cap_connect_create_rsp() local
3936 chan = __l2cap_get_chan_by_scid(conn, scid); l2cap_connect_create_rsp()
3937 if (!chan) { l2cap_connect_create_rsp()
3942 chan = __l2cap_get_chan_by_ident(conn, cmd->ident); l2cap_connect_create_rsp()
3943 if (!chan) { l2cap_connect_create_rsp()
3951 l2cap_chan_lock(chan); l2cap_connect_create_rsp()
3955 l2cap_state_change(chan, BT_CONFIG); l2cap_connect_create_rsp()
3956 chan->ident = 0; l2cap_connect_create_rsp()
3957 chan->dcid = dcid; l2cap_connect_create_rsp()
3958 clear_bit(CONF_CONNECT_PEND, &chan->conf_state); l2cap_connect_create_rsp()
3960 if (test_and_set_bit(CONF_REQ_SENT, &chan->conf_state)) l2cap_connect_create_rsp()
3964 l2cap_build_conf_req(chan, req), req); l2cap_connect_create_rsp()
3965 chan->num_conf_req++; l2cap_connect_create_rsp()
3969 set_bit(CONF_CONNECT_PEND, &chan->conf_state); l2cap_connect_create_rsp()
3973 l2cap_chan_del(chan, ECONNREFUSED); l2cap_connect_create_rsp()
3977 l2cap_chan_unlock(chan); l2cap_connect_create_rsp()
3985 static inline void set_default_fcs(struct l2cap_chan *chan) set_default_fcs() argument
3990 if (chan->mode != L2CAP_MODE_ERTM && chan->mode != L2CAP_MODE_STREAMING) set_default_fcs()
3991 chan->fcs = L2CAP_FCS_NONE; set_default_fcs()
3992 else if (!test_bit(CONF_RECV_NO_FCS, &chan->conf_state)) set_default_fcs()
3993 chan->fcs = L2CAP_FCS_CRC16; set_default_fcs()
3996 static void l2cap_send_efs_conf_rsp(struct l2cap_chan *chan, void *data, l2cap_send_efs_conf_rsp() argument
3999 struct l2cap_conn *conn = chan->conn; l2cap_send_efs_conf_rsp()
4001 BT_DBG("conn %p chan %p ident %d flags 0x%4.4x", conn, chan, ident, l2cap_send_efs_conf_rsp()
4004 clear_bit(CONF_LOC_CONF_PEND, &chan->conf_state); l2cap_send_efs_conf_rsp()
4005 set_bit(CONF_OUTPUT_DONE, &chan->conf_state); l2cap_send_efs_conf_rsp()
4008 l2cap_build_conf_rsp(chan, data, l2cap_send_efs_conf_rsp()
4031 struct l2cap_chan *chan; l2cap_config_req() local
4042 chan = l2cap_get_chan_by_scid(conn, dcid); l2cap_config_req()
4043 if (!chan) { l2cap_config_req()
4048 if (chan->state != BT_CONFIG && chan->state != BT_CONNECT2) { l2cap_config_req()
4049 cmd_reject_invalid_cid(conn, cmd->ident, chan->scid, l2cap_config_req()
4050 chan->dcid); l2cap_config_req()
4056 if (chan->conf_len + len > sizeof(chan->conf_req)) { l2cap_config_req()
4058 l2cap_build_conf_rsp(chan, rsp, l2cap_config_req()
4064 memcpy(chan->conf_req + chan->conf_len, req->data, len); l2cap_config_req()
4065 chan->conf_len += len; l2cap_config_req()
4070 l2cap_build_conf_rsp(chan, rsp, l2cap_config_req()
4076 len = l2cap_parse_conf_req(chan, rsp); l2cap_config_req()
4078 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_config_req()
4082 chan->ident = cmd->ident; l2cap_config_req()
4084 chan->num_conf_rsp++; l2cap_config_req()
4087 chan->conf_len = 0; l2cap_config_req()
4089 if (!test_bit(CONF_OUTPUT_DONE, &chan->conf_state)) l2cap_config_req()
4092 if (test_bit(CONF_INPUT_DONE, &chan->conf_state)) { l2cap_config_req()
4093 set_default_fcs(chan); l2cap_config_req()
4095 if (chan->mode == L2CAP_MODE_ERTM || l2cap_config_req()
4096 chan->mode == L2CAP_MODE_STREAMING) l2cap_config_req()
4097 err = l2cap_ertm_init(chan); l2cap_config_req()
4100 l2cap_send_disconn_req(chan, -err); l2cap_config_req()
4102 l2cap_chan_ready(chan); l2cap_config_req()
4107 if (!test_and_set_bit(CONF_REQ_SENT, &chan->conf_state)) { l2cap_config_req()
4110 l2cap_build_conf_req(chan, buf), buf); l2cap_config_req()
4111 chan->num_conf_req++; l2cap_config_req()
4116 if (test_bit(CONF_REM_CONF_PEND, &chan->conf_state) && l2cap_config_req()
4117 test_bit(CONF_LOC_CONF_PEND, &chan->conf_state)) { l2cap_config_req()
4122 if (!chan->hs_hcon) l2cap_config_req()
4123 l2cap_send_efs_conf_rsp(chan, rsp, cmd->ident, flags); l2cap_config_req()
4125 chan->ident = cmd->ident; l2cap_config_req()
4129 l2cap_chan_unlock(chan); l2cap_config_req()
4139 struct l2cap_chan *chan; l2cap_config_rsp() local
4153 chan = l2cap_get_chan_by_scid(conn, scid); l2cap_config_rsp()
4154 if (!chan) l2cap_config_rsp()
4159 l2cap_conf_rfc_get(chan, rsp->data, len); l2cap_config_rsp()
4160 clear_bit(CONF_REM_CONF_PEND, &chan->conf_state); l2cap_config_rsp()
4164 set_bit(CONF_REM_CONF_PEND, &chan->conf_state); l2cap_config_rsp()
4166 if (test_bit(CONF_LOC_CONF_PEND, &chan->conf_state)) { l2cap_config_rsp()
4169 len = l2cap_parse_conf_rsp(chan, rsp->data, len, l2cap_config_rsp()
4172 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_config_rsp()
4176 if (!chan->hs_hcon) { l2cap_config_rsp()
4177 l2cap_send_efs_conf_rsp(chan, buf, cmd->ident, l2cap_config_rsp()
4180 if (l2cap_check_efs(chan)) { l2cap_config_rsp()
4181 amp_create_logical_link(chan); l2cap_config_rsp()
4182 chan->ident = cmd->ident; l2cap_config_rsp()
4189 if (chan->num_conf_rsp <= L2CAP_CONF_MAX_CONF_RSP) { l2cap_config_rsp()
4193 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_config_rsp()
4199 len = l2cap_parse_conf_rsp(chan, rsp->data, len, l2cap_config_rsp()
4202 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_config_rsp()
4208 chan->num_conf_req++; l2cap_config_rsp()
4215 l2cap_chan_set_err(chan, ECONNRESET); l2cap_config_rsp()
4217 __set_chan_timer(chan, L2CAP_DISC_REJ_TIMEOUT); l2cap_config_rsp()
4218 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_config_rsp()
4225 set_bit(CONF_INPUT_DONE, &chan->conf_state); l2cap_config_rsp()
4227 if (test_bit(CONF_OUTPUT_DONE, &chan->conf_state)) { l2cap_config_rsp()
4228 set_default_fcs(chan); l2cap_config_rsp()
4230 if (chan->mode == L2CAP_MODE_ERTM || l2cap_config_rsp()
4231 chan->mode == L2CAP_MODE_STREAMING) l2cap_config_rsp()
4232 err = l2cap_ertm_init(chan); l2cap_config_rsp()
4235 l2cap_send_disconn_req(chan, -err); l2cap_config_rsp()
4237 l2cap_chan_ready(chan); l2cap_config_rsp()
4241 l2cap_chan_unlock(chan); l2cap_config_rsp()
4252 struct l2cap_chan *chan; l2cap_disconnect_req() local
4264 chan = __l2cap_get_chan_by_scid(conn, dcid); l2cap_disconnect_req()
4265 if (!chan) { l2cap_disconnect_req()
4271 l2cap_chan_lock(chan); l2cap_disconnect_req()
4273 rsp.dcid = cpu_to_le16(chan->scid); l2cap_disconnect_req()
4274 rsp.scid = cpu_to_le16(chan->dcid); l2cap_disconnect_req()
4277 chan->ops->set_shutdown(chan); l2cap_disconnect_req()
4279 l2cap_chan_hold(chan); l2cap_disconnect_req()
4280 l2cap_chan_del(chan, ECONNRESET); l2cap_disconnect_req()
4282 l2cap_chan_unlock(chan); l2cap_disconnect_req()
4284 chan->ops->close(chan); l2cap_disconnect_req()
4285 l2cap_chan_put(chan); l2cap_disconnect_req()
4298 struct l2cap_chan *chan; l2cap_disconnect_rsp() local
4310 chan = __l2cap_get_chan_by_scid(conn, scid); l2cap_disconnect_rsp()
4311 if (!chan) { l2cap_disconnect_rsp()
4316 l2cap_chan_lock(chan); l2cap_disconnect_rsp()
4318 l2cap_chan_hold(chan); l2cap_disconnect_rsp()
4319 l2cap_chan_del(chan, 0); l2cap_disconnect_rsp()
4321 l2cap_chan_unlock(chan); l2cap_disconnect_rsp()
4323 chan->ops->close(chan); l2cap_disconnect_rsp()
4324 l2cap_chan_put(chan); l2cap_disconnect_rsp()
4451 struct l2cap_chan *chan; l2cap_create_channel_req() local
4483 chan = l2cap_connect(conn, cmd, data, L2CAP_CREATE_CHAN_RSP, l2cap_create_channel_req()
4485 if (chan) { l2cap_create_channel_req()
4493 cmd_reject_invalid_cid(conn, cmd->ident, chan->scid, l2cap_create_channel_req()
4494 chan->dcid); l2cap_create_channel_req()
4498 BT_DBG("mgr %p bredr_chan %p hs_hcon %p", mgr, chan, hs_hcon); l2cap_create_channel_req()
4500 mgr->bredr_chan = chan; l2cap_create_channel_req()
4501 chan->hs_hcon = hs_hcon; l2cap_create_channel_req()
4502 chan->fcs = L2CAP_FCS_NONE; l2cap_create_channel_req()
4522 static void l2cap_send_move_chan_req(struct l2cap_chan *chan, u8 dest_amp_id) l2cap_send_move_chan_req() argument
4527 BT_DBG("chan %p, dest_amp_id %d", chan, dest_amp_id); l2cap_send_move_chan_req()
4529 ident = l2cap_get_ident(chan->conn); l2cap_send_move_chan_req()
4530 chan->ident = ident; l2cap_send_move_chan_req()
4532 req.icid = cpu_to_le16(chan->scid); l2cap_send_move_chan_req()
4535 l2cap_send_cmd(chan->conn, ident, L2CAP_MOVE_CHAN_REQ, sizeof(req), l2cap_send_move_chan_req()
4538 __set_chan_timer(chan, L2CAP_MOVE_TIMEOUT); l2cap_send_move_chan_req()
4541 static void l2cap_send_move_chan_rsp(struct l2cap_chan *chan, u16 result) l2cap_send_move_chan_rsp() argument
4545 BT_DBG("chan %p, result 0x%4.4x", chan, result); l2cap_send_move_chan_rsp()
4547 rsp.icid = cpu_to_le16(chan->dcid); l2cap_send_move_chan_rsp()
4550 l2cap_send_cmd(chan->conn, chan->ident, L2CAP_MOVE_CHAN_RSP, l2cap_send_move_chan_rsp()
4554 static void l2cap_send_move_chan_cfm(struct l2cap_chan *chan, u16 result) l2cap_send_move_chan_cfm() argument
4558 BT_DBG("chan %p, result 0x%4.4x", chan, result); l2cap_send_move_chan_cfm()
4560 chan->ident = l2cap_get_ident(chan->conn); l2cap_send_move_chan_cfm()
4562 cfm.icid = cpu_to_le16(chan->scid); l2cap_send_move_chan_cfm()
4565 l2cap_send_cmd(chan->conn, chan->ident, L2CAP_MOVE_CHAN_CFM, l2cap_send_move_chan_cfm()
4568 __set_chan_timer(chan, L2CAP_MOVE_TIMEOUT); l2cap_send_move_chan_cfm()
4595 static void __release_logical_link(struct l2cap_chan *chan) __release_logical_link() argument
4597 chan->hs_hchan = NULL; __release_logical_link()
4598 chan->hs_hcon = NULL; __release_logical_link()
4603 static void l2cap_logical_fail(struct l2cap_chan *chan) l2cap_logical_fail() argument
4606 if (chan->state != BT_CONNECTED) { l2cap_logical_fail()
4608 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_logical_fail()
4612 switch (chan->move_role) { l2cap_logical_fail()
4614 l2cap_move_done(chan); l2cap_logical_fail()
4615 l2cap_send_move_chan_rsp(chan, L2CAP_MR_NOT_SUPP); l2cap_logical_fail()
4618 if (chan->move_state == L2CAP_MOVE_WAIT_LOGICAL_COMP || l2cap_logical_fail()
4619 chan->move_state == L2CAP_MOVE_WAIT_LOGICAL_CFM) { l2cap_logical_fail()
4623 l2cap_move_done(chan); l2cap_logical_fail()
4629 l2cap_send_move_chan_cfm(chan, L2CAP_MC_UNCONFIRMED); l2cap_logical_fail()
4634 static void l2cap_logical_finish_create(struct l2cap_chan *chan, l2cap_logical_finish_create() argument
4639 chan->hs_hchan = hchan; l2cap_logical_finish_create()
4640 chan->hs_hcon->l2cap_data = chan->conn; l2cap_logical_finish_create()
4642 l2cap_send_efs_conf_rsp(chan, &rsp, chan->ident, 0); l2cap_logical_finish_create()
4644 if (test_bit(CONF_INPUT_DONE, &chan->conf_state)) { l2cap_logical_finish_create()
4647 set_default_fcs(chan); l2cap_logical_finish_create()
4649 err = l2cap_ertm_init(chan); l2cap_logical_finish_create()
4651 l2cap_send_disconn_req(chan, -err); l2cap_logical_finish_create()
4653 l2cap_chan_ready(chan); l2cap_logical_finish_create()
4657 static void l2cap_logical_finish_move(struct l2cap_chan *chan, l2cap_logical_finish_move() argument
4660 chan->hs_hcon = hchan->conn; l2cap_logical_finish_move()
4661 chan->hs_hcon->l2cap_data = chan->conn; l2cap_logical_finish_move()
4663 BT_DBG("move_state %d", chan->move_state); l2cap_logical_finish_move()
4665 switch (chan->move_state) { l2cap_logical_finish_move()
4670 chan->move_state = L2CAP_MOVE_WAIT_RSP_SUCCESS; l2cap_logical_finish_move()
4673 if (test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) { l2cap_logical_finish_move()
4674 chan->move_state = L2CAP_MOVE_WAIT_LOCAL_BUSY; l2cap_logical_finish_move()
4675 } else if (chan->move_role == L2CAP_MOVE_ROLE_INITIATOR) { l2cap_logical_finish_move()
4676 chan->move_state = L2CAP_MOVE_WAIT_CONFIRM_RSP; l2cap_logical_finish_move()
4677 l2cap_send_move_chan_cfm(chan, L2CAP_MC_CONFIRMED); l2cap_logical_finish_move()
4678 } else if (chan->move_role == L2CAP_MOVE_ROLE_RESPONDER) { l2cap_logical_finish_move()
4679 chan->move_state = L2CAP_MOVE_WAIT_CONFIRM; l2cap_logical_finish_move()
4680 l2cap_send_move_chan_rsp(chan, L2CAP_MR_SUCCESS); l2cap_logical_finish_move()
4685 __release_logical_link(chan); l2cap_logical_finish_move()
4687 chan->move_state = L2CAP_MOVE_STABLE; l2cap_logical_finish_move()
4691 /* Call with chan locked */ l2cap_logical_cfm()
4692 void l2cap_logical_cfm(struct l2cap_chan *chan, struct hci_chan *hchan, l2cap_logical_cfm() argument
4695 BT_DBG("chan %p, hchan %p, status %d", chan, hchan, status); l2cap_logical_cfm()
4698 l2cap_logical_fail(chan); l2cap_logical_cfm()
4699 __release_logical_link(chan); l2cap_logical_cfm()
4703 if (chan->state != BT_CONNECTED) { l2cap_logical_cfm()
4705 if (chan->local_amp_id != AMP_ID_BREDR) l2cap_logical_cfm()
4706 l2cap_logical_finish_create(chan, hchan); l2cap_logical_cfm()
4708 l2cap_logical_finish_move(chan, hchan); l2cap_logical_cfm()
4712 void l2cap_move_start(struct l2cap_chan *chan) l2cap_move_start() argument
4714 BT_DBG("chan %p", chan); l2cap_move_start()
4716 if (chan->local_amp_id == AMP_ID_BREDR) { l2cap_move_start()
4717 if (chan->chan_policy != BT_CHANNEL_POLICY_AMP_PREFERRED) l2cap_move_start()
4719 chan->move_role = L2CAP_MOVE_ROLE_INITIATOR; l2cap_move_start()
4720 chan->move_state = L2CAP_MOVE_WAIT_PREPARE; l2cap_move_start()
4723 chan->move_role = L2CAP_MOVE_ROLE_INITIATOR; l2cap_move_start()
4724 chan->move_state = L2CAP_MOVE_WAIT_RSP_SUCCESS; l2cap_move_start()
4725 chan->move_id = 0; l2cap_move_start()
4726 l2cap_move_setup(chan); l2cap_move_start()
4727 l2cap_send_move_chan_req(chan, 0); l2cap_move_start()
4731 static void l2cap_do_create(struct l2cap_chan *chan, int result, l2cap_do_create() argument
4734 BT_DBG("chan %p state %s %u -> %u", chan, state_to_string(chan->state), l2cap_do_create()
4737 chan->fcs = L2CAP_FCS_NONE; l2cap_do_create()
4740 if (chan->state == BT_CONNECT) { l2cap_do_create()
4742 chan->local_amp_id = local_amp_id; l2cap_do_create()
4743 l2cap_send_create_chan_req(chan, remote_amp_id); l2cap_do_create()
4746 l2cap_send_conn_req(chan); l2cap_do_create()
4753 if (__l2cap_no_conn_pending(chan)) { l2cap_do_create()
4756 rsp.scid = cpu_to_le16(chan->dcid); l2cap_do_create()
4757 rsp.dcid = cpu_to_le16(chan->scid); l2cap_do_create()
4769 l2cap_send_cmd(chan->conn, chan->ident, L2CAP_CREATE_CHAN_RSP, l2cap_do_create()
4773 l2cap_state_change(chan, BT_CONFIG); l2cap_do_create()
4774 set_bit(CONF_REQ_SENT, &chan->conf_state); l2cap_do_create()
4775 l2cap_send_cmd(chan->conn, l2cap_get_ident(chan->conn), l2cap_do_create()
4777 l2cap_build_conf_req(chan, buf), buf); l2cap_do_create()
4778 chan->num_conf_req++; l2cap_do_create()
4783 static void l2cap_do_move_initiate(struct l2cap_chan *chan, u8 local_amp_id, l2cap_do_move_initiate() argument
4786 l2cap_move_setup(chan); l2cap_do_move_initiate()
4787 chan->move_id = local_amp_id; l2cap_do_move_initiate()
4788 chan->move_state = L2CAP_MOVE_WAIT_RSP; l2cap_do_move_initiate()
4790 l2cap_send_move_chan_req(chan, remote_amp_id); l2cap_do_move_initiate()
4793 static void l2cap_do_move_respond(struct l2cap_chan *chan, int result) l2cap_do_move_respond() argument
4802 chan->hs_hcon = hchan->conn; l2cap_do_move_respond()
4803 chan->hs_hcon->l2cap_data = chan->conn; l2cap_do_move_respond()
4804 chan->move_state = L2CAP_MOVE_WAIT_CONFIRM; l2cap_do_move_respond()
4805 l2cap_send_move_chan_rsp(chan, L2CAP_MR_SUCCESS); l2cap_do_move_respond()
4807 l2cap_logical_cfm(chan, hchan, L2CAP_MR_SUCCESS); l2cap_do_move_respond()
4810 chan->move_state = L2CAP_MOVE_WAIT_LOGICAL_CFM; l2cap_do_move_respond()
4814 l2cap_send_move_chan_rsp(chan, L2CAP_MR_NOT_ALLOWED); l2cap_do_move_respond()
4818 static void l2cap_do_move_cancel(struct l2cap_chan *chan, int result) l2cap_do_move_cancel() argument
4820 if (chan->move_role == L2CAP_MOVE_ROLE_RESPONDER) { l2cap_do_move_cancel()
4827 l2cap_send_move_chan_rsp(chan, rsp_result); l2cap_do_move_cancel()
4830 chan->move_role = L2CAP_MOVE_ROLE_NONE; l2cap_do_move_cancel()
4831 chan->move_state = L2CAP_MOVE_STABLE; l2cap_do_move_cancel()
4834 l2cap_ertm_send(chan); l2cap_do_move_cancel()
4837 /* Invoke with locked chan */ __l2cap_physical_cfm()
4838 void __l2cap_physical_cfm(struct l2cap_chan *chan, int result) __l2cap_physical_cfm() argument
4840 u8 local_amp_id = chan->local_amp_id; __l2cap_physical_cfm()
4841 u8 remote_amp_id = chan->remote_amp_id; __l2cap_physical_cfm()
4843 BT_DBG("chan %p, result %d, local_amp_id %d, remote_amp_id %d", __l2cap_physical_cfm()
4844 chan, result, local_amp_id, remote_amp_id); __l2cap_physical_cfm()
4846 if (chan->state == BT_DISCONN || chan->state == BT_CLOSED) { __l2cap_physical_cfm()
4847 l2cap_chan_unlock(chan); __l2cap_physical_cfm()
4851 if (chan->state != BT_CONNECTED) { __l2cap_physical_cfm()
4852 l2cap_do_create(chan, result, local_amp_id, remote_amp_id); __l2cap_physical_cfm()
4854 l2cap_do_move_cancel(chan, result); __l2cap_physical_cfm()
4856 switch (chan->move_role) { __l2cap_physical_cfm()
4858 l2cap_do_move_initiate(chan, local_amp_id, __l2cap_physical_cfm()
4862 l2cap_do_move_respond(chan, result); __l2cap_physical_cfm()
4865 l2cap_do_move_cancel(chan, result); __l2cap_physical_cfm()
4877 struct l2cap_chan *chan; l2cap_move_channel_req() local
4891 chan = l2cap_get_chan_by_dcid(conn, icid); l2cap_move_channel_req()
4892 if (!chan) { l2cap_move_channel_req()
4900 chan->ident = cmd->ident; l2cap_move_channel_req()
4902 if (chan->scid < L2CAP_CID_DYN_START || l2cap_move_channel_req()
4903 chan->chan_policy == BT_CHANNEL_POLICY_BREDR_ONLY || l2cap_move_channel_req()
4904 (chan->mode != L2CAP_MODE_ERTM && l2cap_move_channel_req()
4905 chan->mode != L2CAP_MODE_STREAMING)) { l2cap_move_channel_req()
4910 if (chan->local_amp_id == req->dest_amp_id) { l2cap_move_channel_req()
4933 if ((__chan_is_moving(chan) || l2cap_move_channel_req()
4934 chan->move_role != L2CAP_MOVE_ROLE_NONE) && l2cap_move_channel_req()
4940 chan->move_role = L2CAP_MOVE_ROLE_RESPONDER; l2cap_move_channel_req()
4941 l2cap_move_setup(chan); l2cap_move_channel_req()
4942 chan->move_id = req->dest_amp_id; l2cap_move_channel_req()
4943 icid = chan->dcid; l2cap_move_channel_req()
4947 if (test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) { l2cap_move_channel_req()
4948 chan->move_state = L2CAP_MOVE_WAIT_LOCAL_BUSY; l2cap_move_channel_req()
4951 chan->move_state = L2CAP_MOVE_WAIT_CONFIRM; l2cap_move_channel_req()
4955 chan->move_state = L2CAP_MOVE_WAIT_PREPARE; l2cap_move_channel_req()
4957 /*amp_accept_physical(chan, req->dest_amp_id);*/ l2cap_move_channel_req()
4962 l2cap_send_move_chan_rsp(chan, result); l2cap_move_channel_req()
4964 l2cap_chan_unlock(chan); l2cap_move_channel_req()
4971 struct l2cap_chan *chan; l2cap_move_continue() local
4974 chan = l2cap_get_chan_by_scid(conn, icid); l2cap_move_continue()
4975 if (!chan) { l2cap_move_continue()
4980 __clear_chan_timer(chan); l2cap_move_continue()
4982 __set_chan_timer(chan, L2CAP_MOVE_ERTX_TIMEOUT); l2cap_move_continue()
4984 switch (chan->move_state) { l2cap_move_continue()
4989 chan->move_state = L2CAP_MOVE_WAIT_LOGICAL_CFM; l2cap_move_continue()
4995 &chan->conn_state)) { l2cap_move_continue()
4996 chan->move_state = L2CAP_MOVE_WAIT_LOCAL_BUSY; l2cap_move_continue()
5001 chan->move_state = L2CAP_MOVE_WAIT_CONFIRM_RSP; l2cap_move_continue()
5002 l2cap_send_move_chan_cfm(chan, L2CAP_MC_CONFIRMED); l2cap_move_continue()
5011 chan->move_state = L2CAP_MOVE_WAIT_LOGICAL_CFM; l2cap_move_continue()
5016 chan->move_state = L2CAP_MOVE_WAIT_LOGICAL_COMP; l2cap_move_continue()
5022 l2cap_send_move_chan_cfm(chan, L2CAP_MC_UNCONFIRMED); l2cap_move_continue()
5034 chan->hs_hcon = hchan->conn; l2cap_move_continue()
5035 chan->hs_hcon->l2cap_data = chan->conn; l2cap_move_continue()
5039 l2cap_send_move_chan_cfm(chan, L2CAP_MC_CONFIRMED); l2cap_move_continue()
5044 chan->move_state = L2CAP_MOVE_WAIT_RSP_SUCCESS; l2cap_move_continue()
5047 l2cap_logical_cfm(chan, hchan, L2CAP_MR_SUCCESS); l2cap_move_continue()
5051 chan->move_id = chan->local_amp_id; l2cap_move_continue()
5052 l2cap_move_done(chan); l2cap_move_continue()
5053 l2cap_send_move_chan_cfm(chan, L2CAP_MC_UNCONFIRMED); l2cap_move_continue()
5056 l2cap_chan_unlock(chan); l2cap_move_continue()
5062 struct l2cap_chan *chan; l2cap_move_fail() local
5064 chan = l2cap_get_chan_by_ident(conn, ident); l2cap_move_fail()
5065 if (!chan) { l2cap_move_fail()
5071 __clear_chan_timer(chan); l2cap_move_fail()
5073 if (chan->move_role == L2CAP_MOVE_ROLE_INITIATOR) { l2cap_move_fail()
5075 chan->move_role = L2CAP_MOVE_ROLE_RESPONDER; l2cap_move_fail()
5078 chan->move_id = chan->local_amp_id; l2cap_move_fail()
5079 l2cap_move_done(chan); l2cap_move_fail()
5083 l2cap_send_move_chan_cfm(chan, L2CAP_MC_UNCONFIRMED); l2cap_move_fail()
5085 l2cap_chan_unlock(chan); l2cap_move_fail()
5116 struct l2cap_chan *chan; l2cap_move_channel_confirm() local
5127 chan = l2cap_get_chan_by_dcid(conn, icid); l2cap_move_channel_confirm()
5128 if (!chan) { l2cap_move_channel_confirm()
5134 if (chan->move_state == L2CAP_MOVE_WAIT_CONFIRM) { l2cap_move_channel_confirm()
5136 chan->local_amp_id = chan->move_id; l2cap_move_channel_confirm()
5137 if (chan->local_amp_id == AMP_ID_BREDR) l2cap_move_channel_confirm()
5138 __release_logical_link(chan); l2cap_move_channel_confirm()
5140 chan->move_id = chan->local_amp_id; l2cap_move_channel_confirm()
5143 l2cap_move_done(chan); l2cap_move_channel_confirm()
5148 l2cap_chan_unlock(chan); l2cap_move_channel_confirm()
5158 struct l2cap_chan *chan; l2cap_move_channel_confirm_rsp() local
5168 chan = l2cap_get_chan_by_scid(conn, icid); l2cap_move_channel_confirm_rsp()
5169 if (!chan) l2cap_move_channel_confirm_rsp()
5172 __clear_chan_timer(chan); l2cap_move_channel_confirm_rsp()
5174 if (chan->move_state == L2CAP_MOVE_WAIT_CONFIRM_RSP) { l2cap_move_channel_confirm_rsp()
5175 chan->local_amp_id = chan->move_id; l2cap_move_channel_confirm_rsp()
5177 if (chan->local_amp_id == AMP_ID_BREDR && chan->hs_hchan) l2cap_move_channel_confirm_rsp()
5178 __release_logical_link(chan); l2cap_move_channel_confirm_rsp()
5180 l2cap_move_done(chan); l2cap_move_channel_confirm_rsp()
5183 l2cap_chan_unlock(chan); l2cap_move_channel_confirm_rsp()
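
The two confirm handlers above commit or roll back a pending controller move: a confirmed move adopts move_id as the new local_amp_id, an unconfirmed one restores move_id from it. A standalone sketch of just that commit/rollback step, with the surrounding locking, timers and signalling omitted:

#include <stdio.h>

#define MC_CONFIRMED    0
#define MC_UNCONFIRMED  1

struct chan { unsigned char local_amp_id, move_id; };

static void move_channel_confirm(struct chan *c, int result)
{
        if (result == MC_CONFIRMED)
                c->local_amp_id = c->move_id;   /* commit the move */
        else
                c->move_id = c->local_amp_id;   /* roll back */
}

int main(void)
{
        struct chan c = { .local_amp_id = 0, .move_id = 1 };

        move_channel_confirm(&c, MC_CONFIRMED);
        printf("now on controller %u\n", c.local_amp_id);      /* 1 */
        return 0;
}
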
5245 struct l2cap_chan *chan; l2cap_le_connect_rsp() local
5265 chan = __l2cap_get_chan_by_ident(conn, cmd->ident); l2cap_le_connect_rsp()
5266 if (!chan) { l2cap_le_connect_rsp()
5273 l2cap_chan_lock(chan); l2cap_le_connect_rsp()
5277 chan->ident = 0; l2cap_le_connect_rsp()
5278 chan->dcid = dcid; l2cap_le_connect_rsp()
5279 chan->omtu = mtu; l2cap_le_connect_rsp()
5280 chan->remote_mps = mps; l2cap_le_connect_rsp()
5281 chan->tx_credits = credits; l2cap_le_connect_rsp()
5282 l2cap_chan_ready(chan); l2cap_le_connect_rsp()
5291 l2cap_chan_del(chan, ECONNREFUSED); l2cap_le_connect_rsp()
5296 if (chan->sec_level < sec_level) l2cap_le_connect_rsp()
5297 chan->sec_level = sec_level; l2cap_le_connect_rsp()
5300 clear_bit(FLAG_LE_CONN_REQ_SENT, &chan->flags); l2cap_le_connect_rsp()
5302 smp_conn_security(hcon, chan->sec_level); l2cap_le_connect_rsp()
5306 l2cap_chan_del(chan, ECONNREFUSED); l2cap_le_connect_rsp()
5310 l2cap_chan_unlock(chan); l2cap_le_connect_rsp()
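
On a successful LE connect response the channel simply adopts the peer's little-endian parameters: destination CID, outgoing MTU, remote MPS and the initial TX credit pool. A sketch of that adoption; the on-the-wire struct layout and the get_le16() helper are illustrative:

#include <stdint.h>
#include <stdio.h>

struct le_conn_rsp {            /* on-the-wire, all little-endian */
        uint8_t dcid[2], mtu[2], mps[2], credits[2], result[2];
};

struct chan { uint16_t dcid, omtu, remote_mps, tx_credits; };

static uint16_t get_le16(const uint8_t *p)
{
        return (uint16_t)(p[0] | (p[1] << 8));
}

int main(void)
{
        struct le_conn_rsp rsp = {
                .dcid = { 0x41, 0x00 }, .mtu = { 0xf7, 0x00 },
                .mps = { 0xe6, 0x00 }, .credits = { 0x05, 0x00 },
        };
        struct chan c = {
                .dcid = get_le16(rsp.dcid),
                .omtu = get_le16(rsp.mtu),
                .remote_mps = get_le16(rsp.mps),
                .tx_credits = get_le16(rsp.credits),
        };

        printf("dcid=0x%04x omtu=%u mps=%u credits=%u\n",
               c.dcid, c.omtu, c.remote_mps, c.tx_credits);
        return 0;
}
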
5404 struct l2cap_chan *chan, *pchan; l2cap_le_connect_req() local
5430 chan = NULL; l2cap_le_connect_req()
5440 chan = NULL; l2cap_le_connect_req()
5447 chan = NULL; l2cap_le_connect_req()
5451 chan = pchan->ops->new_connection(pchan); l2cap_le_connect_req()
5452 if (!chan) { l2cap_le_connect_req()
5457 l2cap_le_flowctl_init(chan); l2cap_le_connect_req()
5459 bacpy(&chan->src, &conn->hcon->src); l2cap_le_connect_req()
5460 bacpy(&chan->dst, &conn->hcon->dst); l2cap_le_connect_req()
5461 chan->src_type = bdaddr_src_type(conn->hcon); l2cap_le_connect_req()
5462 chan->dst_type = bdaddr_dst_type(conn->hcon); l2cap_le_connect_req()
5463 chan->psm = psm; l2cap_le_connect_req()
5464 chan->dcid = scid; l2cap_le_connect_req()
5465 chan->omtu = mtu; l2cap_le_connect_req()
5466 chan->remote_mps = mps; l2cap_le_connect_req()
5467 chan->tx_credits = __le16_to_cpu(req->credits); l2cap_le_connect_req()
5469 __l2cap_chan_add(conn, chan); l2cap_le_connect_req()
5470 dcid = chan->scid; l2cap_le_connect_req()
5471 credits = chan->rx_credits; l2cap_le_connect_req()
5473 __set_chan_timer(chan, chan->ops->get_sndtimeo(chan)); l2cap_le_connect_req()
5475 chan->ident = cmd->ident; l2cap_le_connect_req()
5477 if (test_bit(FLAG_DEFER_SETUP, &chan->flags)) { l2cap_le_connect_req()
5478 l2cap_state_change(chan, BT_CONNECT2); l2cap_le_connect_req()
5485 chan->ops->defer(chan); l2cap_le_connect_req()
5487 l2cap_chan_ready(chan); l2cap_le_connect_req()
5500 if (chan) { l2cap_le_connect_req()
5501 rsp.mtu = cpu_to_le16(chan->imtu); l2cap_le_connect_req()
5502 rsp.mps = cpu_to_le16(chan->mps); l2cap_le_connect_req()
5522 struct l2cap_chan *chan; l2cap_le_credits() local
5534 chan = l2cap_get_chan_by_dcid(conn, cid); l2cap_le_credits()
5535 if (!chan) l2cap_le_credits()
5538 max_credits = LE_FLOWCTL_MAX_CREDITS - chan->tx_credits; l2cap_le_credits()
5541 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_le_credits()
5542 l2cap_chan_unlock(chan); l2cap_le_credits()
5550 chan->tx_credits += credits; l2cap_le_credits()
5552 while (chan->tx_credits && !skb_queue_empty(&chan->tx_q)) { l2cap_le_credits()
5553 l2cap_do_send(chan, skb_dequeue(&chan->tx_q)); l2cap_le_credits()
5554 chan->tx_credits--; l2cap_le_credits()
5557 if (chan->tx_credits) l2cap_le_credits()
5558 chan->ops->resume(chan); l2cap_le_credits()
5560 l2cap_chan_unlock(chan); l2cap_le_credits()
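
l2cap_le_credits() is the TX half of LE credit-based flow control: a grant that would push the pool past the maximum is a protocol error worth a disconnect, otherwise queued frames are sent while credits last. A standalone sketch, where MAX_CREDITS stands in for LE_FLOWCTL_MAX_CREDITS and the frame queue is illustrative:

#include <stdio.h>

#define MAX_CREDITS 65535U      /* stands in for LE_FLOWCTL_MAX_CREDITS */

static unsigned int tx_credits;
static int tx_q[] = { 10, 20, 30, 40 };         /* pretend frames */
static unsigned int tx_head, tx_len = 4;

static int grant_credits(unsigned int credits)
{
        if (credits > MAX_CREDITS - tx_credits)
                return -1;      /* pool would overflow: protocol error */

        tx_credits += credits;

        /* one frame may be sent per credit */
        while (tx_credits && tx_head < tx_len) {
                printf("send frame %d\n", tx_q[tx_head++]);
                tx_credits--;
        }
        return 0;
}

int main(void)
{
        grant_credits(3);               /* sends three queued frames */
        grant_credits(2);               /* sends the last one, 1 credit left */
        if (grant_credits(MAX_CREDITS) < 0)
                printf("credit overflow: disconnect\n");
        return 0;
}
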
5570 struct l2cap_chan *chan; l2cap_le_command_rej() local
5577 chan = __l2cap_get_chan_by_ident(conn, cmd->ident); l2cap_le_command_rej()
5578 if (!chan) l2cap_le_command_rej()
5581 l2cap_chan_lock(chan); l2cap_le_command_rej()
5582 l2cap_chan_del(chan, ECONNREFUSED); l2cap_le_command_rej()
5583 l2cap_chan_unlock(chan); l2cap_le_command_rej()
5727 static int l2cap_check_fcs(struct l2cap_chan *chan, struct sk_buff *skb) l2cap_check_fcs() argument
5732 if (test_bit(FLAG_EXT_CTRL, &chan->flags)) l2cap_check_fcs()
5737 if (chan->fcs == L2CAP_FCS_CRC16) { l2cap_check_fcs()
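
l2cap_check_fcs() verifies the optional CRC-16 carried at the tail of ERTM and streaming frames. The sketch below shows the check end to end, using the bitwise form of the reflected polynomial 0xA001 with initial value 0 (the algorithm lib/crc16.c implements); the framing itself is illustrative:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static uint16_t crc16(uint16_t crc, const uint8_t *p, size_t len)
{
        int i;

        while (len--) {
                crc ^= *p++;
                for (i = 0; i < 8; i++)
                        crc = (crc >> 1) ^ ((crc & 1) ? 0xA001 : 0);
        }
        return crc;
}

int main(void)
{
        uint8_t frame[16] = "payload";
        size_t len = 7;
        uint16_t fcs = crc16(0, frame, len);
        uint16_t wire;

        frame[len] = fcs & 0xff;        /* FCS is sent little-endian */
        frame[len + 1] = fcs >> 8;

        /* receiver recomputes over the same bytes and compares */
        wire = frame[len] | (frame[len + 1] << 8);
        printf("fcs ok: %d\n", crc16(0, frame, len) == wire);
        return 0;
}
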
5748 static void l2cap_send_i_or_rr_or_rnr(struct l2cap_chan *chan) l2cap_send_i_or_rr_or_rnr() argument
5752 BT_DBG("chan %p", chan); l2cap_send_i_or_rr_or_rnr()
5757 control.reqseq = chan->buffer_seq; l2cap_send_i_or_rr_or_rnr()
5758 set_bit(CONN_SEND_FBIT, &chan->conn_state); l2cap_send_i_or_rr_or_rnr()
5760 if (test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) { l2cap_send_i_or_rr_or_rnr()
5762 l2cap_send_sframe(chan, &control); l2cap_send_i_or_rr_or_rnr()
5765 if (test_and_clear_bit(CONN_REMOTE_BUSY, &chan->conn_state) && l2cap_send_i_or_rr_or_rnr()
5766 chan->unacked_frames > 0) l2cap_send_i_or_rr_or_rnr()
5767 __set_retrans_timer(chan); l2cap_send_i_or_rr_or_rnr()
5770 l2cap_ertm_send(chan); l2cap_send_i_or_rr_or_rnr()
5772 if (!test_bit(CONN_LOCAL_BUSY, &chan->conn_state) && l2cap_send_i_or_rr_or_rnr()
5773 test_bit(CONN_SEND_FBIT, &chan->conn_state)) { l2cap_send_i_or_rr_or_rnr()
5778 l2cap_send_sframe(chan, &control); l2cap_send_i_or_rr_or_rnr()
5801 static int l2cap_reassemble_sdu(struct l2cap_chan *chan, struct sk_buff *skb, l2cap_reassemble_sdu() argument
5808 if (chan->sdu) l2cap_reassemble_sdu()
5811 err = chan->ops->recv(chan, skb); l2cap_reassemble_sdu()
5815 if (chan->sdu) l2cap_reassemble_sdu()
5818 chan->sdu_len = get_unaligned_le16(skb->data); l2cap_reassemble_sdu()
5821 if (chan->sdu_len > chan->imtu) { l2cap_reassemble_sdu()
5826 if (skb->len >= chan->sdu_len) l2cap_reassemble_sdu()
5829 chan->sdu = skb; l2cap_reassemble_sdu()
5830 chan->sdu_last_frag = skb; l2cap_reassemble_sdu()
5837 if (!chan->sdu) l2cap_reassemble_sdu()
5840 append_skb_frag(chan->sdu, skb, l2cap_reassemble_sdu()
5841 &chan->sdu_last_frag); l2cap_reassemble_sdu()
5844 if (chan->sdu->len >= chan->sdu_len) l2cap_reassemble_sdu()
5851 if (!chan->sdu) l2cap_reassemble_sdu()
5854 append_skb_frag(chan->sdu, skb, l2cap_reassemble_sdu()
5855 &chan->sdu_last_frag); l2cap_reassemble_sdu()
5858 if (chan->sdu->len != chan->sdu_len) l2cap_reassemble_sdu()
5861 err = chan->ops->recv(chan, chan->sdu); l2cap_reassemble_sdu()
5865 chan->sdu = NULL; l2cap_reassemble_sdu()
5866 chan->sdu_last_frag = NULL; l2cap_reassemble_sdu()
5867 chan->sdu_len = 0; l2cap_reassemble_sdu()
5874 kfree_skb(chan->sdu); l2cap_reassemble_sdu()
5875 chan->sdu = NULL; l2cap_reassemble_sdu()
5876 chan->sdu_last_frag = NULL; l2cap_reassemble_sdu()
5877 chan->sdu_len = 0; l2cap_reassemble_sdu()
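
l2cap_reassemble_sdu() stitches segmented SDUs back together: a START fragment announces the total SDU length (a 16-bit prefix on the wire), CONTINUE and END fragments are appended, and the SDU is delivered once the announced length is reached, with any mismatch dropped as an error. A simplified standalone model; the one-byte length prefix and the fixed buffer are simplifications:

#include <stdio.h>
#include <string.h>

enum sar { SAR_UNSEG, SAR_START, SAR_CONT, SAR_END };

static char sdu[256];
static size_t sdu_have, sdu_len;        /* collected / announced bytes */

static int rx_fragment(enum sar sar, const char *data, size_t len)
{
        switch (sar) {
        case SAR_UNSEG:                 /* whole SDU in one frame */
                printf("deliver: %.*s\n", (int)len, data);
                return 0;
        case SAR_START:
                if (sdu_len)            /* an SDU is already in progress */
                        return -1;
                sdu_len = (unsigned char)data[0];       /* length prefix */
                data++;
                len--;
                break;
        case SAR_CONT:
        case SAR_END:
                if (!sdu_len)           /* no SDU in progress */
                        return -1;
                break;
        }

        memcpy(sdu + sdu_have, data, len);
        sdu_have += len;

        if (sar == SAR_END) {
                if (sdu_have != sdu_len)        /* length mismatch: drop */
                        return -1;
                printf("deliver: %.*s\n", (int)sdu_have, sdu);
                sdu_have = sdu_len = 0;
        }
        return 0;
}

int main(void)
{
        rx_fragment(SAR_START, "\x0bhello", 6); /* SDU of 11 bytes */
        rx_fragment(SAR_CONT, " wor", 4);
        rx_fragment(SAR_END, "ld", 2);          /* 5+4+2 == 11: deliver */
        return 0;
}
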
5883 static int l2cap_resegment(struct l2cap_chan *chan) l2cap_resegment() argument
5889 void l2cap_chan_busy(struct l2cap_chan *chan, int busy) l2cap_chan_busy() argument
5893 if (chan->mode != L2CAP_MODE_ERTM) l2cap_chan_busy()
5897 l2cap_tx(chan, NULL, NULL, event); l2cap_chan_busy()
5900 static int l2cap_rx_queued_iframes(struct l2cap_chan *chan) l2cap_rx_queued_iframes() argument
5907 BT_DBG("chan %p", chan); l2cap_rx_queued_iframes()
5909 while (!test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) { l2cap_rx_queued_iframes()
5912 chan->buffer_seq, skb_queue_len(&chan->srej_q)); l2cap_rx_queued_iframes()
5914 skb = l2cap_ertm_seq_in_queue(&chan->srej_q, chan->buffer_seq); l2cap_rx_queued_iframes()
5919 skb_unlink(skb, &chan->srej_q); l2cap_rx_queued_iframes()
5920 chan->buffer_seq = __next_seq(chan, chan->buffer_seq); l2cap_rx_queued_iframes()
5921 err = l2cap_reassemble_sdu(chan, skb, &bt_cb(skb)->l2cap); l2cap_rx_queued_iframes()
5926 if (skb_queue_empty(&chan->srej_q)) { l2cap_rx_queued_iframes()
5927 chan->rx_state = L2CAP_RX_STATE_RECV; l2cap_rx_queued_iframes()
5928 l2cap_send_ack(chan); l2cap_rx_queued_iframes()
5934 static void l2cap_handle_srej(struct l2cap_chan *chan, l2cap_handle_srej() argument
5939 BT_DBG("chan %p, control %p", chan, control); l2cap_handle_srej()
5941 if (control->reqseq == chan->next_tx_seq) { l2cap_handle_srej()
5943 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_handle_srej()
5947 skb = l2cap_ertm_seq_in_queue(&chan->tx_q, control->reqseq); l2cap_handle_srej()
5955 if (chan->max_tx != 0 && bt_cb(skb)->l2cap.retries >= chan->max_tx) { l2cap_handle_srej()
5956 BT_DBG("Retry limit exceeded (%d)", chan->max_tx); l2cap_handle_srej()
5957 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_handle_srej()
5961 clear_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_handle_srej()
5964 l2cap_pass_to_tx(chan, control); l2cap_handle_srej()
5966 set_bit(CONN_SEND_FBIT, &chan->conn_state); l2cap_handle_srej()
5967 l2cap_retransmit(chan, control); l2cap_handle_srej()
5968 l2cap_ertm_send(chan); l2cap_handle_srej()
5970 if (chan->tx_state == L2CAP_TX_STATE_WAIT_F) { l2cap_handle_srej()
5971 set_bit(CONN_SREJ_ACT, &chan->conn_state); l2cap_handle_srej()
5972 chan->srej_save_reqseq = control->reqseq; l2cap_handle_srej()
5975 l2cap_pass_to_tx_fbit(chan, control); l2cap_handle_srej()
5978 if (chan->srej_save_reqseq != control->reqseq || l2cap_handle_srej()
5980 &chan->conn_state)) l2cap_handle_srej()
5981 l2cap_retransmit(chan, control); l2cap_handle_srej()
5983 l2cap_retransmit(chan, control); l2cap_handle_srej()
5984 if (chan->tx_state == L2CAP_TX_STATE_WAIT_F) { l2cap_handle_srej()
5985 set_bit(CONN_SREJ_ACT, &chan->conn_state); l2cap_handle_srej()
5986 chan->srej_save_reqseq = control->reqseq; l2cap_handle_srej()
5992 static void l2cap_handle_rej(struct l2cap_chan *chan, l2cap_handle_rej() argument
5997 BT_DBG("chan %p, control %p", chan, control); l2cap_handle_rej()
5999 if (control->reqseq == chan->next_tx_seq) { l2cap_handle_rej()
6001 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_handle_rej()
6005 skb = l2cap_ertm_seq_in_queue(&chan->tx_q, control->reqseq); l2cap_handle_rej()
6007 if (chan->max_tx && skb && l2cap_handle_rej()
6008 bt_cb(skb)->l2cap.retries >= chan->max_tx) { l2cap_handle_rej()
6009 BT_DBG("Retry limit exceeded (%d)", chan->max_tx); l2cap_handle_rej()
6010 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_handle_rej()
6014 clear_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_handle_rej()
6016 l2cap_pass_to_tx(chan, control); l2cap_handle_rej()
6019 if (!test_and_clear_bit(CONN_REJ_ACT, &chan->conn_state)) l2cap_handle_rej()
6020 l2cap_retransmit_all(chan, control); l2cap_handle_rej()
6022 l2cap_retransmit_all(chan, control); l2cap_handle_rej()
6023 l2cap_ertm_send(chan); l2cap_handle_rej()
6024 if (chan->tx_state == L2CAP_TX_STATE_WAIT_F) l2cap_handle_rej()
6025 set_bit(CONN_REJ_ACT, &chan->conn_state); l2cap_handle_rej()
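
l2cap_handle_rej() and l2cap_handle_srej() are ERTM's two recovery modes: an SREJ names exactly one missing I-frame (selective repeat), while a REJ asks for everything unacked from reqseq onward (go-back-N). A sketch of that contrast, with retransmission reduced to a print:

#include <stdio.h>

#define SEQ_SPACE 64U

static void retransmit_one(unsigned int seq)
{
        printf("retransmit I-frame %u\n", seq);
}

static void handle_srej(unsigned int reqseq)
{
        retransmit_one(reqseq);         /* only the frame asked for */
}

static void handle_rej(unsigned int reqseq, unsigned int next_tx_seq)
{
        unsigned int s;

        for (s = reqseq; s != next_tx_seq; s = (s + 1) % SEQ_SPACE)
                retransmit_one(s);      /* everything still unacked */
}

int main(void)
{
        handle_srej(5);
        handle_rej(62, 2);              /* wraps: 62, 63, 0, 1 */
        return 0;
}
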
6029 static u8 l2cap_classify_txseq(struct l2cap_chan *chan, u16 txseq) l2cap_classify_txseq() argument
6031 BT_DBG("chan %p, txseq %d", chan, txseq); l2cap_classify_txseq()
6033 BT_DBG("last_acked_seq %d, expected_tx_seq %d", chan->last_acked_seq, l2cap_classify_txseq()
6034 chan->expected_tx_seq); l2cap_classify_txseq()
6036 if (chan->rx_state == L2CAP_RX_STATE_SREJ_SENT) { l2cap_classify_txseq()
6037 if (__seq_offset(chan, txseq, chan->last_acked_seq) >= l2cap_classify_txseq()
6038 chan->tx_win) { l2cap_classify_txseq()
6042 if (chan->tx_win <= ((chan->tx_win_max + 1) >> 1)) { l2cap_classify_txseq()
6051 if (chan->srej_list.head == txseq) { l2cap_classify_txseq()
6056 if (l2cap_ertm_seq_in_queue(&chan->srej_q, txseq)) { l2cap_classify_txseq()
6061 if (l2cap_seq_list_contains(&chan->srej_list, txseq)) { l2cap_classify_txseq()
6067 if (chan->expected_tx_seq == txseq) { l2cap_classify_txseq()
6068 if (__seq_offset(chan, txseq, chan->last_acked_seq) >= l2cap_classify_txseq()
6069 chan->tx_win) { l2cap_classify_txseq()
6078 if (__seq_offset(chan, txseq, chan->last_acked_seq) < l2cap_classify_txseq()
6079 __seq_offset(chan, chan->expected_tx_seq, chan->last_acked_seq)) { l2cap_classify_txseq()
6084 if (__seq_offset(chan, txseq, chan->last_acked_seq) >= chan->tx_win) { l2cap_classify_txseq()
6102 if (chan->tx_win <= ((chan->tx_win_max + 1) >> 1)) { l2cap_classify_txseq()
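
l2cap_classify_txseq() leans on __seq_offset(), which measures the forward distance between two sequence numbers modulo the sequence space, so window tests keep working across wrap-around. A standalone sketch of the arithmetic; SEQ_SPACE of 64 matches the non-extended ERTM control field:

#include <stdio.h>

#define SEQ_SPACE 64U           /* 6-bit sequence numbers */

static unsigned int seq_offset(unsigned int seq1, unsigned int seq2)
{
        /* forward distance from seq2 to seq1, modulo the space */
        return (seq1 - seq2 + SEQ_SPACE) % SEQ_SPACE;
}

int main(void)
{
        unsigned int last_acked = 60, tx_win = 10;

        /* txseq 2 wrapped past 63: offset 6, inside the window */
        printf("offset=%u in_win=%d\n", seq_offset(2, last_acked),
               seq_offset(2, last_acked) < tx_win);

        /* txseq 40 is far behind: offset 44, outside the window */
        printf("offset=%u in_win=%d\n", seq_offset(40, last_acked),
               seq_offset(40, last_acked) < tx_win);
        return 0;
}
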
6115 static int l2cap_rx_state_recv(struct l2cap_chan *chan, l2cap_rx_state_recv() argument
6122 BT_DBG("chan %p, control %p, skb %p, event %d", chan, control, skb, l2cap_rx_state_recv()
6127 switch (l2cap_classify_txseq(chan, control->txseq)) { l2cap_rx_state_recv()
6129 l2cap_pass_to_tx(chan, control); l2cap_rx_state_recv()
6131 if (test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) { l2cap_rx_state_recv()
6137 chan->expected_tx_seq = __next_seq(chan, l2cap_rx_state_recv()
6140 chan->buffer_seq = chan->expected_tx_seq; l2cap_rx_state_recv()
6143 err = l2cap_reassemble_sdu(chan, skb, control); l2cap_rx_state_recv()
6149 &chan->conn_state)) { l2cap_rx_state_recv()
6151 l2cap_retransmit_all(chan, control); l2cap_rx_state_recv()
6152 l2cap_ertm_send(chan); l2cap_rx_state_recv()
6156 if (!test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) l2cap_rx_state_recv()
6157 l2cap_send_ack(chan); l2cap_rx_state_recv()
6160 l2cap_pass_to_tx(chan, control); l2cap_rx_state_recv()
6166 if (test_bit(CONN_LOCAL_BUSY, &chan->conn_state)) { l2cap_rx_state_recv()
6176 skb_queue_tail(&chan->srej_q, skb); l2cap_rx_state_recv()
6179 skb_queue_len(&chan->srej_q)); l2cap_rx_state_recv()
6181 clear_bit(CONN_SREJ_ACT, &chan->conn_state); l2cap_rx_state_recv()
6182 l2cap_seq_list_clear(&chan->srej_list); l2cap_rx_state_recv()
6183 l2cap_send_srej(chan, control->txseq); l2cap_rx_state_recv()
6185 chan->rx_state = L2CAP_RX_STATE_SREJ_SENT; l2cap_rx_state_recv()
6188 l2cap_pass_to_tx(chan, control); l2cap_rx_state_recv()
6194 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_rx_state_recv()
6199 l2cap_pass_to_tx(chan, control); l2cap_rx_state_recv()
6201 clear_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_rx_state_recv()
6203 if (!test_and_clear_bit(CONN_REJ_ACT, &chan->conn_state) && l2cap_rx_state_recv()
6204 !__chan_is_moving(chan)) { l2cap_rx_state_recv()
6206 l2cap_retransmit_all(chan, control); l2cap_rx_state_recv()
6209 l2cap_ertm_send(chan); l2cap_rx_state_recv()
6211 l2cap_send_i_or_rr_or_rnr(chan); l2cap_rx_state_recv()
6214 &chan->conn_state) && l2cap_rx_state_recv()
6215 chan->unacked_frames) l2cap_rx_state_recv()
6216 __set_retrans_timer(chan); l2cap_rx_state_recv()
6218 l2cap_ertm_send(chan); l2cap_rx_state_recv()
6222 set_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_rx_state_recv()
6223 l2cap_pass_to_tx(chan, control); l2cap_rx_state_recv()
6225 set_bit(CONN_SEND_FBIT, &chan->conn_state); l2cap_rx_state_recv()
6226 l2cap_send_rr_or_rnr(chan, 0); l2cap_rx_state_recv()
6228 __clear_retrans_timer(chan); l2cap_rx_state_recv()
6229 l2cap_seq_list_clear(&chan->retrans_list); l2cap_rx_state_recv()
6232 l2cap_handle_rej(chan, control); l2cap_rx_state_recv()
6235 l2cap_handle_srej(chan, control); l2cap_rx_state_recv()
6249 static int l2cap_rx_state_srej_sent(struct l2cap_chan *chan, l2cap_rx_state_srej_sent() argument
6257 BT_DBG("chan %p, control %p, skb %p, event %d", chan, control, skb, l2cap_rx_state_srej_sent()
6262 switch (l2cap_classify_txseq(chan, txseq)) { l2cap_rx_state_srej_sent()
6265 l2cap_pass_to_tx(chan, control); l2cap_rx_state_srej_sent()
6266 skb_queue_tail(&chan->srej_q, skb); l2cap_rx_state_srej_sent()
6269 skb_queue_len(&chan->srej_q)); l2cap_rx_state_srej_sent()
6271 chan->expected_tx_seq = __next_seq(chan, txseq); l2cap_rx_state_srej_sent()
6274 l2cap_seq_list_pop(&chan->srej_list); l2cap_rx_state_srej_sent()
6276 l2cap_pass_to_tx(chan, control); l2cap_rx_state_srej_sent()
6277 skb_queue_tail(&chan->srej_q, skb); l2cap_rx_state_srej_sent()
6280 skb_queue_len(&chan->srej_q)); l2cap_rx_state_srej_sent()
6282 err = l2cap_rx_queued_iframes(chan); l2cap_rx_state_srej_sent()
6292 skb_queue_tail(&chan->srej_q, skb); l2cap_rx_state_srej_sent()
6295 skb_queue_len(&chan->srej_q)); l2cap_rx_state_srej_sent()
6297 l2cap_pass_to_tx(chan, control); l2cap_rx_state_srej_sent()
6298 l2cap_send_srej(chan, control->txseq); l2cap_rx_state_srej_sent()
6306 skb_queue_tail(&chan->srej_q, skb); l2cap_rx_state_srej_sent()
6309 skb_queue_len(&chan->srej_q)); l2cap_rx_state_srej_sent()
6311 l2cap_pass_to_tx(chan, control); l2cap_rx_state_srej_sent()
6312 l2cap_send_srej_list(chan, control->txseq); l2cap_rx_state_srej_sent()
6316 l2cap_pass_to_tx(chan, control); l2cap_rx_state_srej_sent()
6327 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_rx_state_srej_sent()
6332 l2cap_pass_to_tx(chan, control); l2cap_rx_state_srej_sent()
6334 clear_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_rx_state_srej_sent()
6337 &chan->conn_state)) { l2cap_rx_state_srej_sent()
6339 l2cap_retransmit_all(chan, control); l2cap_rx_state_srej_sent()
6342 l2cap_ertm_send(chan); l2cap_rx_state_srej_sent()
6345 &chan->conn_state) && l2cap_rx_state_srej_sent()
6346 chan->unacked_frames) { l2cap_rx_state_srej_sent()
6347 __set_retrans_timer(chan); l2cap_rx_state_srej_sent()
6350 set_bit(CONN_SEND_FBIT, &chan->conn_state); l2cap_rx_state_srej_sent()
6351 l2cap_send_srej_tail(chan); l2cap_rx_state_srej_sent()
6354 &chan->conn_state) && l2cap_rx_state_srej_sent()
6355 chan->unacked_frames) l2cap_rx_state_srej_sent()
6356 __set_retrans_timer(chan); l2cap_rx_state_srej_sent()
6358 l2cap_send_ack(chan); l2cap_rx_state_srej_sent()
6362 set_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_rx_state_srej_sent()
6363 l2cap_pass_to_tx(chan, control); l2cap_rx_state_srej_sent()
6365 l2cap_send_srej_tail(chan); l2cap_rx_state_srej_sent()
6371 rr_control.reqseq = chan->buffer_seq; l2cap_rx_state_srej_sent()
6372 l2cap_send_sframe(chan, &rr_control); l2cap_rx_state_srej_sent()
6377 l2cap_handle_rej(chan, control); l2cap_rx_state_srej_sent()
6380 l2cap_handle_srej(chan, control); l2cap_rx_state_srej_sent()
6392 static int l2cap_finish_move(struct l2cap_chan *chan) l2cap_finish_move() argument
6394 BT_DBG("chan %p", chan); l2cap_finish_move()
6396 chan->rx_state = L2CAP_RX_STATE_RECV; l2cap_finish_move()
6398 if (chan->hs_hcon) l2cap_finish_move()
6399 chan->conn->mtu = chan->hs_hcon->hdev->block_mtu; l2cap_finish_move()
6401 chan->conn->mtu = chan->conn->hcon->hdev->acl_mtu; l2cap_finish_move()
6403 return l2cap_resegment(chan); l2cap_finish_move()
6406 static int l2cap_rx_state_wait_p(struct l2cap_chan *chan, l2cap_rx_state_wait_p() argument
6412 BT_DBG("chan %p, control %p, skb %p, event %d", chan, control, skb, l2cap_rx_state_wait_p()
6418 l2cap_process_reqseq(chan, control->reqseq); l2cap_rx_state_wait_p()
6420 if (!skb_queue_empty(&chan->tx_q)) l2cap_rx_state_wait_p()
6421 chan->tx_send_head = skb_peek(&chan->tx_q); l2cap_rx_state_wait_p()
6423 chan->tx_send_head = NULL; l2cap_rx_state_wait_p()
6428 chan->next_tx_seq = control->reqseq; l2cap_rx_state_wait_p()
6429 chan->unacked_frames = 0; l2cap_rx_state_wait_p()
6431 err = l2cap_finish_move(chan); l2cap_rx_state_wait_p()
6435 set_bit(CONN_SEND_FBIT, &chan->conn_state); l2cap_rx_state_wait_p()
6436 l2cap_send_i_or_rr_or_rnr(chan); l2cap_rx_state_wait_p()
6441 return l2cap_rx_state_recv(chan, control, NULL, event); l2cap_rx_state_wait_p()
6444 static int l2cap_rx_state_wait_f(struct l2cap_chan *chan, l2cap_rx_state_wait_f() argument
6453 clear_bit(CONN_REMOTE_BUSY, &chan->conn_state); l2cap_rx_state_wait_f()
6455 chan->rx_state = L2CAP_RX_STATE_RECV; l2cap_rx_state_wait_f()
6456 l2cap_process_reqseq(chan, control->reqseq); l2cap_rx_state_wait_f()
6458 if (!skb_queue_empty(&chan->tx_q)) l2cap_rx_state_wait_f()
6459 chan->tx_send_head = skb_peek(&chan->tx_q); l2cap_rx_state_wait_f()
6461 chan->tx_send_head = NULL; l2cap_rx_state_wait_f()
6466 chan->next_tx_seq = control->reqseq; l2cap_rx_state_wait_f()
6467 chan->unacked_frames = 0; l2cap_rx_state_wait_f()
6469 if (chan->hs_hcon) l2cap_rx_state_wait_f()
6470 chan->conn->mtu = chan->hs_hcon->hdev->block_mtu; l2cap_rx_state_wait_f()
6472 chan->conn->mtu = chan->conn->hcon->hdev->acl_mtu; l2cap_rx_state_wait_f()
6474 err = l2cap_resegment(chan); l2cap_rx_state_wait_f()
6477 err = l2cap_rx_state_recv(chan, control, skb, event); l2cap_rx_state_wait_f()
6482 static bool __valid_reqseq(struct l2cap_chan *chan, u16 reqseq) __valid_reqseq() argument
6487 unacked = __seq_offset(chan, chan->next_tx_seq, chan->expected_ack_seq); __valid_reqseq()
6488 return __seq_offset(chan, chan->next_tx_seq, reqseq) <= unacked; __valid_reqseq()
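
__valid_reqseq() applies the same modular offset to acknowledgements: a reqseq is acceptable only if it lies between expected_ack_seq and next_tx_seq, meaning it acknowledges frames that are actually outstanding. A short sketch reusing the offset helper:

#include <stdio.h>

#define SEQ_SPACE 64U

static unsigned int seq_offset(unsigned int a, unsigned int b)
{
        return (a - b + SEQ_SPACE) % SEQ_SPACE;
}

static int valid_reqseq(unsigned int next_tx, unsigned int expected_ack,
                        unsigned int reqseq)
{
        unsigned int unacked = seq_offset(next_tx, expected_ack);

        return seq_offset(next_tx, reqseq) <= unacked;
}

int main(void)
{
        /* frames 58..63,0,1 outstanding: reqseq 0 valid, 10 is not */
        printf("%d %d\n", valid_reqseq(2, 58, 0), valid_reqseq(2, 58, 10));
        return 0;
}
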
6491 static int l2cap_rx(struct l2cap_chan *chan, struct l2cap_ctrl *control, l2cap_rx() argument
6496 BT_DBG("chan %p, control %p, skb %p, event %d, state %d", chan, l2cap_rx()
6497 control, skb, event, chan->rx_state); l2cap_rx()
6499 if (__valid_reqseq(chan, control->reqseq)) { l2cap_rx()
6500 switch (chan->rx_state) { l2cap_rx()
6502 err = l2cap_rx_state_recv(chan, control, skb, event); l2cap_rx()
6505 err = l2cap_rx_state_srej_sent(chan, control, skb, l2cap_rx()
6509 err = l2cap_rx_state_wait_p(chan, control, skb, event); l2cap_rx()
6512 err = l2cap_rx_state_wait_f(chan, control, skb, event); l2cap_rx()
6520 control->reqseq, chan->next_tx_seq, l2cap_rx()
6521 chan->expected_ack_seq); l2cap_rx()
6522 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_rx()
6528 static int l2cap_stream_rx(struct l2cap_chan *chan, struct l2cap_ctrl *control, l2cap_stream_rx() argument
6533 BT_DBG("chan %p, control %p, skb %p, state %d", chan, control, skb, l2cap_stream_rx()
6534 chan->rx_state); l2cap_stream_rx()
6536 if (l2cap_classify_txseq(chan, control->txseq) == l2cap_stream_rx()
6538 l2cap_pass_to_tx(chan, control); l2cap_stream_rx()
6540 BT_DBG("buffer_seq %d->%d", chan->buffer_seq, l2cap_stream_rx()
6541 __next_seq(chan, chan->buffer_seq)); l2cap_stream_rx()
6543 chan->buffer_seq = __next_seq(chan, chan->buffer_seq); l2cap_stream_rx()
6545 l2cap_reassemble_sdu(chan, skb, control); l2cap_stream_rx()
6547 if (chan->sdu) { l2cap_stream_rx()
6548 kfree_skb(chan->sdu); l2cap_stream_rx()
6549 chan->sdu = NULL; l2cap_stream_rx()
6551 chan->sdu_last_frag = NULL; l2cap_stream_rx()
6552 chan->sdu_len = 0; l2cap_stream_rx()
6560 chan->last_acked_seq = control->txseq; l2cap_stream_rx()
6561 chan->expected_tx_seq = __next_seq(chan, control->txseq); l2cap_stream_rx()
6566 static int l2cap_data_rcv(struct l2cap_chan *chan, struct sk_buff *skb) l2cap_data_rcv() argument
6572 __unpack_control(chan, skb); l2cap_data_rcv()
6581 if (l2cap_check_fcs(chan, skb)) l2cap_data_rcv()
6587 if (chan->fcs == L2CAP_FCS_CRC16) l2cap_data_rcv()
6590 if (len > chan->mps) { l2cap_data_rcv()
6591 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_data_rcv()
6605 if (control->final && chan->tx_state != L2CAP_TX_STATE_WAIT_F) l2cap_data_rcv()
6608 if (chan->mode != L2CAP_MODE_STREAMING) { l2cap_data_rcv()
6610 err = l2cap_rx(chan, control, skb, event); l2cap_data_rcv()
6612 err = l2cap_stream_rx(chan, control, skb); l2cap_data_rcv()
6616 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_data_rcv()
6624 if (chan->mode == L2CAP_MODE_STREAMING) l2cap_data_rcv()
6633 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_data_rcv()
6639 chan->tx_state != L2CAP_TX_STATE_WAIT_F)) l2cap_data_rcv()
6643 if (l2cap_rx(chan, control, skb, event)) l2cap_data_rcv()
6644 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_data_rcv()
6654 static void l2cap_chan_le_send_credits(struct l2cap_chan *chan) l2cap_chan_le_send_credits() argument
6656 struct l2cap_conn *conn = chan->conn; l2cap_chan_le_send_credits()
6663 if (chan->rx_credits >= (le_max_credits + 1) / 2) l2cap_chan_le_send_credits()
6666 return_credits = le_max_credits - chan->rx_credits; l2cap_chan_le_send_credits()
6668 BT_DBG("chan %p returning %u credits to sender", chan, return_credits); l2cap_chan_le_send_credits()
6670 chan->rx_credits += return_credits; l2cap_chan_le_send_credits()
6672 pkt.cid = cpu_to_le16(chan->scid); l2cap_chan_le_send_credits()
6675 chan->ident = l2cap_get_ident(conn); l2cap_chan_le_send_credits()
6677 l2cap_send_cmd(conn, chan->ident, L2CAP_LE_CREDITS, sizeof(pkt), &pkt); l2cap_chan_le_send_credits()
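
l2cap_chan_le_send_credits() is the RX half of the credit scheme: once fewer than half of the configured credits remain, the peer is topped back up to the maximum in one LE_CREDITS command. A sketch of the policy; the le_max_credits value is illustrative:

#include <stdio.h>

static unsigned int le_max_credits = 10;        /* illustrative */
static unsigned int rx_credits = 10;

static void rx_one_frame(void)
{
        unsigned int ret;

        rx_credits--;                   /* each K-frame costs a credit */

        if (rx_credits >= (le_max_credits + 1) / 2)
                return;                 /* plenty left: say nothing */

        ret = le_max_credits - rx_credits;
        rx_credits += ret;
        printf("returning %u credits to the sender\n", ret);
}

int main(void)
{
        int i;

        for (i = 0; i < 12; i++)
                rx_one_frame();
        return 0;
}
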
6680 static int l2cap_le_data_rcv(struct l2cap_chan *chan, struct sk_buff *skb) l2cap_le_data_rcv() argument
6684 if (!chan->rx_credits) { l2cap_le_data_rcv()
6686 l2cap_send_disconn_req(chan, ECONNRESET); l2cap_le_data_rcv()
6690 if (chan->imtu < skb->len) { l2cap_le_data_rcv()
6695 chan->rx_credits--; l2cap_le_data_rcv()
6696 BT_DBG("rx_credits %u -> %u", chan->rx_credits + 1, chan->rx_credits); l2cap_le_data_rcv()
6698 l2cap_chan_le_send_credits(chan); l2cap_le_data_rcv()
6702 if (!chan->sdu) { l2cap_le_data_rcv()
6709 sdu_len, skb->len, chan->imtu); l2cap_le_data_rcv()
6711 if (sdu_len > chan->imtu) { l2cap_le_data_rcv()
6724 return chan->ops->recv(chan, skb); l2cap_le_data_rcv()
6726 chan->sdu = skb; l2cap_le_data_rcv()
6727 chan->sdu_len = sdu_len; l2cap_le_data_rcv()
6728 chan->sdu_last_frag = skb; l2cap_le_data_rcv()
6733 BT_DBG("SDU fragment. chan->sdu->len %u skb->len %u chan->sdu_len %u", l2cap_le_data_rcv()
6734 chan->sdu->len, skb->len, chan->sdu_len); l2cap_le_data_rcv()
6736 if (chan->sdu->len + skb->len > chan->sdu_len) { l2cap_le_data_rcv()
6742 append_skb_frag(chan->sdu, skb, &chan->sdu_last_frag); l2cap_le_data_rcv()
6745 if (chan->sdu->len == chan->sdu_len) { l2cap_le_data_rcv()
6746 err = chan->ops->recv(chan, chan->sdu); l2cap_le_data_rcv()
6748 chan->sdu = NULL; l2cap_le_data_rcv()
6749 chan->sdu_last_frag = NULL; l2cap_le_data_rcv()
6750 chan->sdu_len = 0; l2cap_le_data_rcv()
6757 kfree_skb(chan->sdu); l2cap_le_data_rcv()
6758 chan->sdu = NULL; l2cap_le_data_rcv()
6759 chan->sdu_last_frag = NULL; l2cap_le_data_rcv()
6760 chan->sdu_len = 0; l2cap_le_data_rcv()
6773 struct l2cap_chan *chan; l2cap_data_channel() local
6775 chan = l2cap_get_chan_by_scid(conn, cid); l2cap_data_channel()
6776 if (!chan) { l2cap_data_channel()
6778 chan = a2mp_channel_create(conn, skb); l2cap_data_channel()
6779 if (!chan) { l2cap_data_channel()
6784 l2cap_chan_lock(chan); l2cap_data_channel()
6793 BT_DBG("chan %p, len %d", chan, skb->len); l2cap_data_channel()
6799 if (chan->chan_type == L2CAP_CHAN_FIXED) l2cap_data_channel()
6800 l2cap_chan_ready(chan); l2cap_data_channel()
6802 if (chan->state != BT_CONNECTED) l2cap_data_channel()
6805 switch (chan->mode) { l2cap_data_channel()
6807 if (l2cap_le_data_rcv(chan, skb) < 0) l2cap_data_channel()
6818 if (chan->imtu < skb->len) { l2cap_data_channel()
6823 if (!chan->ops->recv(chan, skb)) l2cap_data_channel()
6829 l2cap_data_rcv(chan, skb); l2cap_data_channel()
6833 BT_DBG("chan %p: bad mode 0x%2.2x", chan, chan->mode); l2cap_data_channel()
6841 l2cap_chan_unlock(chan); l2cap_data_channel()
6848 struct l2cap_chan *chan; l2cap_conless_channel() local
6853 chan = l2cap_global_chan_by_psm(0, psm, &hcon->src, &hcon->dst, l2cap_conless_channel()
6855 if (!chan) l2cap_conless_channel()
6858 BT_DBG("chan %p, len %d", chan, skb->len); l2cap_conless_channel()
6860 if (chan->state != BT_BOUND && chan->state != BT_CONNECTED) l2cap_conless_channel()
6863 if (chan->imtu < skb->len) l2cap_conless_channel()
6870 if (!chan->ops->recv(chan, skb)) { l2cap_conless_channel()
6871 l2cap_chan_put(chan); l2cap_conless_channel()
6876 l2cap_chan_put(chan); l2cap_conless_channel()
7026 int l2cap_chan_connect(struct l2cap_chan *chan, __le16 psm, u16 cid, l2cap_chan_connect() argument
7034 BT_DBG("%pMR -> %pMR (type %u) psm 0x%2.2x", &chan->src, dst, l2cap_chan_connect()
7037 hdev = hci_get_route(dst, &chan->src); l2cap_chan_connect()
7044 chan->chan_type != L2CAP_CHAN_RAW) { l2cap_chan_connect()
7049 if (chan->chan_type == L2CAP_CHAN_CONN_ORIENTED && !psm) { l2cap_chan_connect()
7054 if (chan->chan_type == L2CAP_CHAN_FIXED && !cid) { l2cap_chan_connect()
7059 switch (chan->mode) { l2cap_chan_connect()
7063 l2cap_le_flowctl_init(chan); l2cap_chan_connect()
7075 switch (chan->state) { l2cap_chan_connect()
7099 bacpy(&chan->dst, dst); l2cap_chan_connect()
7100 chan->dst_type = dst_type; l2cap_chan_connect()
7102 chan->psm = psm; l2cap_chan_connect()
7103 chan->dcid = cid; l2cap_chan_connect()
7120 hcon = hci_connect_le(hdev, dst, dst_type, chan->sec_level, l2cap_chan_connect()
7123 u8 auth_type = l2cap_get_auth_type(chan); l2cap_chan_connect()
7124 hcon = hci_connect_acl(hdev, dst, chan->sec_level, auth_type); l2cap_chan_connect()
7140 l2cap_chan_lock(chan); l2cap_chan_connect()
7149 bacpy(&chan->src, &hcon->src); l2cap_chan_connect()
7150 chan->src_type = bdaddr_src_type(hcon); l2cap_chan_connect()
7152 __l2cap_chan_add(conn, chan); l2cap_chan_connect()
7157 l2cap_state_change(chan, BT_CONNECT); l2cap_chan_connect()
7158 __set_chan_timer(chan, chan->ops->get_sndtimeo(chan)); l2cap_chan_connect()
7160 /* Release chan->sport so that it can be reused by other l2cap_chan_connect()
7164 chan->sport = 0; l2cap_chan_connect()
7168 if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED) { l2cap_chan_connect()
7169 __clear_chan_timer(chan); l2cap_chan_connect()
7170 if (l2cap_chan_check_security(chan, true)) l2cap_chan_connect()
7171 l2cap_state_change(chan, BT_CONNECTED); l2cap_chan_connect()
7173 l2cap_do_start(chan); l2cap_chan_connect()
7179 l2cap_chan_unlock(chan); l2cap_chan_connect()
7289 struct l2cap_chan *chan, *next; l2cap_connect_cfm() local
7296 chan = pchan->ops->new_connection(pchan); l2cap_connect_cfm()
7297 if (chan) { l2cap_connect_cfm()
7298 bacpy(&chan->src, &hcon->src); l2cap_connect_cfm()
7299 bacpy(&chan->dst, &hcon->dst); l2cap_connect_cfm()
7300 chan->src_type = bdaddr_src_type(hcon); l2cap_connect_cfm()
7301 chan->dst_type = dst_type; l2cap_connect_cfm()
7303 __l2cap_chan_add(conn, chan); l2cap_connect_cfm()
7337 static inline void l2cap_check_encryption(struct l2cap_chan *chan, u8 encrypt) l2cap_check_encryption() argument
7339 if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED) l2cap_check_encryption()
7343 if (chan->sec_level == BT_SECURITY_MEDIUM) { l2cap_check_encryption()
7344 __set_chan_timer(chan, L2CAP_ENC_TIMEOUT); l2cap_check_encryption()
7345 } else if (chan->sec_level == BT_SECURITY_HIGH || l2cap_check_encryption()
7346 chan->sec_level == BT_SECURITY_FIPS) l2cap_check_encryption()
7347 l2cap_chan_close(chan, ECONNREFUSED); l2cap_check_encryption()
7349 if (chan->sec_level == BT_SECURITY_MEDIUM) l2cap_check_encryption()
7350 __clear_chan_timer(chan); l2cap_check_encryption()
7357 struct l2cap_chan *chan; l2cap_security_cfm() local
7366 list_for_each_entry(chan, &conn->chan_l, list) { l2cap_security_cfm()
7367 l2cap_chan_lock(chan); l2cap_security_cfm()
7369 BT_DBG("chan %p scid 0x%4.4x state %s", chan, chan->scid, l2cap_security_cfm()
7370 state_to_string(chan->state)); l2cap_security_cfm()
7372 if (chan->scid == L2CAP_CID_A2MP) { l2cap_security_cfm()
7373 l2cap_chan_unlock(chan); l2cap_security_cfm()
7378 chan->sec_level = hcon->sec_level; l2cap_security_cfm()
7380 if (!__l2cap_no_conn_pending(chan)) { l2cap_security_cfm()
7381 l2cap_chan_unlock(chan); l2cap_security_cfm()
7385 if (!status && (chan->state == BT_CONNECTED || l2cap_security_cfm()
7386 chan->state == BT_CONFIG)) { l2cap_security_cfm()
7387 chan->ops->resume(chan); l2cap_security_cfm()
7388 l2cap_check_encryption(chan, encrypt); l2cap_security_cfm()
7389 l2cap_chan_unlock(chan); l2cap_security_cfm()
7393 if (chan->state == BT_CONNECT) { l2cap_security_cfm()
7395 l2cap_start_connection(chan); l2cap_security_cfm()
7397 __set_chan_timer(chan, L2CAP_DISC_TIMEOUT); l2cap_security_cfm()
7398 } else if (chan->state == BT_CONNECT2 && l2cap_security_cfm()
7399 chan->mode != L2CAP_MODE_LE_FLOWCTL) { l2cap_security_cfm()
7404 if (test_bit(FLAG_DEFER_SETUP, &chan->flags)) { l2cap_security_cfm()
7407 chan->ops->defer(chan); l2cap_security_cfm()
7409 l2cap_state_change(chan, BT_CONFIG); l2cap_security_cfm()
7414 l2cap_state_change(chan, BT_DISCONN); l2cap_security_cfm()
7415 __set_chan_timer(chan, L2CAP_DISC_TIMEOUT); l2cap_security_cfm()
7420 rsp.scid = cpu_to_le16(chan->dcid); l2cap_security_cfm()
7421 rsp.dcid = cpu_to_le16(chan->scid); l2cap_security_cfm()
7424 l2cap_send_cmd(conn, chan->ident, L2CAP_CONN_RSP, l2cap_security_cfm()
7427 if (!test_bit(CONF_REQ_SENT, &chan->conf_state) && l2cap_security_cfm()
7430 set_bit(CONF_REQ_SENT, &chan->conf_state); l2cap_security_cfm()
7433 l2cap_build_conf_req(chan, buf), l2cap_security_cfm()
7435 chan->num_conf_req++; l2cap_security_cfm()
7439 l2cap_chan_unlock(chan); l2cap_security_cfm()
H A Dl2cap_sock.c83 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_bind() local
129 err = l2cap_add_scid(chan, __le16_to_cpu(la.l2_cid)); l2cap_sock_bind()
131 err = l2cap_add_psm(chan, &la.l2_bdaddr, la.l2_psm); l2cap_sock_bind()
136 switch (chan->chan_type) { l2cap_sock_bind()
139 chan->sec_level = BT_SECURITY_SDP; l2cap_sock_bind()
144 chan->sec_level = BT_SECURITY_SDP; l2cap_sock_bind()
147 chan->sec_level = BT_SECURITY_SDP; l2cap_sock_bind()
155 set_bit(FLAG_HOLD_HCI_CONN, &chan->flags); l2cap_sock_bind()
159 bacpy(&chan->src, &la.l2_bdaddr); l2cap_sock_bind()
160 chan->src_type = la.l2_bdaddr_type; l2cap_sock_bind()
162 if (chan->psm && bdaddr_type_is_le(chan->src_type)) l2cap_sock_bind()
163 chan->mode = L2CAP_MODE_LE_FLOWCTL; l2cap_sock_bind()
165 chan->state = BT_BOUND; l2cap_sock_bind()
177 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_connect() local
198 * conflicts with the address given to connect(). If chan->src l2cap_sock_connect()
200 * chan->src_type and la.l2_bdaddr_type do not need to match. l2cap_sock_connect()
202 if (chan->src_type == BDADDR_BREDR && bacmp(&chan->src, BDADDR_ANY) && l2cap_sock_connect()
210 if (chan->scid != L2CAP_CID_ATT || l2cap_sock_connect()
220 chan->src_type = BDADDR_LE_PUBLIC; l2cap_sock_connect()
223 if (chan->src_type != BDADDR_BREDR && la.l2_bdaddr_type == BDADDR_BREDR) l2cap_sock_connect()
233 if (chan->psm && bdaddr_type_is_le(chan->src_type)) l2cap_sock_connect()
234 chan->mode = L2CAP_MODE_LE_FLOWCTL; l2cap_sock_connect()
236 err = l2cap_chan_connect(chan, la.l2_psm, __le16_to_cpu(la.l2_cid), l2cap_sock_connect()
254 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_listen() local
271 switch (chan->mode) { l2cap_sock_listen()
292 atomic_set(&chan->nesting, L2CAP_NESTING_PARENT); l2cap_sock_listen()
294 chan->state = BT_LISTEN; l2cap_sock_listen()
363 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_getname() local
376 la->l2_psm = chan->psm; l2cap_sock_getname()
379 bacpy(&la->l2_bdaddr, &chan->dst); l2cap_sock_getname()
380 la->l2_cid = cpu_to_le16(chan->dcid); l2cap_sock_getname()
381 la->l2_bdaddr_type = chan->dst_type; l2cap_sock_getname()
383 bacpy(&la->l2_bdaddr, &chan->src); l2cap_sock_getname()
384 la->l2_cid = cpu_to_le16(chan->scid); l2cap_sock_getname()
385 la->l2_bdaddr_type = chan->src_type; l2cap_sock_getname()
395 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_getsockopt_old() local
414 if (bdaddr_type_is_le(chan->src_type) && l2cap_sock_getsockopt_old()
415 chan->scid != L2CAP_CID_ATT) { l2cap_sock_getsockopt_old()
421 opts.imtu = chan->imtu; l2cap_sock_getsockopt_old()
422 opts.omtu = chan->omtu; l2cap_sock_getsockopt_old()
423 opts.flush_to = chan->flush_to; l2cap_sock_getsockopt_old()
424 opts.mode = chan->mode; l2cap_sock_getsockopt_old()
425 opts.fcs = chan->fcs; l2cap_sock_getsockopt_old()
426 opts.max_tx = chan->max_tx; l2cap_sock_getsockopt_old()
427 opts.txwin_size = chan->tx_win; l2cap_sock_getsockopt_old()
436 switch (chan->sec_level) { l2cap_sock_getsockopt_old()
456 if (test_bit(FLAG_ROLE_SWITCH, &chan->flags)) l2cap_sock_getsockopt_old()
459 if (test_bit(FLAG_FORCE_RELIABLE, &chan->flags)) l2cap_sock_getsockopt_old()
476 cinfo.hci_handle = chan->conn->hcon->handle; l2cap_sock_getsockopt_old()
477 memcpy(cinfo.dev_class, chan->conn->hcon->dev_class, 3); l2cap_sock_getsockopt_old()
498 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_getsockopt() local
518 if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED && l2cap_sock_getsockopt()
519 chan->chan_type != L2CAP_CHAN_FIXED && l2cap_sock_getsockopt()
520 chan->chan_type != L2CAP_CHAN_RAW) { l2cap_sock_getsockopt()
526 if (chan->conn) { l2cap_sock_getsockopt()
527 sec.level = chan->conn->hcon->sec_level; l2cap_sock_getsockopt()
530 sec.key_size = chan->conn->hcon->enc_key_size; l2cap_sock_getsockopt()
532 sec.level = chan->sec_level; l2cap_sock_getsockopt()
554 if (put_user(test_bit(FLAG_FLUSHABLE, &chan->flags), l2cap_sock_getsockopt()
567 pwr.force_active = test_bit(FLAG_FORCE_ACTIVE, &chan->flags); l2cap_sock_getsockopt()
576 if (put_user(chan->chan_policy, (u32 __user *) optval)) l2cap_sock_getsockopt()
581 if (!bdaddr_type_is_le(chan->src_type)) { l2cap_sock_getsockopt()
591 if (put_user(chan->omtu, (u16 __user *) optval)) l2cap_sock_getsockopt()
596 if (!bdaddr_type_is_le(chan->src_type)) { l2cap_sock_getsockopt()
601 if (put_user(chan->imtu, (u16 __user *) optval)) l2cap_sock_getsockopt()
614 static bool l2cap_valid_mtu(struct l2cap_chan *chan, u16 mtu) l2cap_valid_mtu() argument
616 switch (chan->scid) { l2cap_valid_mtu()
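
l2cap_valid_mtu() puts a floor under user-supplied MTUs: the ATT fixed channel may go as low as the LE minimum of 23 bytes, anything else must honour the BR/EDR minimum of 48. A sketch assuming those spec minimums; the dynamic CID used in the demo is made up:

#include <stdbool.h>
#include <stdio.h>

#define CID_ATT         0x0004
#define LE_MIN_MTU      23      /* spec minimum for LE */
#define BREDR_MIN_MTU   48      /* spec minimum for BR/EDR */

static bool valid_mtu(unsigned int scid, unsigned int mtu)
{
        if (scid == CID_ATT)
                return mtu >= LE_MIN_MTU;
        return mtu >= BREDR_MIN_MTU;
}

int main(void)
{
        printf("%d %d %d\n", valid_mtu(CID_ATT, 23),
               valid_mtu(0x0040, 23), valid_mtu(0x0040, 672));
        return 0;
}
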
634 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_setsockopt_old() local
645 if (bdaddr_type_is_le(chan->src_type)) { l2cap_sock_setsockopt_old()
655 opts.imtu = chan->imtu; l2cap_sock_setsockopt_old()
656 opts.omtu = chan->omtu; l2cap_sock_setsockopt_old()
657 opts.flush_to = chan->flush_to; l2cap_sock_setsockopt_old()
658 opts.mode = chan->mode; l2cap_sock_setsockopt_old()
659 opts.fcs = chan->fcs; l2cap_sock_setsockopt_old()
660 opts.max_tx = chan->max_tx; l2cap_sock_setsockopt_old()
661 opts.txwin_size = chan->tx_win; l2cap_sock_setsockopt_old()
674 if (!l2cap_valid_mtu(chan, opts.imtu)) { l2cap_sock_setsockopt_old()
679 chan->mode = opts.mode; l2cap_sock_setsockopt_old()
680 switch (chan->mode) { l2cap_sock_setsockopt_old()
684 clear_bit(CONF_STATE2_DEVICE, &chan->conf_state); l2cap_sock_setsockopt_old()
696 chan->imtu = opts.imtu; l2cap_sock_setsockopt_old()
697 chan->omtu = opts.omtu; l2cap_sock_setsockopt_old()
698 chan->fcs = opts.fcs; l2cap_sock_setsockopt_old()
699 chan->max_tx = opts.max_tx; l2cap_sock_setsockopt_old()
700 chan->tx_win = opts.txwin_size; l2cap_sock_setsockopt_old()
701 chan->flush_to = opts.flush_to; l2cap_sock_setsockopt_old()
716 chan->sec_level = BT_SECURITY_LOW; l2cap_sock_setsockopt_old()
718 chan->sec_level = BT_SECURITY_MEDIUM; l2cap_sock_setsockopt_old()
720 chan->sec_level = BT_SECURITY_HIGH; l2cap_sock_setsockopt_old()
723 set_bit(FLAG_ROLE_SWITCH, &chan->flags); l2cap_sock_setsockopt_old()
725 clear_bit(FLAG_ROLE_SWITCH, &chan->flags); l2cap_sock_setsockopt_old()
728 set_bit(FLAG_FORCE_RELIABLE, &chan->flags); l2cap_sock_setsockopt_old()
730 clear_bit(FLAG_FORCE_RELIABLE, &chan->flags); l2cap_sock_setsockopt_old()
746 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_setsockopt() local
765 if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED && l2cap_sock_setsockopt()
766 chan->chan_type != L2CAP_CHAN_FIXED && l2cap_sock_setsockopt()
767 chan->chan_type != L2CAP_CHAN_RAW) { l2cap_sock_setsockopt()
786 chan->sec_level = sec.level; l2cap_sock_setsockopt()
788 if (!chan->conn) l2cap_sock_setsockopt()
791 conn = chan->conn; l2cap_sock_setsockopt()
794 if (chan->scid == L2CAP_CID_ATT) { l2cap_sock_setsockopt()
797 set_bit(FLAG_PENDING_SECURITY, &chan->flags); l2cap_sock_setsockopt()
799 chan->state = BT_CONFIG; l2cap_sock_setsockopt()
805 if (!l2cap_chan_check_security(chan, true)) l2cap_sock_setsockopt()
827 set_bit(FLAG_DEFER_SETUP, &chan->flags); l2cap_sock_setsockopt()
830 clear_bit(FLAG_DEFER_SETUP, &chan->flags); l2cap_sock_setsockopt()
846 conn = chan->conn; l2cap_sock_setsockopt()
856 set_bit(FLAG_FLUSHABLE, &chan->flags); l2cap_sock_setsockopt()
858 clear_bit(FLAG_FLUSHABLE, &chan->flags); l2cap_sock_setsockopt()
862 if (chan->chan_type != L2CAP_CHAN_CONN_ORIENTED && l2cap_sock_setsockopt()
863 chan->chan_type != L2CAP_CHAN_RAW) { l2cap_sock_setsockopt()
877 set_bit(FLAG_FORCE_ACTIVE, &chan->flags); l2cap_sock_setsockopt()
879 clear_bit(FLAG_FORCE_ACTIVE, &chan->flags); l2cap_sock_setsockopt()
893 if (chan->mode != L2CAP_MODE_ERTM && l2cap_sock_setsockopt()
894 chan->mode != L2CAP_MODE_STREAMING) { l2cap_sock_setsockopt()
899 chan->chan_policy = (u8) opt; l2cap_sock_setsockopt()
902 chan->move_role == L2CAP_MOVE_ROLE_NONE) l2cap_sock_setsockopt()
903 l2cap_move_start(chan); l2cap_sock_setsockopt()
908 if (!bdaddr_type_is_le(chan->src_type)) { l2cap_sock_setsockopt()
920 if (!bdaddr_type_is_le(chan->src_type)) { l2cap_sock_setsockopt()
935 chan->imtu = opt; l2cap_sock_setsockopt()
951 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_sendmsg() local
972 l2cap_chan_lock(chan); l2cap_sock_sendmsg()
973 err = l2cap_chan_send(chan, msg, len); l2cap_sock_sendmsg()
974 l2cap_chan_unlock(chan); l2cap_sock_sendmsg()
990 if (bdaddr_type_is_le(pi->chan->src_type)) { l2cap_sock_recvmsg()
992 pi->chan->state = BT_CONNECTED; l2cap_sock_recvmsg()
993 __l2cap_le_connect_rsp_defer(pi->chan); l2cap_sock_recvmsg()
996 pi->chan->state = BT_CONFIG; l2cap_sock_recvmsg()
997 __l2cap_connect_rsp_defer(pi->chan); l2cap_sock_recvmsg()
1011 if (pi->chan->mode != L2CAP_MODE_ERTM) l2cap_sock_recvmsg()
1018 if (!test_bit(CONN_LOCAL_BUSY, &pi->chan->conn_state)) l2cap_sock_recvmsg()
1033 l2cap_chan_busy(pi->chan, 0); l2cap_sock_recvmsg()
1052 l2cap_chan_put(l2cap_pi(sk)->chan); l2cap_sock_kill()
1059 struct l2cap_chan *chan = l2cap_pi(sk)->chan; __l2cap_wait_ack() local
1066 while (chan->unacked_frames > 0 && chan->conn) { __l2cap_wait_ack()
1092 struct l2cap_chan *chan; l2cap_sock_shutdown() local
1101 chan = l2cap_pi(sk)->chan; l2cap_sock_shutdown()
1102 conn = chan->conn; l2cap_sock_shutdown()
1104 BT_DBG("chan %p state %s", chan, state_to_string(chan->state)); l2cap_sock_shutdown()
1109 l2cap_chan_lock(chan); l2cap_sock_shutdown()
1113 if (chan->mode == L2CAP_MODE_ERTM) l2cap_sock_shutdown()
1119 l2cap_chan_close(chan, 0); l2cap_sock_shutdown()
1132 l2cap_chan_unlock(chan); l2cap_sock_shutdown()
1168 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_cleanup_listen() local
1170 BT_DBG("child chan %p state %s", chan, l2cap_sock_cleanup_listen()
1171 state_to_string(chan->state)); l2cap_sock_cleanup_listen()
1173 l2cap_chan_lock(chan); l2cap_sock_cleanup_listen()
1174 __clear_chan_timer(chan); l2cap_sock_cleanup_listen()
1175 l2cap_chan_close(chan, ECONNRESET); l2cap_sock_cleanup_listen()
1176 l2cap_chan_unlock(chan); l2cap_sock_cleanup_listen()
1182 static struct l2cap_chan *l2cap_sock_new_connection_cb(struct l2cap_chan *chan) l2cap_sock_new_connection_cb() argument
1184 struct sock *sk, *parent = chan->data; l2cap_sock_new_connection_cb()
1210 return l2cap_pi(sk)->chan; l2cap_sock_new_connection_cb()
1213 static int l2cap_sock_recv_cb(struct l2cap_chan *chan, struct sk_buff *skb) l2cap_sock_recv_cb() argument
1215 struct sock *sk = chan->data; l2cap_sock_recv_cb()
1236 if (err < 0 && chan->mode == L2CAP_MODE_ERTM) { l2cap_sock_recv_cb()
1238 l2cap_chan_busy(chan, 1); l2cap_sock_recv_cb()
1248 static void l2cap_sock_close_cb(struct l2cap_chan *chan) l2cap_sock_close_cb() argument
1250 struct sock *sk = chan->data; l2cap_sock_close_cb()
1255 static void l2cap_sock_teardown_cb(struct l2cap_chan *chan, int err) l2cap_sock_teardown_cb() argument
1257 struct sock *sk = chan->data; l2cap_sock_teardown_cb()
1260 BT_DBG("chan %p state %s", chan, state_to_string(chan->state)); l2cap_sock_teardown_cb()
1269 lock_sock_nested(sk, atomic_read(&chan->nesting)); l2cap_sock_teardown_cb()
1275 switch (chan->state) { l2cap_sock_teardown_cb()
1283 chan->state = BT_CLOSED; l2cap_sock_teardown_cb()
1288 chan->state = BT_CLOSED; l2cap_sock_teardown_cb()
1305 static void l2cap_sock_state_change_cb(struct l2cap_chan *chan, int state, l2cap_sock_state_change_cb() argument
1308 struct sock *sk = chan->data; l2cap_sock_state_change_cb()
1316 static struct sk_buff *l2cap_sock_alloc_skb_cb(struct l2cap_chan *chan, l2cap_sock_alloc_skb_cb() argument
1320 struct sock *sk = chan->data; l2cap_sock_alloc_skb_cb()
1324 l2cap_chan_unlock(chan); l2cap_sock_alloc_skb_cb()
1326 l2cap_chan_lock(chan); l2cap_sock_alloc_skb_cb()
1333 bt_cb(skb)->l2cap.chan = chan; l2cap_sock_alloc_skb_cb()
1338 static void l2cap_sock_ready_cb(struct l2cap_chan *chan) l2cap_sock_ready_cb() argument
1340 struct sock *sk = chan->data; l2cap_sock_ready_cb()
1358 static void l2cap_sock_defer_cb(struct l2cap_chan *chan) l2cap_sock_defer_cb() argument
1360 struct sock *parent, *sk = chan->data; l2cap_sock_defer_cb()
1371 static void l2cap_sock_resume_cb(struct l2cap_chan *chan) l2cap_sock_resume_cb() argument
1373 struct sock *sk = chan->data; l2cap_sock_resume_cb()
1375 if (test_and_clear_bit(FLAG_PENDING_SECURITY, &chan->flags)) { l2cap_sock_resume_cb()
1377 chan->state = BT_CONNECTED; l2cap_sock_resume_cb()
1384 static void l2cap_sock_set_shutdown_cb(struct l2cap_chan *chan) l2cap_sock_set_shutdown_cb() argument
1386 struct sock *sk = chan->data; l2cap_sock_set_shutdown_cb()
1393 static long l2cap_sock_get_sndtimeo_cb(struct l2cap_chan *chan) l2cap_sock_get_sndtimeo_cb() argument
1395 struct sock *sk = chan->data; l2cap_sock_get_sndtimeo_cb()
1400 static void l2cap_sock_suspend_cb(struct l2cap_chan *chan) l2cap_sock_suspend_cb() argument
1402 struct sock *sk = chan->data; l2cap_sock_suspend_cb()
1428 if (l2cap_pi(sk)->chan) l2cap_sock_destruct()
1429 l2cap_chan_put(l2cap_pi(sk)->chan); l2cap_sock_destruct()
1455 struct l2cap_chan *chan = l2cap_pi(sk)->chan; l2cap_sock_init() local
1460 struct l2cap_chan *pchan = l2cap_pi(parent)->chan; l2cap_sock_init()
1465 chan->chan_type = pchan->chan_type; l2cap_sock_init()
1466 chan->imtu = pchan->imtu; l2cap_sock_init()
1467 chan->omtu = pchan->omtu; l2cap_sock_init()
1468 chan->conf_state = pchan->conf_state; l2cap_sock_init()
1469 chan->mode = pchan->mode; l2cap_sock_init()
1470 chan->fcs = pchan->fcs; l2cap_sock_init()
1471 chan->max_tx = pchan->max_tx; l2cap_sock_init()
1472 chan->tx_win = pchan->tx_win; l2cap_sock_init()
1473 chan->tx_win_max = pchan->tx_win_max; l2cap_sock_init()
1474 chan->sec_level = pchan->sec_level; l2cap_sock_init()
1475 chan->flags = pchan->flags; l2cap_sock_init()
1476 chan->tx_credits = pchan->tx_credits; l2cap_sock_init()
1477 chan->rx_credits = pchan->rx_credits; l2cap_sock_init()
1479 if (chan->chan_type == L2CAP_CHAN_FIXED) { l2cap_sock_init()
1480 chan->scid = pchan->scid; l2cap_sock_init()
1481 chan->dcid = pchan->scid; l2cap_sock_init()
1488 chan->chan_type = L2CAP_CHAN_RAW; l2cap_sock_init()
1491 chan->chan_type = L2CAP_CHAN_CONN_LESS; l2cap_sock_init()
1496 chan->chan_type = L2CAP_CHAN_CONN_ORIENTED; l2cap_sock_init()
1500 chan->imtu = L2CAP_DEFAULT_MTU; l2cap_sock_init()
1501 chan->omtu = 0; l2cap_sock_init()
1503 chan->mode = L2CAP_MODE_ERTM; l2cap_sock_init()
1504 set_bit(CONF_STATE2_DEVICE, &chan->conf_state); l2cap_sock_init()
1506 chan->mode = L2CAP_MODE_BASIC; l2cap_sock_init()
1509 l2cap_chan_set_defaults(chan); l2cap_sock_init()
1513 chan->flush_to = L2CAP_DEFAULT_FLUSH_TO; l2cap_sock_init()
1515 chan->data = sk; l2cap_sock_init()
1516 chan->ops = &l2cap_chan_ops; l2cap_sock_init()
1529 struct l2cap_chan *chan; l2cap_sock_alloc() local
1546 chan = l2cap_chan_create(); l2cap_sock_alloc()
1547 if (!chan) { l2cap_sock_alloc()
1552 l2cap_chan_hold(chan); l2cap_sock_alloc()
1554 l2cap_pi(sk)->chan = chan; l2cap_sock_alloc()
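
The l2cap_sock_*_cb() callbacks excerpted above all feed one ops table, which l2cap_sock_init() installs through chan->ops. A minimal sketch of that wiring, with member names taken from struct l2cap_chan_ops; treat it as illustrative rather than the verbatim kernel table:

    /* sketch: the table behind "chan->ops = &l2cap_chan_ops" above */
    static const struct l2cap_chan_ops l2cap_chan_ops = {
        .name           = "L2CAP Socket Interface",
        .new_connection = l2cap_sock_new_connection_cb,
        .recv           = l2cap_sock_recv_cb,
        .close          = l2cap_sock_close_cb,
        .teardown       = l2cap_sock_teardown_cb,
        .state_change   = l2cap_sock_state_change_cb,
        .ready          = l2cap_sock_ready_cb,
        .defer          = l2cap_sock_defer_cb,
        .resume         = l2cap_sock_resume_cb,
        .suspend        = l2cap_sock_suspend_cb,
        .set_shutdown   = l2cap_sock_set_shutdown_cb,
        .get_sndtimeo   = l2cap_sock_get_sndtimeo_cb,
        .alloc_skb      = l2cap_sock_alloc_skb_cb,
    };
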
H A D6lowpan.c43 struct l2cap_chan *chan; member in struct:skb_cb
67 struct l2cap_chan *chan; member in struct:lowpan_peer
124 &peer->chan->dst, peer->chan->dst_type); peer_lookup_ba()
126 if (bacmp(&peer->chan->dst, ba)) peer_lookup_ba()
129 if (type == peer->chan->dst_type) { peer_lookup_ba()
141 struct l2cap_chan *chan) __peer_lookup_chan()
146 if (peer->chan == chan) __peer_lookup_chan()
159 if (peer->chan->conn == conn) __peer_lookup_conn()
210 &peer->chan->dst, peer->chan->dst_type, peer_lookup_dst()
273 struct l2cap_chan *chan) iphc_decompress()
283 peer = __peer_lookup_chan(dev, chan); iphc_decompress()
310 struct l2cap_chan *chan) recv_pkt()
358 ret = iphc_decompress(local_skb, dev, chan); recv_pkt()
393 static int chan_recv_cb(struct l2cap_chan *chan, struct sk_buff *skb) chan_recv_cb() argument
399 peer = lookup_peer(chan->conn); chan_recv_cb()
403 dev = lookup_dev(chan->conn); chan_recv_cb()
407 err = recv_pkt(skb, dev->netdev, chan); chan_recv_cb()
462 lowpan_cb(skb)->chan = NULL; setup_header()
491 lowpan_cb(skb)->chan = peer->chan; setup_header()
523 static int send_pkt(struct l2cap_chan *chan, struct sk_buff *skb, send_pkt() argument
533 chan->data = skb; send_pkt()
541 err = l2cap_chan_send(chan, &msg, skb->len); send_pkt()
583 BT_DBG("xmit %s to %pMR type %d IP %pI6c chan %p", send_mcast_pkt()
585 &pentry->chan->dst, pentry->chan->dst_type, send_mcast_pkt()
586 &pentry->peer_addr, pentry->chan); send_mcast_pkt()
587 ret = send_pkt(pentry->chan, local_skb, netdev); send_mcast_pkt()
625 if (lowpan_cb(skb)->chan) { bt_xmit()
626 BT_DBG("xmit %s to %pMR type %d IP %pI6c chan %p", bt_xmit()
628 &lowpan_cb(skb)->addr, lowpan_cb(skb)->chan); bt_xmit()
629 err = send_pkt(lowpan_cb(skb)->chan, skb, netdev); bt_xmit()
768 struct l2cap_chan *chan; chan_create() local
770 chan = l2cap_chan_create(); chan_create()
771 if (!chan) chan_create()
774 l2cap_chan_set_defaults(chan); chan_create()
776 chan->chan_type = L2CAP_CHAN_CONN_ORIENTED; chan_create()
777 chan->mode = L2CAP_MODE_LE_FLOWCTL; chan_create()
778 chan->omtu = 65535; chan_create()
779 chan->imtu = chan->omtu; chan_create()
781 return chan; chan_create()
786 struct l2cap_chan *chan; chan_open() local
788 chan = chan_create(); chan_open()
789 if (!chan) chan_open()
792 chan->remote_mps = chan->omtu; chan_open()
793 chan->mps = chan->omtu; chan_open()
795 chan->state = BT_CONNECTED; chan_open()
797 return chan; chan_open()
808 static struct l2cap_chan *add_peer_chan(struct l2cap_chan *chan, add_peer_chan() argument
817 peer->chan = chan; add_peer_chan()
823 set_addr((u8 *)&peer->peer_addr.s6_addr + 8, chan->dst.b, add_peer_chan()
824 chan->dst_type); add_peer_chan()
832 set_ip_addr_bits(chan->dst_type, (u8 *)&peer->peer_addr.s6_addr + 8); add_peer_chan()
843 return peer->chan; add_peer_chan()
846 static int setup_netdev(struct l2cap_chan *chan, struct lowpan_dev **dev) setup_netdev() argument
856 set_dev_addr(netdev, &chan->src, chan->src_type); setup_netdev()
859 SET_NETDEV_DEV(netdev, &chan->conn->hcon->dev); setup_netdev()
870 netdev->ifindex, &chan->dst, chan->dst_type, setup_netdev()
871 &chan->src, chan->src_type); setup_netdev()
876 (*dev)->hdev = chan->conn->hcon->hdev; setup_netdev()
890 static inline void chan_ready_cb(struct l2cap_chan *chan) chan_ready_cb() argument
894 dev = lookup_dev(chan->conn); chan_ready_cb()
896 BT_DBG("chan %p conn %p dev %p", chan, chan->conn, dev); chan_ready_cb()
899 if (setup_netdev(chan, &dev) < 0) { chan_ready_cb()
900 l2cap_chan_del(chan, -ENOENT); chan_ready_cb()
908 add_peer_chan(chan, dev); chan_ready_cb()
914 struct l2cap_chan *chan; chan_new_conn_cb() local
916 chan = chan_open(pchan); chan_new_conn_cb()
917 chan->ops = pchan->ops; chan_new_conn_cb()
919 BT_DBG("chan %p pchan %p", chan, pchan); chan_new_conn_cb()
921 return chan; chan_new_conn_cb()
934 static void chan_close_cb(struct l2cap_chan *chan) chan_close_cb() argument
942 BT_DBG("chan %p conn %p", chan, chan->conn); chan_close_cb()
944 if (chan->conn && chan->conn->hcon) { chan_close_cb()
945 if (!is_bt_6lowpan(chan->conn->hcon)) chan_close_cb()
958 peer = __peer_lookup_chan(dev, chan); chan_close_cb()
965 BT_DBG("chan %p orig refcnt %d", chan, chan_close_cb()
966 atomic_read(&chan->kref.refcount)); chan_close_cb()
968 l2cap_chan_put(chan); chan_close_cb()
991 static void chan_state_change_cb(struct l2cap_chan *chan, int state, int err) chan_state_change_cb() argument
993 BT_DBG("chan %p conn %p state %s err %d", chan, chan->conn, chan_state_change_cb()
997 static struct sk_buff *chan_alloc_skb_cb(struct l2cap_chan *chan, chan_alloc_skb_cb() argument
1008 static void chan_suspend_cb(struct l2cap_chan *chan) chan_suspend_cb() argument
1010 struct sk_buff *skb = chan->data; chan_suspend_cb()
1012 BT_DBG("chan %p conn %p skb %p", chan, chan->conn, skb); chan_suspend_cb()
1020 static void chan_resume_cb(struct l2cap_chan *chan) chan_resume_cb() argument
1022 struct sk_buff *skb = chan->data; chan_resume_cb()
1024 BT_DBG("chan %p conn %p skb %p", chan, chan->conn, skb); chan_resume_cb()
1032 static long chan_get_sndtimeo_cb(struct l2cap_chan *chan) chan_get_sndtimeo_cb() argument
1087 BT_DBG("chan %p err %d", pchan, err); bt_6lowpan_connect()
1104 BT_DBG("peer %p chan %p", peer, peer->chan); bt_6lowpan_disconnect()
1106 l2cap_chan_close(peer->chan, ENOENT); bt_6lowpan_disconnect()
1129 BT_DBG("chan %p src type %d", pchan, pchan->src_type); bt_6lowpan_listen()
1196 new_peer->chan = peer->chan; disconnect_all_peers()
1207 l2cap_chan_close(peer->chan, ENOENT); disconnect_all_peers()
1347 &peer->chan->dst, peer->chan->dst_type); lowpan_control_show()
140 __peer_lookup_chan(struct lowpan_dev *dev, struct l2cap_chan *chan) __peer_lookup_chan() argument
272 iphc_decompress(struct sk_buff *skb, struct net_device *netdev, struct l2cap_chan *chan) iphc_decompress() argument
309 recv_pkt(struct sk_buff *skb, struct net_device *dev, struct l2cap_chan *chan) recv_pkt() argument
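
The 6LoWPAN channel bring-up above follows a fixed pattern: allocate, apply defaults, then override type, mode, and MTUs. A condensed sketch mirroring chan_create()/chan_open(), with error handling trimmed and the function name ours:

    static struct l2cap_chan *lowpan_chan_sketch(void)
    {
        struct l2cap_chan *chan;

        chan = l2cap_chan_create();        /* refcounted allocation */
        if (!chan)
            return NULL;

        l2cap_chan_set_defaults(chan);     /* baseline timers/windows */

        chan->chan_type = L2CAP_CHAN_CONN_ORIENTED;
        chan->mode = L2CAP_MODE_LE_FLOWCTL;    /* LE credit-based mode */
        chan->imtu = chan->omtu = 65535;
        chan->mps = chan->remote_mps = chan->omtu;
        chan->state = BT_CONNECTED;

        return chan;
    }
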
H A Da2mp.c50 struct l2cap_chan *chan = mgr->a2mp_chan; a2mp_send() local
67 l2cap_chan_send(chan, &msg, total_len); a2mp_send()
247 struct l2cap_chan *chan; a2mp_discover_rsp() local
251 list_for_each_entry(chan, &conn->chan_l, list) { a2mp_discover_rsp()
253 BT_DBG("chan %p state %s", chan, a2mp_discover_rsp()
254 state_to_string(chan->state)); a2mp_discover_rsp()
256 if (chan->scid == L2CAP_CID_A2MP) a2mp_discover_rsp()
259 l2cap_chan_lock(chan); a2mp_discover_rsp()
261 if (chan->state == BT_CONNECT) a2mp_discover_rsp()
262 l2cap_send_conn_req(chan); a2mp_discover_rsp()
264 l2cap_chan_unlock(chan); a2mp_discover_rsp()
588 static int a2mp_chan_recv_cb(struct l2cap_chan *chan, struct sk_buff *skb) a2mp_chan_recv_cb() argument
591 struct amp_mgr *mgr = chan->data; a2mp_chan_recv_cb()
688 static void a2mp_chan_close_cb(struct l2cap_chan *chan) a2mp_chan_close_cb() argument
690 l2cap_chan_put(chan); a2mp_chan_close_cb()
693 static void a2mp_chan_state_change_cb(struct l2cap_chan *chan, int state, a2mp_chan_state_change_cb() argument
696 struct amp_mgr *mgr = chan->data; a2mp_chan_state_change_cb()
701 BT_DBG("chan %p state %s", chan, state_to_string(state)); a2mp_chan_state_change_cb()
703 chan->state = state; a2mp_chan_state_change_cb()
713 static struct sk_buff *a2mp_chan_alloc_skb_cb(struct l2cap_chan *chan, a2mp_chan_alloc_skb_cb() argument
745 struct l2cap_chan *chan; a2mp_chan_open() local
748 chan = l2cap_chan_create(); a2mp_chan_open()
749 if (!chan) a2mp_chan_open()
752 BT_DBG("chan %p", chan); a2mp_chan_open()
754 chan->chan_type = L2CAP_CHAN_FIXED; a2mp_chan_open()
755 chan->scid = L2CAP_CID_A2MP; a2mp_chan_open()
756 chan->dcid = L2CAP_CID_A2MP; a2mp_chan_open()
757 chan->omtu = L2CAP_A2MP_DEFAULT_MTU; a2mp_chan_open()
758 chan->imtu = L2CAP_A2MP_DEFAULT_MTU; a2mp_chan_open()
759 chan->flush_to = L2CAP_DEFAULT_FLUSH_TO; a2mp_chan_open()
761 chan->ops = &a2mp_chan_ops; a2mp_chan_open()
763 l2cap_chan_set_defaults(chan); a2mp_chan_open()
764 chan->remote_max_tx = chan->max_tx; a2mp_chan_open()
765 chan->remote_tx_win = chan->tx_win; a2mp_chan_open()
767 chan->retrans_timeout = L2CAP_DEFAULT_RETRANS_TO; a2mp_chan_open()
768 chan->monitor_timeout = L2CAP_DEFAULT_MONITOR_TO; a2mp_chan_open()
770 skb_queue_head_init(&chan->tx_q); a2mp_chan_open()
772 chan->mode = L2CAP_MODE_ERTM; a2mp_chan_open()
774 err = l2cap_ertm_init(chan); a2mp_chan_open()
776 l2cap_chan_del(chan, 0); a2mp_chan_open()
780 chan->conf_state = 0; a2mp_chan_open()
783 __l2cap_chan_add(conn, chan); a2mp_chan_open()
785 l2cap_chan_add(conn, chan); a2mp_chan_open()
787 chan->remote_mps = chan->omtu; a2mp_chan_open()
788 chan->mps = chan->omtu; a2mp_chan_open()
790 chan->state = BT_CONNECTED; a2mp_chan_open()
792 return chan; a2mp_chan_open()
829 struct l2cap_chan *chan; amp_mgr_create() local
839 chan = a2mp_chan_open(conn, locked); amp_mgr_create()
840 if (!chan) { amp_mgr_create()
845 mgr->a2mp_chan = chan; amp_mgr_create()
846 chan->data = mgr; amp_mgr_create()
877 BT_DBG("mgr: %p chan %p", mgr, mgr->a2mp_chan); a2mp_channel_create()
1006 void a2mp_discover_amp(struct l2cap_chan *chan) a2mp_discover_amp() argument
1008 struct l2cap_conn *conn = chan->conn; a2mp_discover_amp()
1012 BT_DBG("chan %p conn %p mgr %p", chan, conn, mgr); a2mp_discover_amp()
1020 mgr->bredr_chan = chan; a2mp_discover_amp()
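
a2mp_send() above drives l2cap_chan_send() with a msghdr built over a kernel buffer. A sketch of that send path as it would sit inside net/bluetooth/a2mp.c (where the needed headers are already included); the function name is ours:

    static int a2mp_send_sketch(struct l2cap_chan *chan, void *buf, size_t len)
    {
        struct kvec iv = { .iov_base = buf, .iov_len = len };
        struct msghdr msg;

        memset(&msg, 0, sizeof(msg));
        iov_iter_kvec(&msg.msg_iter, WRITE | ITER_KVEC, &iv, 1, len);

        /* consumes the iterator; returns bytes queued or -errno */
        return l2cap_chan_send(chan, &msg, len);
    }
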
H A Dsmp.c493 struct l2cap_chan *chan = hdev->smp_data; smp_irk_matches() local
498 if (!chan || !chan->data) smp_irk_matches()
501 smp = chan->data; smp_irk_matches()
514 struct l2cap_chan *chan = hdev->smp_data; smp_generate_rpa() local
518 if (!chan || !chan->data) smp_generate_rpa()
521 smp = chan->data; smp_generate_rpa()
539 struct l2cap_chan *chan = hdev->smp_data; smp_generate_oob() local
543 if (!chan || !chan->data) smp_generate_oob()
546 smp = chan->data; smp_generate_oob()
586 struct l2cap_chan *chan = conn->smp; smp_send_cmd() local
591 if (!chan) smp_send_cmd()
606 l2cap_chan_send(chan, &msg, 1 + len); smp_send_cmd()
608 if (!chan->data) smp_send_cmd()
611 smp = chan->data; smp_send_cmd()
646 struct l2cap_chan *chan = conn->smp; build_pairing_cmd() local
647 struct smp_chan *smp = chan->data; build_pairing_cmd()
720 struct l2cap_chan *chan = conn->smp; check_enc_key_size() local
721 struct smp_chan *smp = chan->data; check_enc_key_size()
734 struct l2cap_chan *chan = conn->smp; smp_chan_destroy() local
735 struct smp_chan *smp = chan->data; smp_chan_destroy()
781 chan->data = NULL; smp_chan_destroy()
789 struct l2cap_chan *chan = conn->smp; smp_failure() local
798 if (chan->data) smp_failure()
845 struct l2cap_chan *chan = conn->smp; tk_request() local
846 struct smp_chan *smp = chan->data; tk_request()
1028 struct l2cap_chan *chan = conn->smp; smp_notify_keys() local
1029 struct smp_chan *smp = chan->data; smp_notify_keys()
1361 struct l2cap_chan *chan = conn->smp; smp_chan_create() local
1384 chan->data = smp; smp_chan_create()
1605 struct l2cap_chan *chan; smp_user_confirm_reply() local
1615 chan = conn->smp; smp_user_confirm_reply()
1616 if (!chan) smp_user_confirm_reply()
1619 l2cap_chan_lock(chan); smp_user_confirm_reply()
1620 if (!chan->data) { smp_user_confirm_reply()
1625 smp = chan->data; smp_user_confirm_reply()
1663 l2cap_chan_unlock(chan); smp_user_confirm_reply()
1710 struct l2cap_chan *chan = conn->smp; smp_cmd_pairing_req() local
1724 if (!chan->data) smp_cmd_pairing_req()
1727 smp = chan->data; smp_cmd_pairing_req()
1845 struct l2cap_chan *chan = hdev->smp_data; sc_send_public_key() local
1848 if (!chan || !chan->data) sc_send_public_key()
1851 smp_dev = chan->data; sc_send_public_key()
1895 struct l2cap_chan *chan = conn->smp; smp_cmd_pairing_rsp() local
1896 struct smp_chan *smp = chan->data; smp_cmd_pairing_rsp()
2050 struct l2cap_chan *chan = conn->smp; smp_cmd_pairing_confirm() local
2051 struct smp_chan *smp = chan->data; smp_cmd_pairing_confirm()
2092 struct l2cap_chan *chan = conn->smp; smp_cmd_pairing_random() local
2093 struct smp_chan *smp = chan->data; smp_cmd_pairing_random()
2286 struct l2cap_chan *chan; smp_conn_security() local
2310 chan = conn->smp; smp_conn_security()
2311 if (!chan) { smp_conn_security()
2316 l2cap_chan_lock(chan); smp_conn_security()
2319 if (chan->data) { smp_conn_security()
2362 l2cap_chan_unlock(chan); smp_conn_security()
2369 struct l2cap_chan *chan = conn->smp; smp_cmd_encrypt_info() local
2370 struct smp_chan *smp = chan->data; smp_cmd_encrypt_info()
2389 struct l2cap_chan *chan = conn->smp; smp_cmd_master_ident() local
2390 struct smp_chan *smp = chan->data; smp_cmd_master_ident()
2425 struct l2cap_chan *chan = conn->smp; smp_cmd_ident_info() local
2426 struct smp_chan *smp = chan->data; smp_cmd_ident_info()
2446 struct l2cap_chan *chan = conn->smp; smp_cmd_ident_addr_info() local
2447 struct smp_chan *smp = chan->data; smp_cmd_ident_addr_info()
2501 struct l2cap_chan *chan = conn->smp; smp_cmd_sign_info() local
2502 struct smp_chan *smp = chan->data; smp_cmd_sign_info()
2578 struct l2cap_chan *chan = conn->smp; smp_cmd_public_key() local
2579 struct smp_chan *smp = chan->data; smp_cmd_public_key()
2690 struct l2cap_chan *chan = conn->smp; smp_cmd_dhkey_check() local
2692 struct smp_chan *smp = chan->data; smp_cmd_dhkey_check()
2762 static int smp_sig_channel(struct l2cap_chan *chan, struct sk_buff *skb) smp_sig_channel() argument
2764 struct l2cap_conn *conn = chan->conn; smp_sig_channel()
2781 smp = chan->data; smp_sig_channel()
2875 static void smp_teardown_cb(struct l2cap_chan *chan, int err) smp_teardown_cb() argument
2877 struct l2cap_conn *conn = chan->conn; smp_teardown_cb()
2879 BT_DBG("chan %p", chan); smp_teardown_cb()
2881 if (chan->data) smp_teardown_cb()
2885 l2cap_chan_put(chan); smp_teardown_cb()
2888 static void bredr_pairing(struct l2cap_chan *chan) bredr_pairing() argument
2890 struct l2cap_conn *conn = chan->conn; bredr_pairing()
2896 BT_DBG("chan %p", chan); bredr_pairing()
2927 /* Remote must support SMP fixed chan for BR/EDR */ bredr_pairing()
2932 if (chan->data) bredr_pairing()
2956 static void smp_resume_cb(struct l2cap_chan *chan) smp_resume_cb() argument
2958 struct smp_chan *smp = chan->data; smp_resume_cb()
2959 struct l2cap_conn *conn = chan->conn; smp_resume_cb()
2962 BT_DBG("chan %p", chan); smp_resume_cb()
2965 bredr_pairing(chan); smp_resume_cb()
2980 static void smp_ready_cb(struct l2cap_chan *chan) smp_ready_cb() argument
2982 struct l2cap_conn *conn = chan->conn; smp_ready_cb()
2985 BT_DBG("chan %p", chan); smp_ready_cb()
2987 conn->smp = chan; smp_ready_cb()
2988 l2cap_chan_hold(chan); smp_ready_cb()
2991 bredr_pairing(chan); smp_ready_cb()
2994 static int smp_recv_cb(struct l2cap_chan *chan, struct sk_buff *skb) smp_recv_cb() argument
2998 BT_DBG("chan %p", chan); smp_recv_cb()
3000 err = smp_sig_channel(chan, skb); smp_recv_cb()
3002 struct smp_chan *smp = chan->data; smp_recv_cb()
3007 hci_disconnect(chan->conn->hcon, HCI_ERROR_AUTH_FAILURE); smp_recv_cb()
3013 static struct sk_buff *smp_alloc_skb_cb(struct l2cap_chan *chan, smp_alloc_skb_cb() argument
3024 bt_cb(skb)->l2cap.chan = chan; smp_alloc_skb_cb()
3048 struct l2cap_chan *chan; smp_new_conn_cb() local
3052 chan = l2cap_chan_create(); smp_new_conn_cb()
3053 if (!chan) smp_new_conn_cb()
3056 chan->chan_type = pchan->chan_type; smp_new_conn_cb()
3057 chan->ops = &smp_chan_ops; smp_new_conn_cb()
3058 chan->scid = pchan->scid; smp_new_conn_cb()
3059 chan->dcid = chan->scid; smp_new_conn_cb()
3060 chan->imtu = pchan->imtu; smp_new_conn_cb()
3061 chan->omtu = pchan->omtu; smp_new_conn_cb()
3062 chan->mode = pchan->mode; smp_new_conn_cb()
3069 atomic_set(&chan->nesting, L2CAP_NESTING_SMP); smp_new_conn_cb()
3071 BT_DBG("created chan %p", chan); smp_new_conn_cb()
3073 return chan; smp_new_conn_cb()
3096 struct l2cap_chan *chan; smp_add_cid() local
3129 chan = l2cap_chan_create(); smp_add_cid()
3130 if (!chan) { smp_add_cid()
3139 chan->data = smp; smp_add_cid()
3141 l2cap_add_scid(chan, cid); smp_add_cid()
3143 l2cap_chan_set_defaults(chan); smp_add_cid()
3148 hci_copy_identity_address(hdev, &chan->src, &bdaddr_type); smp_add_cid()
3151 chan->src_type = BDADDR_LE_PUBLIC; smp_add_cid()
3153 chan->src_type = BDADDR_LE_RANDOM; smp_add_cid()
3155 bacpy(&chan->src, &hdev->bdaddr); smp_add_cid()
3156 chan->src_type = BDADDR_BREDR; smp_add_cid()
3159 chan->state = BT_LISTEN; smp_add_cid()
3160 chan->mode = L2CAP_MODE_BASIC; smp_add_cid()
3161 chan->imtu = L2CAP_DEFAULT_MTU; smp_add_cid()
3162 chan->ops = &smp_root_chan_ops; smp_add_cid()
3165 atomic_set(&chan->nesting, L2CAP_NESTING_PARENT); smp_add_cid()
3167 return chan; smp_add_cid()
3170 static void smp_del_chan(struct l2cap_chan *chan) smp_del_chan() argument
3174 BT_DBG("chan %p", chan); smp_del_chan()
3176 smp = chan->data; smp_del_chan()
3178 chan->data = NULL; smp_del_chan()
3186 l2cap_chan_put(chan); smp_del_chan()
3222 struct l2cap_chan *chan; force_bredr_smp_write() local
3224 chan = smp_add_cid(hdev, L2CAP_CID_SMP_BREDR); force_bredr_smp_write()
3225 if (IS_ERR(chan)) force_bredr_smp_write()
3226 return PTR_ERR(chan); force_bredr_smp_write()
3228 hdev->smp_bredr_data = chan; force_bredr_smp_write()
3230 struct l2cap_chan *chan; force_bredr_smp_write() local
3232 chan = hdev->smp_bredr_data; force_bredr_smp_write()
3234 smp_del_chan(chan); force_bredr_smp_write()
3251 struct l2cap_chan *chan; smp_register() local
3262 chan = hdev->smp_data; smp_register()
3264 smp_del_chan(chan); smp_register()
3267 chan = smp_add_cid(hdev, L2CAP_CID_SMP); smp_register()
3268 if (IS_ERR(chan)) smp_register()
3269 return PTR_ERR(chan); smp_register()
3271 hdev->smp_data = chan; smp_register()
3287 chan = hdev->smp_bredr_data; smp_register()
3289 smp_del_chan(chan); smp_register()
3292 chan = smp_add_cid(hdev, L2CAP_CID_SMP_BREDR); smp_register()
3293 if (IS_ERR(chan)) { smp_register()
3294 int err = PTR_ERR(chan); smp_register()
3295 chan = hdev->smp_data; smp_register()
3297 smp_del_chan(chan); smp_register()
3301 hdev->smp_bredr_data = chan; smp_register()
3308 struct l2cap_chan *chan; smp_unregister() local
3311 chan = hdev->smp_bredr_data; smp_unregister()
3313 smp_del_chan(chan); smp_unregister()
3317 chan = hdev->smp_data; smp_unregister()
3319 smp_del_chan(chan); smp_unregister()
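
smp_add_cid() above is the canonical fixed-CID listener pattern: bind a channel to a well-known CID, leave it in BT_LISTEN, and let its ops spawn a per-connection child. A reduced sketch with the SMP crypto setup omitted (function name is ours):

    static struct l2cap_chan *fixed_cid_listener_sketch(u16 cid,
                            const struct l2cap_chan_ops *ops)
    {
        struct l2cap_chan *chan = l2cap_chan_create();

        if (!chan)
            return ERR_PTR(-ENOMEM);

        l2cap_add_scid(chan, cid);      /* bind the well-known CID */
        l2cap_chan_set_defaults(chan);

        chan->state = BT_LISTEN;
        chan->mode = L2CAP_MODE_BASIC;
        chan->imtu = L2CAP_DEFAULT_MTU;
        chan->ops = ops;

        /* parent nesting level so child channel locks do not trip lockdep */
        atomic_set(&chan->nesting, L2CAP_NESTING_PARENT);

        return chan;
    }
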
/linux-4.1.27/drivers/video/fbdev/aty/
H A Dradeon_i2c.c19 struct radeon_i2c_chan *chan = data; radeon_gpio_setscl() local
20 struct radeonfb_info *rinfo = chan->rinfo; radeon_gpio_setscl()
23 val = INREG(chan->ddc_reg) & ~(VGA_DDC_CLK_OUT_EN); radeon_gpio_setscl()
27 OUTREG(chan->ddc_reg, val); radeon_gpio_setscl()
28 (void)INREG(chan->ddc_reg); radeon_gpio_setscl()
33 struct radeon_i2c_chan *chan = data; radeon_gpio_setsda() local
34 struct radeonfb_info *rinfo = chan->rinfo; radeon_gpio_setsda()
37 val = INREG(chan->ddc_reg) & ~(VGA_DDC_DATA_OUT_EN); radeon_gpio_setsda()
41 OUTREG(chan->ddc_reg, val); radeon_gpio_setsda()
42 (void)INREG(chan->ddc_reg); radeon_gpio_setsda()
47 struct radeon_i2c_chan *chan = data; radeon_gpio_getscl() local
48 struct radeonfb_info *rinfo = chan->rinfo; radeon_gpio_getscl()
51 val = INREG(chan->ddc_reg); radeon_gpio_getscl()
58 struct radeon_i2c_chan *chan = data; radeon_gpio_getsda() local
59 struct radeonfb_info *rinfo = chan->rinfo; radeon_gpio_getsda()
62 val = INREG(chan->ddc_reg); radeon_gpio_getsda()
67 static int radeon_setup_i2c_bus(struct radeon_i2c_chan *chan, const char *name) radeon_setup_i2c_bus() argument
71 snprintf(chan->adapter.name, sizeof(chan->adapter.name), radeon_setup_i2c_bus()
73 chan->adapter.owner = THIS_MODULE; radeon_setup_i2c_bus()
74 chan->adapter.algo_data = &chan->algo; radeon_setup_i2c_bus()
75 chan->adapter.dev.parent = &chan->rinfo->pdev->dev; radeon_setup_i2c_bus()
76 chan->algo.setsda = radeon_gpio_setsda; radeon_setup_i2c_bus()
77 chan->algo.setscl = radeon_gpio_setscl; radeon_setup_i2c_bus()
78 chan->algo.getsda = radeon_gpio_getsda; radeon_setup_i2c_bus()
79 chan->algo.getscl = radeon_gpio_getscl; radeon_setup_i2c_bus()
80 chan->algo.udelay = 10; radeon_setup_i2c_bus()
81 chan->algo.timeout = 20; radeon_setup_i2c_bus()
82 chan->algo.data = chan; radeon_setup_i2c_bus()
84 i2c_set_adapdata(&chan->adapter, chan); radeon_setup_i2c_bus()
87 radeon_gpio_setsda(chan, 1); radeon_setup_i2c_bus()
88 radeon_gpio_setscl(chan, 1); radeon_setup_i2c_bus()
91 rc = i2c_bit_add_bus(&chan->adapter); radeon_setup_i2c_bus()
93 dev_dbg(&chan->rinfo->pdev->dev, "I2C bus %s registered.\n", name); radeon_setup_i2c_bus()
95 dev_warn(&chan->rinfo->pdev->dev, "Failed to register I2C bus %s.\n", name); radeon_setup_i2c_bus()
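
The savage, radeon, gma500, and riva excerpts all register a bit-banged DDC bus the same way: four GPIO accessors plus timing go into struct i2c_algo_bit_data, both lines are released, then i2c_bit_add_bus() attaches the software I2C algorithm. A self-contained sketch of the recipe; every demo_* name is ours, and the accessors would poke hardware-specific registers:

    #include <linux/module.h>
    #include <linux/delay.h>
    #include <linux/i2c.h>
    #include <linux/i2c-algo-bit.h>

    struct demo_ddc_bus {
        struct i2c_adapter adapter;
        struct i2c_algo_bit_data algo;
    };

    static void demo_setsda(void *data, int state) { /* drive/release SDA */ }
    static void demo_setscl(void *data, int state) { /* drive/release SCL */ }
    static int demo_getsda(void *data) { return 1; /* sample SDA */ }
    static int demo_getscl(void *data) { return 1; /* sample SCL */ }

    static int demo_ddc_register(struct demo_ddc_bus *bus, struct device *parent)
    {
        snprintf(bus->adapter.name, sizeof(bus->adapter.name), "demo DDC");
        bus->adapter.owner = THIS_MODULE;
        bus->adapter.algo_data = &bus->algo;
        bus->adapter.dev.parent = parent;
        bus->algo.setsda = demo_setsda;
        bus->algo.setscl = demo_setscl;
        bus->algo.getsda = demo_getsda;
        bus->algo.getscl = demo_getscl;
        bus->algo.udelay = 10;                  /* half-bit delay: ~50 kHz */
        bus->algo.timeout = msecs_to_jiffies(100);
        bus->algo.data = bus;                   /* handed back to accessors */

        i2c_set_adapdata(&bus->adapter, bus);

        /* release both lines so the bus starts out idle */
        demo_setsda(bus, 1);
        demo_setscl(bus, 1);
        udelay(20);

        return i2c_bit_add_bus(&bus->adapter);  /* 0 on success */
    }
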
/linux-4.1.27/drivers/gpu/drm/gma500/
H A Doaktrail_lvds_i2c.c64 #define LPC_READ_REG(chan, r) inl((chan)->reg + (r))
65 #define LPC_WRITE_REG(chan, r, val) outl((val), (chan)->reg + (r))
69 struct psb_intel_i2c_chan *chan = data; get_clock() local
72 val = LPC_READ_REG(chan, RGIO); get_clock()
74 LPC_WRITE_REG(chan, RGIO, val); get_clock()
75 tmp = LPC_READ_REG(chan, RGLVL); get_clock()
76 val = (LPC_READ_REG(chan, RGLVL) & GPIO_CLOCK) ? 1 : 0; get_clock()
83 struct psb_intel_i2c_chan *chan = data; get_data() local
86 val = LPC_READ_REG(chan, RGIO); get_data()
88 LPC_WRITE_REG(chan, RGIO, val); get_data()
89 tmp = LPC_READ_REG(chan, RGLVL); get_data()
90 val = (LPC_READ_REG(chan, RGLVL) & GPIO_DATA) ? 1 : 0; get_data()
97 struct psb_intel_i2c_chan *chan = data; set_clock() local
101 val = LPC_READ_REG(chan, RGIO); set_clock()
103 LPC_WRITE_REG(chan, RGIO, val); set_clock()
105 val = LPC_READ_REG(chan, RGIO); set_clock()
107 LPC_WRITE_REG(chan, RGIO, val); set_clock()
108 val = LPC_READ_REG(chan, RGLVL); set_clock()
110 LPC_WRITE_REG(chan, RGLVL, val); set_clock()
116 struct psb_intel_i2c_chan *chan = data; set_data() local
120 val = LPC_READ_REG(chan, RGIO); set_data()
122 LPC_WRITE_REG(chan, RGIO, val); set_data()
124 val = LPC_READ_REG(chan, RGIO); set_data()
126 LPC_WRITE_REG(chan, RGIO, val); set_data()
127 val = LPC_READ_REG(chan, RGLVL); set_data()
129 LPC_WRITE_REG(chan, RGLVL, val); set_data()
138 struct psb_intel_i2c_chan *chan; oaktrail_lvds_i2c_init() local
140 chan = kzalloc(sizeof(struct psb_intel_i2c_chan), GFP_KERNEL); oaktrail_lvds_i2c_init()
141 if (!chan) oaktrail_lvds_i2c_init()
144 chan->drm_dev = dev; oaktrail_lvds_i2c_init()
145 chan->reg = dev_priv->lpc_gpio_base; oaktrail_lvds_i2c_init()
146 strncpy(chan->adapter.name, "gma500 LPC", I2C_NAME_SIZE - 1); oaktrail_lvds_i2c_init()
147 chan->adapter.owner = THIS_MODULE; oaktrail_lvds_i2c_init()
148 chan->adapter.algo_data = &chan->algo; oaktrail_lvds_i2c_init()
149 chan->adapter.dev.parent = &dev->pdev->dev; oaktrail_lvds_i2c_init()
150 chan->algo.setsda = set_data; oaktrail_lvds_i2c_init()
151 chan->algo.setscl = set_clock; oaktrail_lvds_i2c_init()
152 chan->algo.getsda = get_data; oaktrail_lvds_i2c_init()
153 chan->algo.getscl = get_clock; oaktrail_lvds_i2c_init()
154 chan->algo.udelay = 100; oaktrail_lvds_i2c_init()
155 chan->algo.timeout = usecs_to_jiffies(2200); oaktrail_lvds_i2c_init()
156 chan->algo.data = chan; oaktrail_lvds_i2c_init()
158 i2c_set_adapdata(&chan->adapter, chan); oaktrail_lvds_i2c_init()
160 set_data(chan, 1); oaktrail_lvds_i2c_init()
161 set_clock(chan, 1); oaktrail_lvds_i2c_init()
164 if (i2c_bit_add_bus(&chan->adapter)) { oaktrail_lvds_i2c_init()
165 kfree(chan); oaktrail_lvds_i2c_init()
169 gma_encoder->ddc_bus = chan; oaktrail_lvds_i2c_init()
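
The LPC helpers above emulate open-drain I2C: a line is never driven high. "High" tri-states the pin (direction bit set in RGIO) and lets the pull-up raise it; "low" flips the pin to output and sinks it via RGLVL. A generic sketch of that rule, with MMIO standing in for the driver's LPC port I/O and all names ours:

    static void od_set_line(void __iomem *dir_reg, void __iomem *lvl_reg,
                u32 bit, int state)
    {
        if (state) {
            /* input direction: release the line, the pull-up wins */
            writel(readl(dir_reg) | bit, dir_reg);
        } else {
            /* output direction, level 0: actively sink the line */
            writel(readl(dir_reg) & ~bit, dir_reg);
            writel(readl(lvl_reg) & ~bit, lvl_reg);
        }
    }
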
H A Dintel_i2c.c35 struct psb_intel_i2c_chan *chan = data; get_clock() local
36 struct drm_device *dev = chan->drm_dev; get_clock()
39 val = REG_READ(chan->reg); get_clock()
45 struct psb_intel_i2c_chan *chan = data; get_data() local
46 struct drm_device *dev = chan->drm_dev; get_data()
49 val = REG_READ(chan->reg); get_data()
55 struct psb_intel_i2c_chan *chan = data; set_clock() local
56 struct drm_device *dev = chan->drm_dev; set_clock()
61 REG_READ(chan->reg) & (GPIO_DATA_PULLUP_DISABLE | set_clock()
69 REG_WRITE(chan->reg, reserved | clock_bits); set_clock()
75 struct psb_intel_i2c_chan *chan = data; set_data() local
76 struct drm_device *dev = chan->drm_dev; set_data()
81 REG_READ(chan->reg) & (GPIO_DATA_PULLUP_DISABLE | set_data()
91 REG_WRITE(chan->reg, reserved | data_bits); set_data()
119 struct psb_intel_i2c_chan *chan; psb_intel_i2c_create() local
121 chan = kzalloc(sizeof(struct psb_intel_i2c_chan), GFP_KERNEL); psb_intel_i2c_create()
122 if (!chan) psb_intel_i2c_create()
125 chan->drm_dev = dev; psb_intel_i2c_create()
126 chan->reg = reg; psb_intel_i2c_create()
127 snprintf(chan->adapter.name, I2C_NAME_SIZE, "intel drm %s", name); psb_intel_i2c_create()
128 chan->adapter.owner = THIS_MODULE; psb_intel_i2c_create()
129 chan->adapter.algo_data = &chan->algo; psb_intel_i2c_create()
130 chan->adapter.dev.parent = &dev->pdev->dev; psb_intel_i2c_create()
131 chan->algo.setsda = set_data; psb_intel_i2c_create()
132 chan->algo.setscl = set_clock; psb_intel_i2c_create()
133 chan->algo.getsda = get_data; psb_intel_i2c_create()
134 chan->algo.getscl = get_clock; psb_intel_i2c_create()
135 chan->algo.udelay = 20; psb_intel_i2c_create()
136 chan->algo.timeout = usecs_to_jiffies(2200); psb_intel_i2c_create()
137 chan->algo.data = chan; psb_intel_i2c_create()
139 i2c_set_adapdata(&chan->adapter, chan); psb_intel_i2c_create()
141 if (i2c_bit_add_bus(&chan->adapter)) psb_intel_i2c_create()
145 set_data(chan, 1); psb_intel_i2c_create()
146 set_clock(chan, 1); psb_intel_i2c_create()
149 return chan; psb_intel_i2c_create()
152 kfree(chan); psb_intel_i2c_create()
162 void psb_intel_i2c_destroy(struct psb_intel_i2c_chan *chan) psb_intel_i2c_destroy() argument
164 if (!chan) psb_intel_i2c_destroy()
167 i2c_del_adapter(&chan->adapter); psb_intel_i2c_destroy()
168 kfree(chan); psb_intel_i2c_destroy()
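
A short usage sketch for the create/destroy pair above; the GPIO register (GPIOA) and bus name are illustrative picks:

    static int demo_attach_ddc(struct drm_device *dev,
                   struct psb_intel_i2c_chan **out)
    {
        struct psb_intel_i2c_chan *ddc;

        ddc = psb_intel_i2c_create(dev, GPIOA, "demo DDC");
        if (!ddc)
            return -ENOMEM;     /* allocation or bus registration failed */

        /* ... run i2c_transfer()s against &ddc->adapter ... */

        *out = ddc;
        return 0;
    }

    /* teardown: psb_intel_i2c_destroy(ddc) - NULL-safe, unregisters and frees */
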
/linux-4.1.27/drivers/video/fbdev/riva/
H A Drivafb-i2c.c29 struct riva_i2c_chan *chan = data; riva_gpio_setscl() local
30 struct riva_par *par = chan->par; riva_gpio_setscl()
33 VGA_WR08(par->riva.PCIO, 0x3d4, chan->ddc_base + 1); riva_gpio_setscl()
41 VGA_WR08(par->riva.PCIO, 0x3d4, chan->ddc_base + 1); riva_gpio_setscl()
47 struct riva_i2c_chan *chan = data; riva_gpio_setsda() local
48 struct riva_par *par = chan->par; riva_gpio_setsda()
51 VGA_WR08(par->riva.PCIO, 0x3d4, chan->ddc_base + 1); riva_gpio_setsda()
59 VGA_WR08(par->riva.PCIO, 0x3d4, chan->ddc_base + 1); riva_gpio_setsda()
65 struct riva_i2c_chan *chan = data; riva_gpio_getscl() local
66 struct riva_par *par = chan->par; riva_gpio_getscl()
69 VGA_WR08(par->riva.PCIO, 0x3d4, chan->ddc_base); riva_gpio_getscl()
78 struct riva_i2c_chan *chan = data; riva_gpio_getsda() local
79 struct riva_par *par = chan->par; riva_gpio_getsda()
82 VGA_WR08(par->riva.PCIO, 0x3d4, chan->ddc_base); riva_gpio_getsda()
89 static int riva_setup_i2c_bus(struct riva_i2c_chan *chan, const char *name, riva_setup_i2c_bus() argument
94 strcpy(chan->adapter.name, name); riva_setup_i2c_bus()
95 chan->adapter.owner = THIS_MODULE; riva_setup_i2c_bus()
96 chan->adapter.class = i2c_class; riva_setup_i2c_bus()
97 chan->adapter.algo_data = &chan->algo; riva_setup_i2c_bus()
98 chan->adapter.dev.parent = &chan->par->pdev->dev; riva_setup_i2c_bus()
99 chan->algo.setsda = riva_gpio_setsda; riva_setup_i2c_bus()
100 chan->algo.setscl = riva_gpio_setscl; riva_setup_i2c_bus()
101 chan->algo.getsda = riva_gpio_getsda; riva_setup_i2c_bus()
102 chan->algo.getscl = riva_gpio_getscl; riva_setup_i2c_bus()
103 chan->algo.udelay = 40; riva_setup_i2c_bus()
104 chan->algo.timeout = msecs_to_jiffies(2); riva_setup_i2c_bus()
105 chan->algo.data = chan; riva_setup_i2c_bus()
107 i2c_set_adapdata(&chan->adapter, chan); riva_setup_i2c_bus()
110 riva_gpio_setsda(chan, 1); riva_setup_i2c_bus()
111 riva_gpio_setscl(chan, 1); riva_setup_i2c_bus()
114 rc = i2c_bit_add_bus(&chan->adapter); riva_setup_i2c_bus()
116 dev_dbg(&chan->par->pdev->dev, "I2C bus %s registered.\n", name); riva_setup_i2c_bus()
118 dev_warn(&chan->par->pdev->dev, riva_setup_i2c_bus()
120 chan->par = NULL; riva_setup_i2c_bus()
128 par->chan[0].par = par; riva_create_i2c_busses()
129 par->chan[1].par = par; riva_create_i2c_busses()
130 par->chan[2].par = par; riva_create_i2c_busses()
132 par->chan[0].ddc_base = 0x36; riva_create_i2c_busses()
133 par->chan[1].ddc_base = 0x3e; riva_create_i2c_busses()
134 par->chan[2].ddc_base = 0x50; riva_create_i2c_busses()
135 riva_setup_i2c_bus(&par->chan[0], "BUS1", I2C_CLASS_HWMON); riva_create_i2c_busses()
136 riva_setup_i2c_bus(&par->chan[1], "BUS2", 0); riva_create_i2c_busses()
137 riva_setup_i2c_bus(&par->chan[2], "BUS3", 0); riva_create_i2c_busses()
145 if (!par->chan[i].par) riva_delete_i2c_busses()
147 i2c_del_adapter(&par->chan[i].adapter); riva_delete_i2c_busses()
148 par->chan[i].par = NULL; riva_delete_i2c_busses()
156 if (par->chan[conn].par) riva_probe_i2c_connector()
157 edid = fb_ddc_read(&par->chan[conn].adapter); riva_probe_i2c_connector()
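
fb_ddc_read() (referenced above) boils down to a two-message I2C transaction against the DDC address 0x50: write a zero offset, then read back the EDID block. A sketch of the equivalent raw transfer (function name is ours):

    static int demo_read_edid(struct i2c_adapter *adapter, u8 *buf, u16 len)
    {
        u8 start = 0;   /* EDID offset to read from */
        struct i2c_msg msgs[] = {
            { .addr = 0x50, .flags = 0,        .len = 1,   .buf = &start },
            { .addr = 0x50, .flags = I2C_M_RD, .len = len, .buf = buf },
        };

        /* i2c_transfer() returns the number of messages completed */
        return i2c_transfer(adapter, msgs, 2) == 2 ? 0 : -EIO;
    }
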
/linux-4.1.27/arch/sh/drivers/dma/
H A Ddma-sh.c30 static unsigned long dma_find_base(unsigned int chan) dma_find_base() argument
35 if (chan >= 6) dma_find_base()
42 static unsigned long dma_base_addr(unsigned int chan) dma_base_addr() argument
44 unsigned long base = dma_find_base(chan); dma_base_addr()
47 if (chan >= 9) dma_base_addr()
48 chan -= 6; dma_base_addr()
49 if (chan >= 4) dma_base_addr()
52 return base + (chan * 0x10); dma_base_addr()
56 static inline unsigned int get_dmte_irq(unsigned int chan) get_dmte_irq() argument
58 return chan >= 6 ? DMTE6_IRQ : DMTE0_IRQ; get_dmte_irq()
78 static inline unsigned int get_dmte_irq(unsigned int chan) get_dmte_irq() argument
80 return dmte_irq_map[chan]; get_dmte_irq()
94 static inline unsigned int calc_xmit_shift(struct dma_channel *chan) calc_xmit_shift() argument
96 u32 chcr = __raw_readl(dma_base_addr(chan->chan) + CHCR); calc_xmit_shift()
111 struct dma_channel *chan = dev_id; dma_tei() local
114 chcr = __raw_readl(dma_base_addr(chan->chan) + CHCR); dma_tei()
120 __raw_writel(chcr, (dma_base_addr(chan->chan) + CHCR)); dma_tei()
122 wake_up(&chan->wait_queue); dma_tei()
127 static int sh_dmac_request_dma(struct dma_channel *chan) sh_dmac_request_dma() argument
129 if (unlikely(!(chan->flags & DMA_TEI_CAPABLE))) sh_dmac_request_dma()
132 return request_irq(get_dmte_irq(chan->chan), dma_tei, IRQF_SHARED, sh_dmac_request_dma()
133 chan->dev_id, chan); sh_dmac_request_dma()
136 static void sh_dmac_free_dma(struct dma_channel *chan) sh_dmac_free_dma() argument
138 free_irq(get_dmte_irq(chan->chan), chan); sh_dmac_free_dma()
142 sh_dmac_configure_channel(struct dma_channel *chan, unsigned long chcr) sh_dmac_configure_channel() argument
149 chan->flags |= DMA_TEI_CAPABLE; sh_dmac_configure_channel()
151 chan->flags &= ~DMA_TEI_CAPABLE; sh_dmac_configure_channel()
154 __raw_writel(chcr, (dma_base_addr(chan->chan) + CHCR)); sh_dmac_configure_channel()
156 chan->flags |= DMA_CONFIGURED; sh_dmac_configure_channel()
160 static void sh_dmac_enable_dma(struct dma_channel *chan) sh_dmac_enable_dma() argument
165 chcr = __raw_readl(dma_base_addr(chan->chan) + CHCR); sh_dmac_enable_dma()
168 if (chan->flags & DMA_TEI_CAPABLE) sh_dmac_enable_dma()
171 __raw_writel(chcr, (dma_base_addr(chan->chan) + CHCR)); sh_dmac_enable_dma()
173 if (chan->flags & DMA_TEI_CAPABLE) { sh_dmac_enable_dma()
174 irq = get_dmte_irq(chan->chan); sh_dmac_enable_dma()
179 static void sh_dmac_disable_dma(struct dma_channel *chan) sh_dmac_disable_dma() argument
184 if (chan->flags & DMA_TEI_CAPABLE) { sh_dmac_disable_dma()
185 irq = get_dmte_irq(chan->chan); sh_dmac_disable_dma()
189 chcr = __raw_readl(dma_base_addr(chan->chan) + CHCR); sh_dmac_disable_dma()
191 __raw_writel(chcr, (dma_base_addr(chan->chan) + CHCR)); sh_dmac_disable_dma()
194 static int sh_dmac_xfer_dma(struct dma_channel *chan) sh_dmac_xfer_dma() argument
200 if (unlikely(!(chan->flags & DMA_CONFIGURED))) sh_dmac_xfer_dma()
201 sh_dmac_configure_channel(chan, 0); sh_dmac_xfer_dma()
203 sh_dmac_disable_dma(chan); sh_dmac_xfer_dma()
220 if (chan->sar || (mach_is_dreamcast() && sh_dmac_xfer_dma()
221 chan->chan == PVR2_CASCADE_CHAN)) sh_dmac_xfer_dma()
222 __raw_writel(chan->sar, (dma_base_addr(chan->chan) + SAR)); sh_dmac_xfer_dma()
223 if (chan->dar || (mach_is_dreamcast() && sh_dmac_xfer_dma()
224 chan->chan == PVR2_CASCADE_CHAN)) sh_dmac_xfer_dma()
225 __raw_writel(chan->dar, (dma_base_addr(chan->chan) + DAR)); sh_dmac_xfer_dma()
227 __raw_writel(chan->count >> calc_xmit_shift(chan), sh_dmac_xfer_dma()
228 (dma_base_addr(chan->chan) + TCR)); sh_dmac_xfer_dma()
230 sh_dmac_enable_dma(chan); sh_dmac_xfer_dma()
235 static int sh_dmac_get_dma_residue(struct dma_channel *chan) sh_dmac_get_dma_residue() argument
237 if (!(__raw_readl(dma_base_addr(chan->chan) + CHCR) & CHCR_DE)) sh_dmac_get_dma_residue()
240 return __raw_readl(dma_base_addr(chan->chan) + TCR) sh_dmac_get_dma_residue()
241 << calc_xmit_shift(chan); sh_dmac_get_dma_residue()
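
sh_dmac_xfer_dma() above programs each channel through a 0x10-byte register window. A sketch of that sequence, assuming the classic SH DMAC offsets (SAR 0x0, DAR 0x4, TCR 0x8, CHCR 0xc) and the DE enable bit; treat the constants as illustrative:

    static void sh_dmac_kick_sketch(unsigned long base, unsigned long src,
                    unsigned long dst, unsigned long count,
                    unsigned int xmit_shift, u32 chcr_flags)
    {
        __raw_writel(src, base + 0x0);                  /* SAR */
        __raw_writel(dst, base + 0x4);                  /* DAR */
        __raw_writel(count >> xmit_shift, base + 0x8);  /* TCR: transfer units */
        __raw_writel(chcr_flags | 0x1, base + 0xc);     /* CHCR: DE starts it */
    }
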
H A Ddma-g2.c64 struct dma_channel *chan = info->channels + i; g2_dma_interrupt() local
66 wake_up(&chan->wait_queue); g2_dma_interrupt()
76 static int g2_enable_dma(struct dma_channel *chan) g2_enable_dma() argument
78 unsigned int chan_nr = chan->chan; g2_enable_dma()
86 static int g2_disable_dma(struct dma_channel *chan) g2_disable_dma() argument
88 unsigned int chan_nr = chan->chan; g2_disable_dma()
96 static int g2_xfer_dma(struct dma_channel *chan) g2_xfer_dma() argument
98 unsigned int chan_nr = chan->chan; g2_xfer_dma()
100 if (chan->sar & 31) { g2_xfer_dma()
101 printk("g2dma: unaligned source 0x%lx\n", chan->sar); g2_xfer_dma()
105 if (chan->dar & 31) { g2_xfer_dma()
106 printk("g2dma: unaligned dest 0x%lx\n", chan->dar); g2_xfer_dma()
111 if (chan->count & 31) g2_xfer_dma()
112 chan->count = (chan->count + (32 - 1)) & ~(32 - 1); g2_xfer_dma()
115 chan->dar += 0xa0800000; g2_xfer_dma()
118 chan->mode = !chan->mode; g2_xfer_dma()
120 flush_icache_range((unsigned long)chan->sar, chan->count); g2_xfer_dma()
122 g2_disable_dma(chan); g2_xfer_dma()
124 g2_dma->channel[chan_nr].g2_addr = chan->dar & 0x1fffffe0; g2_xfer_dma()
125 g2_dma->channel[chan_nr].root_addr = chan->sar & 0x1fffffe0; g2_xfer_dma()
126 g2_dma->channel[chan_nr].size = (chan->count & ~31) | 0x80000000; g2_xfer_dma()
127 g2_dma->channel[chan_nr].direction = chan->mode; g2_xfer_dma()
136 g2_enable_dma(chan); g2_xfer_dma()
139 pr_debug("count, sar, dar, mode, ctrl, chan, xfer: %ld, 0x%08lx, " g2_xfer_dma()
152 static int g2_get_residue(struct dma_channel *chan) g2_get_residue() argument
154 return g2_bytes_remaining(chan->chan); g2_get_residue()
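
The 32-byte rounding in g2_xfer_dma() above is the standard align-up idiom; in-tree code usually spells it with the ALIGN() helper from <linux/kernel.h>:

    chan->count = ALIGN(chan->count, 32);   /* same as (count + 31) & ~31 */
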
H A Ddma-api.c27 struct dma_info *get_dma_info(unsigned int chan) get_dma_info() argument
36 if ((chan < info->first_vchannel_nr) || get_dma_info()
37 (chan >= info->first_vchannel_nr + info->nr_channels)) get_dma_info()
76 struct dma_channel *get_dma_channel(unsigned int chan) get_dma_channel() argument
78 struct dma_info *info = get_dma_info(chan); get_dma_channel()
87 if (channel->vchan == chan) get_dma_channel()
95 int get_dma_residue(unsigned int chan) get_dma_residue() argument
97 struct dma_info *info = get_dma_info(chan); get_dma_residue()
98 struct dma_channel *channel = get_dma_channel(chan); get_dma_residue()
158 if (request_dma(channel->chan, dev_id) == 0) request_dma_bycap()
159 return channel->chan; request_dma_bycap()
188 return channel->chan; dmac_search_free_channel()
194 int request_dma(unsigned int chan, const char *dev_id) request_dma() argument
197 struct dma_info *info = get_dma_info(chan); request_dma()
200 channel = get_dma_channel(chan); request_dma()
218 void free_dma(unsigned int chan) free_dma() argument
220 struct dma_info *info = get_dma_info(chan); free_dma()
221 struct dma_channel *channel = get_dma_channel(chan); free_dma()
230 void dma_wait_for_completion(unsigned int chan) dma_wait_for_completion() argument
232 struct dma_info *info = get_dma_info(chan); dma_wait_for_completion()
233 struct dma_channel *channel = get_dma_channel(chan); dma_wait_for_completion()
275 void dma_configure_channel(unsigned int chan, unsigned long flags) dma_configure_channel() argument
277 struct dma_info *info = get_dma_info(chan); dma_configure_channel()
278 struct dma_channel *channel = get_dma_channel(chan); dma_configure_channel()
285 int dma_xfer(unsigned int chan, unsigned long from, dma_xfer() argument
288 struct dma_info *info = get_dma_info(chan); dma_xfer()
289 struct dma_channel *channel = get_dma_channel(chan); dma_xfer()
300 int dma_extend(unsigned int chan, unsigned long op, void *param) dma_extend() argument
302 struct dma_info *info = get_dma_info(chan); dma_extend()
303 struct dma_channel *channel = get_dma_channel(chan); dma_extend()
386 struct dma_channel *chan = &info->channels[i]; register_dmac() local
388 atomic_set(&chan->busy, 0); register_dmac()
390 chan->chan = info->first_channel_nr + i; register_dmac()
391 chan->vchan = info->first_channel_nr + i + total_channels; register_dmac()
393 memcpy(chan->dev_id, "Unused", 7); register_dmac()
396 chan->flags |= DMA_TEI_CAPABLE; register_dmac()
398 init_waitqueue_head(&chan->wait_queue); register_dmac()
399 dma_create_sysfs_files(chan, info); register_dmac()
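
A client of the API excerpted above claims a channel, configures it, starts a transfer, waits, then releases. A sketch of that sequence; channel 0 is illustrative, and the dma_xfer() argument order (from, to, size, mode) follows the prototype shown above:

    static int sh_dma_client_sketch(unsigned long src, unsigned long dst,
                    size_t len, unsigned int mode)
    {
        if (request_dma(0, "demo-client") != 0)     /* 0 means success */
            return -EBUSY;

        dma_configure_channel(0, mode);
        dma_xfer(0, src, dst, len, mode);
        dma_wait_for_completion(0);     /* sleeps on the channel waitqueue */

        pr_info("residue: %d\n", get_dma_residue(0));
        free_dma(0);
        return 0;
    }
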
H A Ddma-pvr2.c41 static int pvr2_request_dma(struct dma_channel *chan) pvr2_request_dma() argument
51 static int pvr2_get_dma_residue(struct dma_channel *chan) pvr2_get_dma_residue() argument
56 static int pvr2_xfer_dma(struct dma_channel *chan) pvr2_xfer_dma() argument
58 if (chan->sar || !chan->dar) pvr2_xfer_dma()
63 __raw_writel(chan->dar, PVR2_DMA_ADDR); pvr2_xfer_dma()
64 __raw_writel(chan->count, PVR2_DMA_COUNT); pvr2_xfer_dma()
65 __raw_writel(chan->mode & DMA_MODE_MASK, PVR2_DMA_MODE); pvr2_xfer_dma()
H A Ddma-sysfs.c40 channel->chan, info->name, dma_show_devices()
124 int dma_create_sysfs_files(struct dma_channel *chan, struct dma_info *info) dma_create_sysfs_files() argument
126 struct device *dev = &chan->dev; dma_create_sysfs_files()
130 dev->id = chan->vchan; dma_create_sysfs_files()
148 snprintf(name, sizeof(name), "dma%d", chan->chan); dma_create_sysfs_files()
152 void dma_remove_sysfs_files(struct dma_channel *chan, struct dma_info *info) dma_remove_sysfs_files() argument
154 struct device *dev = &chan->dev; dma_remove_sysfs_files()
163 snprintf(name, sizeof(name), "dma%d", chan->chan); dma_remove_sysfs_files()
/linux-4.1.27/drivers/dma/
H A Dfsldma.c43 #define chan_dbg(chan, fmt, arg...) \
44 dev_dbg(chan->dev, "%s: " fmt, chan->name, ##arg)
45 #define chan_err(chan, fmt, arg...) \
46 dev_err(chan->dev, "%s: " fmt, chan->name, ##arg)
54 static void set_sr(struct fsldma_chan *chan, u32 val) set_sr() argument
56 DMA_OUT(chan, &chan->regs->sr, val, 32); set_sr()
59 static u32 get_sr(struct fsldma_chan *chan) get_sr() argument
61 return DMA_IN(chan, &chan->regs->sr, 32); get_sr()
64 static void set_mr(struct fsldma_chan *chan, u32 val) set_mr() argument
66 DMA_OUT(chan, &chan->regs->mr, val, 32); set_mr()
69 static u32 get_mr(struct fsldma_chan *chan) get_mr() argument
71 return DMA_IN(chan, &chan->regs->mr, 32); get_mr()
74 static void set_cdar(struct fsldma_chan *chan, dma_addr_t addr) set_cdar() argument
76 DMA_OUT(chan, &chan->regs->cdar, addr | FSL_DMA_SNEN, 64); set_cdar()
79 static dma_addr_t get_cdar(struct fsldma_chan *chan) get_cdar() argument
81 return DMA_IN(chan, &chan->regs->cdar, 64) & ~FSL_DMA_SNEN; get_cdar()
84 static void set_bcr(struct fsldma_chan *chan, u32 val) set_bcr() argument
86 DMA_OUT(chan, &chan->regs->bcr, val, 32); set_bcr()
89 static u32 get_bcr(struct fsldma_chan *chan) get_bcr() argument
91 return DMA_IN(chan, &chan->regs->bcr, 32); get_bcr()
98 static void set_desc_cnt(struct fsldma_chan *chan, set_desc_cnt() argument
101 hw->count = CPU_TO_DMA(chan, count, 32); set_desc_cnt()
104 static void set_desc_src(struct fsldma_chan *chan, set_desc_src() argument
109 snoop_bits = ((chan->feature & FSL_DMA_IP_MASK) == FSL_DMA_IP_85XX) set_desc_src()
111 hw->src_addr = CPU_TO_DMA(chan, snoop_bits | src, 64); set_desc_src()
114 static void set_desc_dst(struct fsldma_chan *chan, set_desc_dst() argument
119 snoop_bits = ((chan->feature & FSL_DMA_IP_MASK) == FSL_DMA_IP_85XX) set_desc_dst()
121 hw->dst_addr = CPU_TO_DMA(chan, snoop_bits | dst, 64); set_desc_dst()
124 static void set_desc_next(struct fsldma_chan *chan, set_desc_next() argument
129 snoop_bits = ((chan->feature & FSL_DMA_IP_MASK) == FSL_DMA_IP_83XX) set_desc_next()
131 hw->next_ln_addr = CPU_TO_DMA(chan, snoop_bits | next, 64); set_desc_next()
134 static void set_ld_eol(struct fsldma_chan *chan, struct fsl_desc_sw *desc) set_ld_eol() argument
138 snoop_bits = ((chan->feature & FSL_DMA_IP_MASK) == FSL_DMA_IP_83XX) set_ld_eol()
141 desc->hw.next_ln_addr = CPU_TO_DMA(chan, set_ld_eol()
142 DMA_TO_CPU(chan, desc->hw.next_ln_addr, 64) | FSL_DMA_EOL set_ld_eol()
150 static void dma_init(struct fsldma_chan *chan) dma_init() argument
153 set_mr(chan, 0); dma_init()
155 switch (chan->feature & FSL_DMA_IP_MASK) { dma_init()
162 set_mr(chan, FSL_DMA_MR_BWC | FSL_DMA_MR_EIE dma_init()
170 set_mr(chan, FSL_DMA_MR_EOTIE | FSL_DMA_MR_PRC_RM); dma_init()
175 static int dma_is_idle(struct fsldma_chan *chan) dma_is_idle() argument
177 u32 sr = get_sr(chan); dma_is_idle()
188 static void dma_start(struct fsldma_chan *chan) dma_start() argument
192 mode = get_mr(chan); dma_start()
194 if (chan->feature & FSL_DMA_CHAN_PAUSE_EXT) { dma_start()
195 set_bcr(chan, 0); dma_start()
201 if (chan->feature & FSL_DMA_CHAN_START_EXT) { dma_start()
208 set_mr(chan, mode); dma_start()
211 static void dma_halt(struct fsldma_chan *chan) dma_halt() argument
217 mode = get_mr(chan); dma_halt()
224 if ((chan->feature & FSL_DMA_IP_MASK) == FSL_DMA_IP_85XX) { dma_halt()
226 set_mr(chan, mode); dma_halt()
233 set_mr(chan, mode); dma_halt()
237 if (dma_is_idle(chan)) dma_halt()
243 if (!dma_is_idle(chan)) dma_halt()
244 chan_err(chan, "DMA halt timeout!\n"); dma_halt()
249 * @chan : Freescale DMA channel
258 static void fsl_chan_set_src_loop_size(struct fsldma_chan *chan, int size) fsl_chan_set_src_loop_size() argument
262 mode = get_mr(chan); fsl_chan_set_src_loop_size()
276 set_mr(chan, mode); fsl_chan_set_src_loop_size()
281 * @chan : Freescale DMA channel
290 static void fsl_chan_set_dst_loop_size(struct fsldma_chan *chan, int size) fsl_chan_set_dst_loop_size() argument
294 mode = get_mr(chan); fsl_chan_set_dst_loop_size()
308 set_mr(chan, mode); fsl_chan_set_dst_loop_size()
313 * @chan : Freescale DMA channel
323 static void fsl_chan_set_request_count(struct fsldma_chan *chan, int size) fsl_chan_set_request_count() argument
329 mode = get_mr(chan); fsl_chan_set_request_count()
332 set_mr(chan, mode); fsl_chan_set_request_count()
337 * @chan : Freescale DMA channel
344 static void fsl_chan_toggle_ext_pause(struct fsldma_chan *chan, int enable) fsl_chan_toggle_ext_pause() argument
347 chan->feature |= FSL_DMA_CHAN_PAUSE_EXT; fsl_chan_toggle_ext_pause()
349 chan->feature &= ~FSL_DMA_CHAN_PAUSE_EXT; fsl_chan_toggle_ext_pause()
354 * @chan : Freescale DMA channel
362 static void fsl_chan_toggle_ext_start(struct fsldma_chan *chan, int enable) fsl_chan_toggle_ext_start() argument
365 chan->feature |= FSL_DMA_CHAN_START_EXT; fsl_chan_toggle_ext_start()
367 chan->feature &= ~FSL_DMA_CHAN_START_EXT; fsl_chan_toggle_ext_start()
372 struct fsldma_chan *chan; fsl_dma_external_start() local
377 chan = to_fsl_chan(dchan); fsl_dma_external_start()
379 fsl_chan_toggle_ext_start(chan, enable); fsl_dma_external_start()
384 static void append_ld_queue(struct fsldma_chan *chan, struct fsl_desc_sw *desc) append_ld_queue() argument
386 struct fsl_desc_sw *tail = to_fsl_desc(chan->ld_pending.prev); append_ld_queue()
388 if (list_empty(&chan->ld_pending)) append_ld_queue()
398 set_desc_next(chan, &tail->hw, desc->async_tx.phys); append_ld_queue()
405 list_splice_tail_init(&desc->tx_list, &chan->ld_pending); append_ld_queue()
410 struct fsldma_chan *chan = to_fsl_chan(tx->chan); fsl_dma_tx_submit() local
415 spin_lock_bh(&chan->desc_lock); fsl_dma_tx_submit()
418 if (unlikely(chan->pm_state != RUNNING)) { fsl_dma_tx_submit()
419 chan_dbg(chan, "cannot submit due to suspend\n"); fsl_dma_tx_submit()
420 spin_unlock_bh(&chan->desc_lock); fsl_dma_tx_submit()
434 append_ld_queue(chan, desc); fsl_dma_tx_submit()
436 spin_unlock_bh(&chan->desc_lock); fsl_dma_tx_submit()
443 * @chan : Freescale DMA channel
446 static void fsl_dma_free_descriptor(struct fsldma_chan *chan, fsl_dma_free_descriptor() argument
450 chan_dbg(chan, "LD %p free\n", desc); fsl_dma_free_descriptor()
451 dma_pool_free(chan->desc_pool, desc, desc->async_tx.phys); fsl_dma_free_descriptor()
456 * @chan : Freescale DMA channel
460 static struct fsl_desc_sw *fsl_dma_alloc_descriptor(struct fsldma_chan *chan) fsl_dma_alloc_descriptor() argument
465 desc = dma_pool_alloc(chan->desc_pool, GFP_ATOMIC, &pdesc); fsl_dma_alloc_descriptor()
467 chan_dbg(chan, "out of memory for link descriptor\n"); fsl_dma_alloc_descriptor()
473 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); fsl_dma_alloc_descriptor()
477 chan_dbg(chan, "LD %p allocated\n", desc); fsl_dma_alloc_descriptor()
485 * @chan: Freescale DMA channel
490 static void fsldma_clean_completed_descriptor(struct fsldma_chan *chan) fsldma_clean_completed_descriptor() argument
495 list_for_each_entry_safe(desc, _desc, &chan->ld_completed, node) fsldma_clean_completed_descriptor()
497 fsl_dma_free_descriptor(chan, desc); fsldma_clean_completed_descriptor()
502 * @chan: Freescale DMA channel
509 static dma_cookie_t fsldma_run_tx_complete_actions(struct fsldma_chan *chan, fsldma_run_tx_complete_actions() argument
522 chan_dbg(chan, "LD %p callback\n", desc); fsldma_run_tx_complete_actions()
536 * @chan: Freescale DMA channel
542 static void fsldma_clean_running_descriptor(struct fsldma_chan *chan, fsldma_clean_running_descriptor() argument
557 list_add_tail(&desc->node, &chan->ld_completed); fsldma_clean_running_descriptor()
561 dma_pool_free(chan->desc_pool, desc, desc->async_tx.phys); fsldma_clean_running_descriptor()
566 * @chan : Freescale DMA channel
569 * LOCKING: must hold chan->desc_lock
571 static void fsl_chan_xfer_ld_queue(struct fsldma_chan *chan) fsl_chan_xfer_ld_queue() argument
579 if (list_empty(&chan->ld_pending)) { fsl_chan_xfer_ld_queue()
580 chan_dbg(chan, "no pending LDs\n"); fsl_chan_xfer_ld_queue()
589 if (!chan->idle) { fsl_chan_xfer_ld_queue()
590 chan_dbg(chan, "DMA controller still busy\n"); fsl_chan_xfer_ld_queue()
603 chan_dbg(chan, "idle, starting controller\n"); fsl_chan_xfer_ld_queue()
604 desc = list_first_entry(&chan->ld_pending, struct fsl_desc_sw, node); fsl_chan_xfer_ld_queue()
605 list_splice_tail_init(&chan->ld_pending, &chan->ld_running); fsl_chan_xfer_ld_queue()
612 if ((chan->feature & FSL_DMA_IP_MASK) == FSL_DMA_IP_85XX) { fsl_chan_xfer_ld_queue()
615 mode = get_mr(chan); fsl_chan_xfer_ld_queue()
617 set_mr(chan, mode); fsl_chan_xfer_ld_queue()
624 set_cdar(chan, desc->async_tx.phys); fsl_chan_xfer_ld_queue()
625 get_cdar(chan); fsl_chan_xfer_ld_queue()
627 dma_start(chan); fsl_chan_xfer_ld_queue()
628 chan->idle = false; fsl_chan_xfer_ld_queue()
634 * @chan: Freescale DMA channel
640 static void fsldma_cleanup_descriptors(struct fsldma_chan *chan) fsldma_cleanup_descriptors() argument
644 dma_addr_t curr_phys = get_cdar(chan); fsldma_cleanup_descriptors()
647 fsldma_clean_completed_descriptor(chan); fsldma_cleanup_descriptors()
650 list_for_each_entry_safe(desc, _desc, &chan->ld_running, node) { fsldma_cleanup_descriptors()
665 if (!dma_is_idle(chan)) fsldma_cleanup_descriptors()
669 cookie = fsldma_run_tx_complete_actions(chan, desc, cookie); fsldma_cleanup_descriptors()
671 fsldma_clean_running_descriptor(chan, desc); fsldma_cleanup_descriptors()
680 fsl_chan_xfer_ld_queue(chan); fsldma_cleanup_descriptors()
683 chan->common.completed_cookie = cookie; fsldma_cleanup_descriptors()
688 * @chan : Freescale DMA channel
696 struct fsldma_chan *chan = to_fsl_chan(dchan); fsl_dma_alloc_chan_resources() local
699 if (chan->desc_pool) fsl_dma_alloc_chan_resources()
706 chan->desc_pool = dma_pool_create(chan->name, chan->dev, fsl_dma_alloc_chan_resources()
709 if (!chan->desc_pool) { fsl_dma_alloc_chan_resources()
710 chan_err(chan, "unable to allocate descriptor pool\n"); fsl_dma_alloc_chan_resources()
720 * @chan: Freescale DMA channel fsl_dma_alloc_chan_resources()
723 * LOCKING: must hold chan->desc_lock
725 static void fsldma_free_desc_list(struct fsldma_chan *chan, fsldma_free_desc_list() argument
731 fsl_dma_free_descriptor(chan, desc); fsldma_free_desc_list()
734 static void fsldma_free_desc_list_reverse(struct fsldma_chan *chan, fsldma_free_desc_list_reverse() argument
740 fsl_dma_free_descriptor(chan, desc); fsldma_free_desc_list_reverse()
745 * @chan : Freescale DMA channel
749 struct fsldma_chan *chan = to_fsl_chan(dchan); fsl_dma_free_chan_resources() local
751 chan_dbg(chan, "free all channel resources\n"); fsl_dma_free_chan_resources()
752 spin_lock_bh(&chan->desc_lock); fsl_dma_free_chan_resources()
753 fsldma_cleanup_descriptors(chan); fsl_dma_free_chan_resources()
754 fsldma_free_desc_list(chan, &chan->ld_pending); fsl_dma_free_chan_resources()
755 fsldma_free_desc_list(chan, &chan->ld_running); fsl_dma_free_chan_resources()
756 fsldma_free_desc_list(chan, &chan->ld_completed); fsl_dma_free_chan_resources()
757 spin_unlock_bh(&chan->desc_lock); fsl_dma_free_chan_resources()
759 dma_pool_destroy(chan->desc_pool); fsl_dma_free_chan_resources()
760 chan->desc_pool = NULL; fsl_dma_free_chan_resources()
768 struct fsldma_chan *chan; fsl_dma_prep_memcpy() local
778 chan = to_fsl_chan(dchan); fsl_dma_prep_memcpy()
783 new = fsl_dma_alloc_descriptor(chan); fsl_dma_prep_memcpy()
785 chan_err(chan, "%s\n", msg_ld_oom); fsl_dma_prep_memcpy()
791 set_desc_cnt(chan, &new->hw, copy); fsl_dma_prep_memcpy()
792 set_desc_src(chan, &new->hw, dma_src); fsl_dma_prep_memcpy()
793 set_desc_dst(chan, &new->hw, dma_dst); fsl_dma_prep_memcpy()
798 set_desc_next(chan, &prev->hw, new->async_tx.phys); fsl_dma_prep_memcpy()
816 set_ld_eol(chan, new); fsl_dma_prep_memcpy()
824 fsldma_free_desc_list_reverse(chan, &first->tx_list); fsl_dma_prep_memcpy()
834 struct fsldma_chan *chan = to_fsl_chan(dchan); fsl_dma_prep_sg() local
868 new = fsl_dma_alloc_descriptor(chan); fsl_dma_prep_sg()
870 chan_err(chan, "%s\n", msg_ld_oom); fsl_dma_prep_sg()
874 set_desc_cnt(chan, &new->hw, len); fsl_dma_prep_sg()
875 set_desc_src(chan, &new->hw, src); fsl_dma_prep_sg()
876 set_desc_dst(chan, &new->hw, dst); fsl_dma_prep_sg()
881 set_desc_next(chan, &prev->hw, new->async_tx.phys); fsl_dma_prep_sg()
932 set_ld_eol(chan, new); fsl_dma_prep_sg()
940 fsldma_free_desc_list_reverse(chan, &first->tx_list); fsl_dma_prep_sg()
946 struct fsldma_chan *chan; fsl_dma_device_terminate_all() local
951 chan = to_fsl_chan(dchan); fsl_dma_device_terminate_all()
953 spin_lock_bh(&chan->desc_lock); fsl_dma_device_terminate_all()
956 dma_halt(chan); fsl_dma_device_terminate_all()
959 fsldma_free_desc_list(chan, &chan->ld_pending); fsl_dma_device_terminate_all()
960 fsldma_free_desc_list(chan, &chan->ld_running); fsl_dma_device_terminate_all()
961 fsldma_free_desc_list(chan, &chan->ld_completed); fsl_dma_device_terminate_all()
962 chan->idle = true; fsl_dma_device_terminate_all()
964 spin_unlock_bh(&chan->desc_lock); fsl_dma_device_terminate_all()
971 struct fsldma_chan *chan; fsl_dma_device_config() local
977 chan = to_fsl_chan(dchan); fsl_dma_device_config()
980 if (!chan->set_request_count) fsl_dma_device_config()
989 chan->set_request_count(chan, size); fsl_dma_device_config()
996 * @chan : Freescale DMA channel
1000 struct fsldma_chan *chan = to_fsl_chan(dchan); fsl_dma_memcpy_issue_pending() local
1002 spin_lock_bh(&chan->desc_lock); fsl_dma_memcpy_issue_pending()
1003 fsl_chan_xfer_ld_queue(chan); fsl_dma_memcpy_issue_pending()
1004 spin_unlock_bh(&chan->desc_lock); fsl_dma_memcpy_issue_pending()
1009 * @chan : Freescale DMA channel
1015 struct fsldma_chan *chan = to_fsl_chan(dchan); fsl_tx_status() local
1022 spin_lock_bh(&chan->desc_lock); fsl_tx_status()
1023 fsldma_cleanup_descriptors(chan); fsl_tx_status()
1024 spin_unlock_bh(&chan->desc_lock); fsl_tx_status()
1035 struct fsldma_chan *chan = data; fsldma_chan_irq() local
1039 stat = get_sr(chan); fsldma_chan_irq()
1040 set_sr(chan, stat); fsldma_chan_irq()
1041 chan_dbg(chan, "irq: stat = 0x%x\n", stat); fsldma_chan_irq()
1049 chan_err(chan, "Transfer Error!\n"); fsldma_chan_irq()
1057 chan_dbg(chan, "irq: Programming Error INT\n"); fsldma_chan_irq()
1059 if (get_bcr(chan) != 0) fsldma_chan_irq()
1060 chan_err(chan, "Programming Error!\n"); fsldma_chan_irq()
1068 chan_dbg(chan, "irq: End-of-Chain link INT\n"); fsldma_chan_irq()
1078 chan_dbg(chan, "irq: End-of-link INT\n"); fsldma_chan_irq()
1083 if (!dma_is_idle(chan)) fsldma_chan_irq()
1084 chan_err(chan, "irq: controller not idle!\n"); fsldma_chan_irq()
1088 chan_err(chan, "irq: unhandled sr 0x%08x\n", stat); fsldma_chan_irq()
1095 tasklet_schedule(&chan->tasklet); fsldma_chan_irq()
1096 chan_dbg(chan, "irq: Exit\n"); fsldma_chan_irq()
1102 struct fsldma_chan *chan = (struct fsldma_chan *)data; dma_do_tasklet() local
1104 chan_dbg(chan, "tasklet entry\n"); dma_do_tasklet()
1106 spin_lock_bh(&chan->desc_lock); dma_do_tasklet()
1109 chan->idle = true; dma_do_tasklet()
1112 fsldma_cleanup_descriptors(chan); dma_do_tasklet()
1114 spin_unlock_bh(&chan->desc_lock); dma_do_tasklet()
1116 chan_dbg(chan, "tasklet exit\n"); dma_do_tasklet()
1122 struct fsldma_chan *chan; fsldma_ctrl_irq() local
1133 chan = fdev->chan[i]; fsldma_ctrl_irq()
1134 if (!chan) fsldma_ctrl_irq()
1138 dev_dbg(fdev->dev, "IRQ: chan %d\n", chan->id); fsldma_ctrl_irq()
1139 fsldma_chan_irq(irq, chan); fsldma_ctrl_irq()
1152 struct fsldma_chan *chan; fsldma_free_irqs() local
1162 chan = fdev->chan[i]; fsldma_free_irqs()
1163 if (chan && chan->irq != NO_IRQ) { fsldma_free_irqs()
1164 chan_dbg(chan, "free per-channel IRQ\n"); fsldma_free_irqs()
1165 free_irq(chan->irq, chan); fsldma_free_irqs()
1172 struct fsldma_chan *chan; fsldma_request_irqs() local
1186 chan = fdev->chan[i]; fsldma_request_irqs()
1187 if (!chan) fsldma_request_irqs()
1190 if (chan->irq == NO_IRQ) { fsldma_request_irqs()
1191 chan_err(chan, "interrupts property missing in device tree\n"); fsldma_request_irqs()
1196 chan_dbg(chan, "request per-channel IRQ\n"); fsldma_request_irqs()
1197 ret = request_irq(chan->irq, fsldma_chan_irq, IRQF_SHARED, fsldma_request_irqs()
1198 "fsldma-chan", chan); fsldma_request_irqs()
1200 chan_err(chan, "unable to request per-channel IRQ\n"); fsldma_request_irqs()
1209 chan = fdev->chan[i]; fsldma_request_irqs()
1210 if (!chan) fsldma_request_irqs()
1213 if (chan->irq == NO_IRQ) fsldma_request_irqs()
1216 free_irq(chan->irq, chan); fsldma_request_irqs()
1229 struct fsldma_chan *chan; fsl_dma_chan_probe() local
1234 chan = kzalloc(sizeof(*chan), GFP_KERNEL); fsl_dma_chan_probe()
1235 if (!chan) { fsl_dma_chan_probe()
1242 chan->regs = of_iomap(node, 0); fsl_dma_chan_probe()
1243 if (!chan->regs) { fsl_dma_chan_probe()
1255 chan->feature = feature; fsl_dma_chan_probe()
1257 fdev->feature = chan->feature; fsl_dma_chan_probe()
1263 WARN_ON(fdev->feature != chan->feature); fsl_dma_chan_probe()
1265 chan->dev = fdev->dev; fsl_dma_chan_probe()
1266 chan->id = (res.start & 0xfff) < 0x300 ? fsl_dma_chan_probe()
1269 if (chan->id >= FSL_DMA_MAX_CHANS_PER_DEVICE) { fsl_dma_chan_probe()
1275 fdev->chan[chan->id] = chan; fsl_dma_chan_probe()
1276 tasklet_init(&chan->tasklet, dma_do_tasklet, (unsigned long)chan); fsl_dma_chan_probe()
1277 snprintf(chan->name, sizeof(chan->name), "chan%d", chan->id); fsl_dma_chan_probe()
1280 dma_init(chan); fsl_dma_chan_probe()
1283 set_cdar(chan, 0); fsl_dma_chan_probe()
1285 switch (chan->feature & FSL_DMA_IP_MASK) { fsl_dma_chan_probe()
1287 chan->toggle_ext_pause = fsl_chan_toggle_ext_pause; fsl_dma_chan_probe()
1289 chan->toggle_ext_start = fsl_chan_toggle_ext_start; fsl_dma_chan_probe()
1290 chan->set_src_loop_size = fsl_chan_set_src_loop_size; fsl_dma_chan_probe()
1291 chan->set_dst_loop_size = fsl_chan_set_dst_loop_size; fsl_dma_chan_probe()
1292 chan->set_request_count = fsl_chan_set_request_count; fsl_dma_chan_probe()
1295 spin_lock_init(&chan->desc_lock); fsl_dma_chan_probe()
1296 INIT_LIST_HEAD(&chan->ld_pending); fsl_dma_chan_probe()
1297 INIT_LIST_HEAD(&chan->ld_running); fsl_dma_chan_probe()
1298 INIT_LIST_HEAD(&chan->ld_completed); fsl_dma_chan_probe()
1299 chan->idle = true; fsl_dma_chan_probe()
1301 chan->pm_state = RUNNING; fsl_dma_chan_probe()
1304 chan->common.device = &fdev->common; fsl_dma_chan_probe()
1305 dma_cookie_init(&chan->common); fsl_dma_chan_probe()
1308 chan->irq = irq_of_parse_and_map(node, 0); fsl_dma_chan_probe()
1311 list_add_tail(&chan->common.device_node, &fdev->common.channels); fsl_dma_chan_probe()
1313 dev_info(fdev->dev, "#%d (%s), irq %d\n", chan->id, compatible, fsl_dma_chan_probe()
1314 chan->irq != NO_IRQ ? chan->irq : fdev->irq); fsl_dma_chan_probe()
1319 iounmap(chan->regs); fsl_dma_chan_probe()
1321 kfree(chan); fsl_dma_chan_probe()
1326 static void fsl_dma_chan_remove(struct fsldma_chan *chan) fsl_dma_chan_remove() argument
1328 irq_dispose_mapping(chan->irq); fsl_dma_chan_remove()
1329 list_del(&chan->common.device_node); fsl_dma_chan_remove()
1330 iounmap(chan->regs); fsl_dma_chan_remove()
1331 kfree(chan); fsl_dma_chan_remove()
1436 if (fdev->chan[i]) fsldma_of_remove()
1437 fsl_dma_chan_remove(fdev->chan[i]); fsldma_of_remove()
1451 struct fsldma_chan *chan; fsldma_suspend_late() local
1455 chan = fdev->chan[i]; fsldma_suspend_late()
1456 if (!chan) fsldma_suspend_late()
1459 spin_lock_bh(&chan->desc_lock); fsldma_suspend_late()
1460 if (unlikely(!chan->idle)) fsldma_suspend_late()
1462 chan->regs_save.mr = get_mr(chan); fsldma_suspend_late()
1463 chan->pm_state = SUSPENDED; fsldma_suspend_late()
1464 spin_unlock_bh(&chan->desc_lock); fsldma_suspend_late()
1470 chan = fdev->chan[i]; fsldma_suspend_late()
1471 if (!chan) fsldma_suspend_late()
1473 chan->pm_state = RUNNING; fsldma_suspend_late()
1474 spin_unlock_bh(&chan->desc_lock); fsldma_suspend_late()
1483 struct fsldma_chan *chan; fsldma_resume_early() local
1488 chan = fdev->chan[i]; fsldma_resume_early()
1489 if (!chan) fsldma_resume_early()
1492 spin_lock_bh(&chan->desc_lock); fsldma_resume_early()
1493 mode = chan->regs_save.mr fsldma_resume_early()
1495 set_mr(chan, mode); fsldma_resume_early()
1496 chan->pm_state = RUNNING; fsldma_resume_early()
1497 spin_unlock_bh(&chan->desc_lock); fsldma_resume_early()
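The fsldma excerpts above follow the standard dmaengine terminate/cleanup pattern: stop the hardware first, then free the pending, running, and completed descriptor lists while holding the channel's descriptor lock. A minimal sketch of that pattern, stitched together from the fragments above (not the complete driver function):

	/* Sketch of fsl_dma_device_terminate_all() as reconstructed from the
	 * excerpts above; assumes the fsldma_chan fields and helpers shown. */
	static int sketch_fsl_terminate_all(struct dma_chan *dchan)
	{
		struct fsldma_chan *chan = to_fsl_chan(dchan);

		spin_lock_bh(&chan->desc_lock);

		dma_halt(chan);			/* stop the hardware first */

		/* then drop every descriptor, whatever state it was in */
		fsldma_free_desc_list(chan, &chan->ld_pending);
		fsldma_free_desc_list(chan, &chan->ld_running);
		fsldma_free_desc_list(chan, &chan->ld_completed);
		chan->idle = true;

		spin_unlock_bh(&chan->desc_lock);
		return 0;
	}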
H A Ddma-jz4740.c131 struct jz4740_dmaengine_chan chan[JZ_DMA_NR_CHANS]; member in struct:jz4740_dma_dev
135 struct jz4740_dmaengine_chan *chan) jz4740_dma_chan_get_dev()
137 return container_of(chan->vchan.chan.device, struct jz4740_dma_dev, jz4740_dma_chan_get_dev()
143 return container_of(c, struct jz4740_dmaengine_chan, vchan.chan); to_jz4740_dma_chan()
211 struct jz4740_dmaengine_chan *chan = to_jz4740_dma_chan(c); jz4740_dma_slave_config() local
212 struct jz4740_dma_dev *dmadev = jz4740_dma_chan_get_dev(chan); jz4740_dma_slave_config()
223 chan->fifo_addr = config->dst_addr; jz4740_dma_slave_config()
228 chan->fifo_addr = config->src_addr; jz4740_dma_slave_config()
239 chan->transfer_shift = 1; jz4740_dma_slave_config()
242 chan->transfer_shift = 2; jz4740_dma_slave_config()
245 chan->transfer_shift = 4; jz4740_dma_slave_config()
248 chan->transfer_shift = 5; jz4740_dma_slave_config()
251 chan->transfer_shift = 0; jz4740_dma_slave_config()
262 jz4740_dma_write(dmadev, JZ_REG_DMA_CMD(chan->id), cmd); jz4740_dma_slave_config()
263 jz4740_dma_write(dmadev, JZ_REG_DMA_STATUS_CTRL(chan->id), 0); jz4740_dma_slave_config()
264 jz4740_dma_write(dmadev, JZ_REG_DMA_REQ_TYPE(chan->id), jz4740_dma_slave_config()
272 struct jz4740_dmaengine_chan *chan = to_jz4740_dma_chan(c); jz4740_dma_terminate_all() local
273 struct jz4740_dma_dev *dmadev = jz4740_dma_chan_get_dev(chan); jz4740_dma_terminate_all()
277 spin_lock_irqsave(&chan->vchan.lock, flags); jz4740_dma_terminate_all()
278 jz4740_dma_write_mask(dmadev, JZ_REG_DMA_STATUS_CTRL(chan->id), 0, jz4740_dma_terminate_all()
280 chan->desc = NULL; jz4740_dma_terminate_all()
281 vchan_get_all_descriptors(&chan->vchan, &head); jz4740_dma_terminate_all()
282 spin_unlock_irqrestore(&chan->vchan.lock, flags); jz4740_dma_terminate_all()
284 vchan_dma_desc_free_list(&chan->vchan, &head); jz4740_dma_terminate_all()
289 static int jz4740_dma_start_transfer(struct jz4740_dmaengine_chan *chan) jz4740_dma_start_transfer() argument
291 struct jz4740_dma_dev *dmadev = jz4740_dma_chan_get_dev(chan); jz4740_dma_start_transfer()
296 jz4740_dma_write_mask(dmadev, JZ_REG_DMA_STATUS_CTRL(chan->id), 0, jz4740_dma_start_transfer()
299 if (!chan->desc) { jz4740_dma_start_transfer()
300 vdesc = vchan_next_desc(&chan->vchan); jz4740_dma_start_transfer()
303 chan->desc = to_jz4740_dma_desc(vdesc); jz4740_dma_start_transfer()
304 chan->next_sg = 0; jz4740_dma_start_transfer()
307 if (chan->next_sg == chan->desc->num_sgs) jz4740_dma_start_transfer()
308 chan->next_sg = 0; jz4740_dma_start_transfer()
310 sg = &chan->desc->sg[chan->next_sg]; jz4740_dma_start_transfer()
312 if (chan->desc->direction == DMA_MEM_TO_DEV) { jz4740_dma_start_transfer()
314 dst_addr = chan->fifo_addr; jz4740_dma_start_transfer()
316 src_addr = chan->fifo_addr; jz4740_dma_start_transfer()
319 jz4740_dma_write(dmadev, JZ_REG_DMA_SRC_ADDR(chan->id), src_addr); jz4740_dma_start_transfer()
320 jz4740_dma_write(dmadev, JZ_REG_DMA_DST_ADDR(chan->id), dst_addr); jz4740_dma_start_transfer()
321 jz4740_dma_write(dmadev, JZ_REG_DMA_TRANSFER_COUNT(chan->id), jz4740_dma_start_transfer()
322 sg->len >> chan->transfer_shift); jz4740_dma_start_transfer()
324 chan->next_sg++; jz4740_dma_start_transfer()
326 jz4740_dma_write_mask(dmadev, JZ_REG_DMA_STATUS_CTRL(chan->id), jz4740_dma_start_transfer()
338 static void jz4740_dma_chan_irq(struct jz4740_dmaengine_chan *chan) jz4740_dma_chan_irq() argument
340 spin_lock(&chan->vchan.lock); jz4740_dma_chan_irq()
341 if (chan->desc) { jz4740_dma_chan_irq()
342 if (chan->desc->cyclic) { jz4740_dma_chan_irq()
343 vchan_cyclic_callback(&chan->desc->vdesc); jz4740_dma_chan_irq()
345 if (chan->next_sg == chan->desc->num_sgs) { jz4740_dma_chan_irq()
346 list_del(&chan->desc->vdesc.node); jz4740_dma_chan_irq()
347 vchan_cookie_complete(&chan->desc->vdesc); jz4740_dma_chan_irq()
348 chan->desc = NULL; jz4740_dma_chan_irq()
352 jz4740_dma_start_transfer(chan); jz4740_dma_chan_irq()
353 spin_unlock(&chan->vchan.lock); jz4740_dma_chan_irq()
371 jz4740_dma_chan_irq(&dmadev->chan[i]); jz4740_dma_irq()
380 struct jz4740_dmaengine_chan *chan = to_jz4740_dma_chan(c); jz4740_dma_issue_pending() local
383 spin_lock_irqsave(&chan->vchan.lock, flags); jz4740_dma_issue_pending()
384 if (vchan_issue_pending(&chan->vchan) && !chan->desc) jz4740_dma_issue_pending()
385 jz4740_dma_start_transfer(chan); jz4740_dma_issue_pending()
386 spin_unlock_irqrestore(&chan->vchan.lock, flags); jz4740_dma_issue_pending()
394 struct jz4740_dmaengine_chan *chan = to_jz4740_dma_chan(c); jz4740_dma_prep_slave_sg() local
412 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
420 struct jz4740_dmaengine_chan *chan = to_jz4740_dma_chan(c); jz4740_dma_prep_dma_cyclic() local
443 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); jz4740_dma_prep_dma_cyclic()
446 static size_t jz4740_dma_desc_residue(struct jz4740_dmaengine_chan *chan, jz4740_dma_desc_residue() argument
449 struct jz4740_dma_dev *dmadev = jz4740_dma_chan_get_dev(chan); jz4740_dma_desc_residue()
460 JZ_REG_DMA_TRANSFER_COUNT(chan->id)); jz4740_dma_desc_residue()
461 residue += count << chan->transfer_shift; jz4740_dma_desc_residue()
470 struct jz4740_dmaengine_chan *chan = to_jz4740_dma_chan(c); jz4740_dma_tx_status() local
479 spin_lock_irqsave(&chan->vchan.lock, flags); jz4740_dma_tx_status()
480 vdesc = vchan_find_desc(&chan->vchan, cookie); jz4740_dma_tx_status()
481 if (cookie == chan->desc->vdesc.tx.cookie) { jz4740_dma_tx_status()
482 state->residue = jz4740_dma_desc_residue(chan, chan->desc, jz4740_dma_tx_status()
483 chan->next_sg); jz4740_dma_tx_status()
485 state->residue = jz4740_dma_desc_residue(chan, jz4740_dma_tx_status()
490 spin_unlock_irqrestore(&chan->vchan.lock, flags); jz4740_dma_tx_status()
510 struct jz4740_dmaengine_chan *chan; jz4740_dma_probe() local
552 chan = &dmadev->chan[i]; jz4740_dma_probe()
553 chan->id = i; jz4740_dma_probe()
554 chan->vchan.desc_free = jz4740_dma_desc_free; jz4740_dma_probe()
555 vchan_init(&chan->vchan, dd); jz4740_dma_probe()
134 jz4740_dma_chan_get_dev( struct jz4740_dmaengine_chan *chan) jz4740_dma_chan_get_dev() argument
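jz4740_dma_slave_config() above stores log2 of the configured bus width in chan->transfer_shift, so counts programmed into JZ_REG_DMA_TRANSFER_COUNT are in bus-width units (sg->len >> shift) and the residue path converts the remaining hardware count back to bytes (count << shift). A minimal sketch of that conversion; len_bytes and hw_count are hypothetical names, not driver fields:

	/* bytes -> hardware transfer units, as programmed in
	 * jz4740_dma_start_transfer() above */
	static u32 sketch_bytes_to_hw_count(size_t len_bytes,
					    unsigned int transfer_shift)
	{
		return len_bytes >> transfer_shift;
	}

	/* hardware units -> bytes, as in jz4740_dma_desc_residue() above */
	static size_t sketch_hw_count_to_bytes(u32 hw_count,
					       unsigned int transfer_shift)
	{
		return (size_t)hw_count << transfer_shift;
	}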
H A Dnbpfaxi.c157 struct nbpf_channel *chan; member in struct:nbpf_desc
230 struct nbpf_channel chan[]; member in struct:nbpf_device
298 static inline u32 nbpf_chan_read(struct nbpf_channel *chan, nbpf_chan_read() argument
301 u32 data = ioread32(chan->base + offset); nbpf_chan_read()
302 dev_dbg(chan->dma_chan.device->dev, "%s(0x%p + 0x%x) = 0x%x\n", nbpf_chan_read()
303 __func__, chan->base, offset, data); nbpf_chan_read()
307 static inline void nbpf_chan_write(struct nbpf_channel *chan, nbpf_chan_write() argument
310 iowrite32(data, chan->base + offset); nbpf_chan_write()
311 dev_dbg(chan->dma_chan.device->dev, "%s(0x%p + 0x%x) = 0x%x\n", nbpf_chan_write()
312 __func__, chan->base, offset, data); nbpf_chan_write()
332 static void nbpf_chan_halt(struct nbpf_channel *chan) nbpf_chan_halt() argument
334 nbpf_chan_write(chan, NBPF_CHAN_CTRL, NBPF_CHAN_CTRL_CLREN); nbpf_chan_halt()
337 static bool nbpf_status_get(struct nbpf_channel *chan) nbpf_status_get() argument
339 u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END); nbpf_status_get()
341 return status & BIT(chan - chan->nbpf->chan); nbpf_status_get()
344 static void nbpf_status_ack(struct nbpf_channel *chan) nbpf_status_ack() argument
346 nbpf_chan_write(chan, NBPF_CHAN_CTRL, NBPF_CHAN_CTRL_CLREND); nbpf_status_ack()
356 return nbpf->chan + __ffs(error); nbpf_error_get_channel()
359 static void nbpf_error_clear(struct nbpf_channel *chan) nbpf_error_clear() argument
365 nbpf_chan_halt(chan); nbpf_error_clear()
368 status = nbpf_chan_read(chan, NBPF_CHAN_STAT); nbpf_error_clear()
375 dev_err(chan->dma_chan.device->dev, nbpf_error_clear()
378 nbpf_chan_write(chan, NBPF_CHAN_CTRL, NBPF_CHAN_CTRL_SWRST); nbpf_error_clear()
383 struct nbpf_channel *chan = desc->chan; nbpf_start() local
386 nbpf_chan_write(chan, NBPF_CHAN_NXLA, (u32)ldesc->hwdesc_dma_addr); nbpf_start()
387 nbpf_chan_write(chan, NBPF_CHAN_CTRL, NBPF_CHAN_CTRL_SETEN | NBPF_CHAN_CTRL_CLRSUS); nbpf_start()
388 chan->paused = false; nbpf_start()
392 nbpf_chan_write(chan, NBPF_CHAN_CTRL, NBPF_CHAN_CTRL_STG); nbpf_start()
394 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): next 0x%x, cur 0x%x\n", __func__, nbpf_start()
395 nbpf_chan_read(chan, NBPF_CHAN_NXLA), nbpf_chan_read(chan, NBPF_CHAN_CRLA)); nbpf_start()
400 static void nbpf_chan_prepare(struct nbpf_channel *chan) nbpf_chan_prepare() argument
402 chan->dmarq_cfg = (chan->flags & NBPF_SLAVE_RQ_HIGH ? NBPF_CHAN_CFG_HIEN : 0) | nbpf_chan_prepare()
403 (chan->flags & NBPF_SLAVE_RQ_LOW ? NBPF_CHAN_CFG_LOEN : 0) | nbpf_chan_prepare()
404 (chan->flags & NBPF_SLAVE_RQ_LEVEL ? nbpf_chan_prepare()
406 chan->terminal; nbpf_chan_prepare()
409 static void nbpf_chan_prepare_default(struct nbpf_channel *chan) nbpf_chan_prepare_default() argument
412 chan->dmarq_cfg = NBPF_CHAN_CFG_AM & 0x400; nbpf_chan_prepare_default()
413 chan->terminal = 0; nbpf_chan_prepare_default()
414 chan->flags = 0; nbpf_chan_prepare_default()
417 static void nbpf_chan_configure(struct nbpf_channel *chan) nbpf_chan_configure() argument
424 nbpf_chan_write(chan, NBPF_CHAN_CFG, NBPF_CHAN_CFG_DMS | chan->dmarq_cfg); nbpf_chan_configure()
481 struct nbpf_channel *chan = desc->chan; nbpf_prep_one() local
482 struct device *dev = chan->dma_chan.device->dev; nbpf_prep_one()
509 mem_xfer = nbpf_xfer_ds(chan->nbpf, size); nbpf_prep_one()
513 can_burst = chan->slave_src_width >= 3; nbpf_prep_one()
515 chan->slave_src_burst : chan->slave_src_width); nbpf_prep_one()
520 if (mem_xfer > chan->slave_src_burst && !can_burst) nbpf_prep_one()
521 mem_xfer = chan->slave_src_burst; nbpf_prep_one()
529 slave_xfer = min(mem_xfer, chan->slave_dst_width >= 3 ? nbpf_prep_one()
530 chan->slave_dst_burst : chan->slave_dst_width); nbpf_prep_one()
545 hwdesc->config |= chan->dmarq_cfg | (last ? 0 : NBPF_CHAN_CFG_DEM) | nbpf_prep_one()
558 static size_t nbpf_bytes_left(struct nbpf_channel *chan) nbpf_bytes_left() argument
560 return nbpf_chan_read(chan, NBPF_CHAN_CUR_TR_BYTE); nbpf_bytes_left()
573 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_issue_pending() local
578 spin_lock_irqsave(&chan->lock, flags); nbpf_issue_pending()
579 if (list_empty(&chan->queued)) nbpf_issue_pending()
582 list_splice_tail_init(&chan->queued, &chan->active); nbpf_issue_pending()
584 if (!chan->running) { nbpf_issue_pending()
585 struct nbpf_desc *desc = list_first_entry(&chan->active, nbpf_issue_pending()
588 chan->running = desc; nbpf_issue_pending()
592 spin_unlock_irqrestore(&chan->lock, flags); nbpf_issue_pending()
598 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_tx_status() local
605 spin_lock_irqsave(&chan->lock, flags); nbpf_tx_status()
606 running = chan->running ? chan->running->async_tx.cookie : -EINVAL; nbpf_tx_status()
609 state->residue = nbpf_bytes_left(chan); nbpf_tx_status()
616 list_for_each_entry(desc, &chan->active, node) nbpf_tx_status()
623 list_for_each_entry(desc, &chan->queued, node) nbpf_tx_status()
633 spin_unlock_irqrestore(&chan->lock, flags); nbpf_tx_status()
636 if (chan->paused) nbpf_tx_status()
645 struct nbpf_channel *chan = desc->chan; nbpf_tx_submit() local
649 spin_lock_irqsave(&chan->lock, flags); nbpf_tx_submit()
651 list_add_tail(&desc->node, &chan->queued); nbpf_tx_submit()
652 spin_unlock_irqrestore(&chan->lock, flags); nbpf_tx_submit()
654 dev_dbg(chan->dma_chan.device->dev, "Entry %s(%d)\n", __func__, cookie); nbpf_tx_submit()
659 static int nbpf_desc_page_alloc(struct nbpf_channel *chan) nbpf_desc_page_alloc() argument
661 struct dma_chan *dchan = &chan->dma_chan; nbpf_desc_page_alloc()
694 desc->chan = chan; nbpf_desc_page_alloc()
703 spin_lock_irq(&chan->lock); nbpf_desc_page_alloc()
704 list_splice_tail(&lhead, &chan->free_links); nbpf_desc_page_alloc()
705 list_splice_tail(&head, &chan->free); nbpf_desc_page_alloc()
706 list_add(&dpage->node, &chan->desc_page); nbpf_desc_page_alloc()
707 spin_unlock_irq(&chan->lock); nbpf_desc_page_alloc()
714 struct nbpf_channel *chan = desc->chan; nbpf_desc_put() local
718 spin_lock_irqsave(&chan->lock, flags); nbpf_desc_put()
720 list_move(&ldesc->node, &chan->free_links); nbpf_desc_put()
722 list_add(&desc->node, &chan->free); nbpf_desc_put()
723 spin_unlock_irqrestore(&chan->lock, flags); nbpf_desc_put()
726 static void nbpf_scan_acked(struct nbpf_channel *chan) nbpf_scan_acked() argument
732 spin_lock_irqsave(&chan->lock, flags); nbpf_scan_acked()
733 list_for_each_entry_safe(desc, tmp, &chan->done, node) nbpf_scan_acked()
738 spin_unlock_irqrestore(&chan->lock, flags); nbpf_scan_acked()
752 static struct nbpf_desc *nbpf_desc_get(struct nbpf_channel *chan, size_t len) nbpf_desc_get() argument
757 nbpf_scan_acked(chan); nbpf_desc_get()
759 spin_lock_irq(&chan->lock); nbpf_desc_get()
764 if (list_empty(&chan->free)) { nbpf_desc_get()
766 spin_unlock_irq(&chan->lock); nbpf_desc_get()
767 ret = nbpf_desc_page_alloc(chan); nbpf_desc_get()
770 spin_lock_irq(&chan->lock); nbpf_desc_get()
773 desc = list_first_entry(&chan->free, struct nbpf_desc, node); nbpf_desc_get()
777 if (list_empty(&chan->free_links)) { nbpf_desc_get()
779 spin_unlock_irq(&chan->lock); nbpf_desc_get()
780 ret = nbpf_desc_page_alloc(chan); nbpf_desc_get()
785 spin_lock_irq(&chan->lock); nbpf_desc_get()
789 ldesc = list_first_entry(&chan->free_links, nbpf_desc_get()
804 spin_unlock_irq(&chan->lock); nbpf_desc_get()
809 static void nbpf_chan_idle(struct nbpf_channel *chan) nbpf_chan_idle() argument
815 spin_lock_irqsave(&chan->lock, flags); nbpf_chan_idle()
817 list_splice_init(&chan->done, &head); nbpf_chan_idle()
818 list_splice_init(&chan->active, &head); nbpf_chan_idle()
819 list_splice_init(&chan->queued, &head); nbpf_chan_idle()
821 chan->running = NULL; nbpf_chan_idle()
823 spin_unlock_irqrestore(&chan->lock, flags); nbpf_chan_idle()
826 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): force-free desc %p cookie %d\n", nbpf_chan_idle()
835 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_pause() local
839 chan->paused = true; nbpf_pause()
840 nbpf_chan_write(chan, NBPF_CHAN_CTRL, NBPF_CHAN_CTRL_SETSUS); nbpf_pause()
842 nbpf_chan_write(chan, NBPF_CHAN_CTRL, NBPF_CHAN_CTRL_CLREN); nbpf_pause()
849 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_terminate_all() local
854 nbpf_chan_halt(chan); nbpf_terminate_all()
855 nbpf_chan_idle(chan); nbpf_terminate_all()
863 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_config() local
868 * We could check config->slave_id to match chan->terminal here, nbpf_config()
873 chan->slave_dst_addr = config->dst_addr; nbpf_config()
874 chan->slave_dst_width = nbpf_xfer_size(chan->nbpf, nbpf_config()
876 chan->slave_dst_burst = nbpf_xfer_size(chan->nbpf, nbpf_config()
879 chan->slave_src_addr = config->src_addr; nbpf_config()
880 chan->slave_src_width = nbpf_xfer_size(chan->nbpf, nbpf_config()
882 chan->slave_src_burst = nbpf_xfer_size(chan->nbpf, nbpf_config()
889 static struct dma_async_tx_descriptor *nbpf_prep_sg(struct nbpf_channel *chan, nbpf_prep_sg() argument
921 desc = nbpf_desc_get(chan, len); nbpf_prep_sg()
962 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_prep_memcpy() local
978 return nbpf_prep_sg(chan, &src_sg, &dst_sg, 1, nbpf_prep_memcpy()
988 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_prep_memcpy_sg() local
993 return nbpf_prep_sg(chan, src_sg, dst_sg, src_nents, nbpf_prep_memcpy_sg()
1001 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_prep_slave_sg() local
1010 sg_dma_address(&slave_sg) = chan->slave_dst_addr; nbpf_prep_slave_sg()
1011 return nbpf_prep_sg(chan, sgl, &slave_sg, sg_len, nbpf_prep_slave_sg()
1015 sg_dma_address(&slave_sg) = chan->slave_src_addr; nbpf_prep_slave_sg()
1016 return nbpf_prep_sg(chan, &slave_sg, sgl, sg_len, nbpf_prep_slave_sg()
1026 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_alloc_chan_resources() local
1029 INIT_LIST_HEAD(&chan->free); nbpf_alloc_chan_resources()
1030 INIT_LIST_HEAD(&chan->free_links); nbpf_alloc_chan_resources()
1031 INIT_LIST_HEAD(&chan->queued); nbpf_alloc_chan_resources()
1032 INIT_LIST_HEAD(&chan->active); nbpf_alloc_chan_resources()
1033 INIT_LIST_HEAD(&chan->done); nbpf_alloc_chan_resources()
1035 ret = nbpf_desc_page_alloc(chan); nbpf_alloc_chan_resources()
1040 chan->terminal); nbpf_alloc_chan_resources()
1042 nbpf_chan_configure(chan); nbpf_alloc_chan_resources()
1049 struct nbpf_channel *chan = nbpf_to_chan(dchan); nbpf_free_chan_resources() local
1054 nbpf_chan_halt(chan); nbpf_free_chan_resources()
1055 nbpf_chan_idle(chan); nbpf_free_chan_resources()
1057 nbpf_chan_prepare_default(chan); nbpf_free_chan_resources()
1059 list_for_each_entry_safe(dpage, tmp, &chan->desc_page, node) { nbpf_free_chan_resources()
1077 struct nbpf_channel *chan; nbpf_of_xlate() local
1089 chan = nbpf_to_chan(dchan); nbpf_of_xlate()
1091 chan->terminal = dma_spec->args[0]; nbpf_of_xlate()
1092 chan->flags = dma_spec->args[1]; nbpf_of_xlate()
1094 nbpf_chan_prepare(chan); nbpf_of_xlate()
1095 nbpf_chan_configure(chan); nbpf_of_xlate()
1102 struct nbpf_channel *chan = (struct nbpf_channel *)data; nbpf_chan_tasklet() local
1107 while (!list_empty(&chan->done)) { nbpf_chan_tasklet()
1110 spin_lock_irq(&chan->lock); nbpf_chan_tasklet()
1112 list_for_each_entry_safe(desc, tmp, &chan->done, node) { nbpf_chan_tasklet()
1123 spin_unlock_irq(&chan->lock); nbpf_chan_tasklet()
1135 spin_unlock_irq(&chan->lock); nbpf_chan_tasklet()
1157 spin_unlock_irq(&chan->lock); nbpf_chan_tasklet()
1169 struct nbpf_channel *chan = dev; nbpf_chan_irq() local
1170 bool done = nbpf_status_get(chan); nbpf_chan_irq()
1178 nbpf_status_ack(chan); nbpf_chan_irq()
1180 dev_dbg(&chan->dma_chan.dev->device, "%s()\n", __func__); nbpf_chan_irq()
1182 spin_lock(&chan->lock); nbpf_chan_irq()
1183 desc = chan->running; nbpf_chan_irq()
1192 list_move_tail(&desc->node, &chan->done); nbpf_chan_irq()
1193 chan->running = NULL; nbpf_chan_irq()
1195 if (!list_empty(&chan->active)) { nbpf_chan_irq()
1196 desc = list_first_entry(&chan->active, nbpf_chan_irq()
1199 chan->running = desc; nbpf_chan_irq()
1203 spin_unlock(&chan->lock); nbpf_chan_irq()
1206 tasklet_schedule(&chan->tasklet); nbpf_chan_irq()
1222 struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error); nbpf_err_irq() local
1224 nbpf_error_clear(chan); nbpf_err_irq()
1225 nbpf_chan_idle(chan); nbpf_err_irq()
1235 struct nbpf_channel *chan = nbpf->chan + n; nbpf_chan_probe() local
1238 chan->nbpf = nbpf; nbpf_chan_probe()
1239 chan->base = nbpf->base + NBPF_REG_CHAN_OFFSET + NBPF_REG_CHAN_SIZE * n; nbpf_chan_probe()
1240 INIT_LIST_HEAD(&chan->desc_page); nbpf_chan_probe()
1241 spin_lock_init(&chan->lock); nbpf_chan_probe()
1242 chan->dma_chan.device = dma_dev; nbpf_chan_probe()
1243 dma_cookie_init(&chan->dma_chan); nbpf_chan_probe()
1244 nbpf_chan_prepare_default(chan); nbpf_chan_probe()
1246 dev_dbg(dma_dev->dev, "%s(): channel %d: -> %p\n", __func__, n, chan->base); nbpf_chan_probe()
1248 snprintf(chan->name, sizeof(chan->name), "nbpf %d", n); nbpf_chan_probe()
1250 tasklet_init(&chan->tasklet, nbpf_chan_tasklet, (unsigned long)chan); nbpf_chan_probe()
1251 ret = devm_request_irq(dma_dev->dev, chan->irq, nbpf_chan_probe()
1253 chan->name, chan); nbpf_chan_probe()
1258 list_add_tail(&chan->dma_chan.device_node, nbpf_chan_probe()
1302 sizeof(nbpf->chan[0]), GFP_KERNEL); nbpf_probe()
1344 nbpf->chan[i].irq = irqbuf[0]; nbpf_probe()
1351 struct nbpf_channel *chan; nbpf_probe() local
1353 for (i = 0, chan = nbpf->chan; i <= num_channels; nbpf_probe()
1354 i++, chan++) { nbpf_probe()
1358 chan->irq = irqbuf[i]; nbpf_probe()
1361 if (chan != nbpf->chan + num_channels) nbpf_probe()
1371 nbpf->chan[i].irq = irq; nbpf_probe()
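nbpf_desc_get() above allocates descriptor pages on demand: if the channel's free list is empty it drops the lock (so the page allocator may sleep), calls nbpf_desc_page_alloc(), retakes the lock and retries. A condensed sketch of just that loop, omitting the free_links handling the full function also performs:

	static struct nbpf_desc *sketch_nbpf_desc_get(struct nbpf_channel *chan)
	{
		struct nbpf_desc *desc;

		spin_lock_irq(&chan->lock);
		while (list_empty(&chan->free)) {
			/* drop the lock: page allocation may sleep */
			spin_unlock_irq(&chan->lock);
			if (nbpf_desc_page_alloc(chan) < 0)
				return NULL;
			spin_lock_irq(&chan->lock);
		}
		desc = list_first_entry(&chan->free, struct nbpf_desc, node);
		list_del(&desc->node);
		spin_unlock_irq(&chan->lock);

		return desc;
	}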
H A Ddmaengine.c84 return chan_dev->chan; dev_to_dma_chan()
90 struct dma_chan *chan; memcpy_count_show() local
96 chan = dev_to_dma_chan(dev); memcpy_count_show()
97 if (chan) { memcpy_count_show()
99 count += per_cpu_ptr(chan->local, i)->memcpy_count; memcpy_count_show()
112 struct dma_chan *chan; bytes_transferred_show() local
118 chan = dev_to_dma_chan(dev); bytes_transferred_show()
119 if (chan) { bytes_transferred_show()
121 count += per_cpu_ptr(chan->local, i)->bytes_transferred; bytes_transferred_show()
134 struct dma_chan *chan; in_use_show() local
138 chan = dev_to_dma_chan(dev); in_use_show()
139 if (chan) in_use_show()
140 err = sprintf(buf, "%d\n", chan->client_count); in_use_show()
192 static struct module *dma_chan_to_owner(struct dma_chan *chan) dma_chan_to_owner() argument
194 return chan->device->dev->driver->owner; dma_chan_to_owner()
199 * @chan - channel to balance ->client_count versus dmaengine_ref_count
203 static void balance_ref_count(struct dma_chan *chan) balance_ref_count() argument
205 struct module *owner = dma_chan_to_owner(chan); balance_ref_count()
207 while (chan->client_count < dmaengine_ref_count) { balance_ref_count()
209 chan->client_count++; balance_ref_count()
215 * @chan - channel to grab
219 static int dma_chan_get(struct dma_chan *chan) dma_chan_get() argument
221 struct module *owner = dma_chan_to_owner(chan); dma_chan_get()
225 if (chan->client_count) { dma_chan_get()
234 if (chan->device->device_alloc_chan_resources) { dma_chan_get()
235 ret = chan->device->device_alloc_chan_resources(chan); dma_chan_get()
240 if (!dma_has_cap(DMA_PRIVATE, chan->device->cap_mask)) dma_chan_get()
241 balance_ref_count(chan); dma_chan_get()
244 chan->client_count++; dma_chan_get()
254 * @chan - channel to release
258 static void dma_chan_put(struct dma_chan *chan) dma_chan_put() argument
261 if (!chan->client_count) dma_chan_put()
264 chan->client_count--; dma_chan_put()
265 module_put(dma_chan_to_owner(chan)); dma_chan_put()
268 if (!chan->client_count && chan->device->device_free_chan_resources) dma_chan_put()
269 chan->device->device_free_chan_resources(chan); dma_chan_put()
272 enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie) dma_sync_wait() argument
277 dma_async_issue_pending(chan); dma_sync_wait()
279 status = dma_async_is_tx_complete(chan, cookie, NULL, NULL); dma_sync_wait()
300 * @chan - associated channel for this entry
303 struct dma_chan *chan; member in struct:dma_chan_tbl_ent
350 return this_cpu_read(channel_table[tx_type]->chan); dma_find_channel()
360 struct dma_chan *chan; dma_issue_pending_all() local
366 list_for_each_entry(chan, &device->channels, device_node) dma_issue_pending_all()
367 if (chan->client_count) dma_issue_pending_all()
368 device->device_issue_pending(chan); dma_issue_pending_all()
377 static bool dma_chan_is_local(struct dma_chan *chan, int cpu) dma_chan_is_local() argument
379 int node = dev_to_node(chan->device->dev); dma_chan_is_local()
396 struct dma_chan *chan; min_chan() local
404 list_for_each_entry(chan, &device->channels, device_node) { min_chan()
405 if (!chan->client_count) min_chan()
407 if (!min || chan->table_count < min->table_count) min_chan()
408 min = chan; min_chan()
410 if (dma_chan_is_local(chan, cpu)) min_chan()
412 chan->table_count < localmin->table_count) min_chan()
413 localmin = chan; min_chan()
417 chan = localmin ? localmin : min; min_chan()
419 if (chan) min_chan()
420 chan->table_count++; min_chan()
422 return chan; min_chan()
435 struct dma_chan *chan; dma_channel_rebalance() local
443 per_cpu_ptr(channel_table[cap], cpu)->chan = NULL; dma_channel_rebalance()
448 list_for_each_entry(chan, &device->channels, device_node) dma_channel_rebalance()
449 chan->table_count = 0; dma_channel_rebalance()
459 chan = min_chan(cap, cpu); for_each_dma_cap_mask()
460 per_cpu_ptr(channel_table[cap], cpu)->chan = chan; for_each_dma_cap_mask()
464 int dma_get_slave_caps(struct dma_chan *chan, struct dma_slave_caps *caps) dma_get_slave_caps() argument
468 if (!chan || !caps) dma_get_slave_caps()
471 device = chan->device; dma_get_slave_caps()
505 struct dma_chan *chan; private_candidate() local
515 list_for_each_entry(chan, &dev->channels, device_node) { private_candidate()
517 if (chan->client_count) private_candidate()
521 list_for_each_entry(chan, &dev->channels, device_node) { private_candidate()
522 if (chan->client_count) { private_candidate()
524 __func__, dma_chan_name(chan)); private_candidate()
527 if (fn && !fn(chan, fn_param)) { private_candidate()
529 __func__, dma_chan_name(chan)); private_candidate()
532 return chan; private_candidate()
540 * @chan: target channel
542 struct dma_chan *dma_get_slave_channel(struct dma_chan *chan) dma_get_slave_channel() argument
549 if (chan->client_count == 0) { dma_get_slave_channel()
550 err = dma_chan_get(chan); dma_get_slave_channel()
553 __func__, dma_chan_name(chan), err); dma_get_slave_channel()
555 chan = NULL; dma_get_slave_channel()
560 return chan; dma_get_slave_channel()
567 struct dma_chan *chan; dma_get_any_slave_channel() local
576 chan = private_candidate(&mask, device, NULL, NULL); dma_get_any_slave_channel()
577 if (chan) { dma_get_any_slave_channel()
580 err = dma_chan_get(chan); dma_get_any_slave_channel()
583 __func__, dma_chan_name(chan), err); dma_get_any_slave_channel()
584 chan = NULL; dma_get_any_slave_channel()
592 return chan; dma_get_any_slave_channel()
608 struct dma_chan *chan = NULL; __dma_request_channel() local
614 chan = private_candidate(mask, device, fn, fn_param); __dma_request_channel()
615 if (chan) { __dma_request_channel()
623 err = dma_chan_get(chan); __dma_request_channel()
627 __func__, dma_chan_name(chan)); __dma_request_channel()
631 __func__, dma_chan_name(chan), err); __dma_request_channel()
636 chan = NULL; __dma_request_channel()
643 chan ? "success" : "fail", __dma_request_channel()
644 chan ? dma_chan_name(chan) : NULL); __dma_request_channel()
646 return chan; __dma_request_channel()
689 void dma_release_channel(struct dma_chan *chan) dma_release_channel() argument
692 WARN_ONCE(chan->client_count != 1, dma_release_channel()
693 "chan reference count %d != 1\n", chan->client_count); dma_release_channel()
694 dma_chan_put(chan); dma_release_channel()
696 if (--chan->device->privatecnt == 0) dma_release_channel()
697 dma_cap_clear(DMA_PRIVATE, chan->device->cap_mask); dma_release_channel()
708 struct dma_chan *chan; dmaengine_get() local
718 list_for_each_entry(chan, &device->channels, device_node) { dmaengine_get()
719 err = dma_chan_get(chan); dmaengine_get()
726 __func__, dma_chan_name(chan), err); dmaengine_get()
746 struct dma_chan *chan; dmaengine_put() local
755 list_for_each_entry(chan, &device->channels, device_node) dmaengine_put()
756 dma_chan_put(chan); dmaengine_put()
822 struct dma_chan* chan; dma_async_device_register() local
870 list_for_each_entry(chan, &device->channels, device_node) { dma_async_device_register()
872 chan->local = alloc_percpu(typeof(*chan->local)); dma_async_device_register()
873 if (chan->local == NULL) dma_async_device_register()
875 chan->dev = kzalloc(sizeof(*chan->dev), GFP_KERNEL); dma_async_device_register()
876 if (chan->dev == NULL) { dma_async_device_register()
877 free_percpu(chan->local); dma_async_device_register()
878 chan->local = NULL; dma_async_device_register()
882 chan->chan_id = chancnt++; dma_async_device_register()
883 chan->dev->device.class = &dma_devclass; dma_async_device_register()
884 chan->dev->device.parent = device->dev; dma_async_device_register()
885 chan->dev->chan = chan; dma_async_device_register()
886 chan->dev->idr_ref = idr_ref; dma_async_device_register()
887 chan->dev->dev_id = device->dev_id; dma_async_device_register()
889 dev_set_name(&chan->dev->device, "dma%dchan%d", dma_async_device_register()
890 device->dev_id, chan->chan_id); dma_async_device_register()
892 rc = device_register(&chan->dev->device); dma_async_device_register()
894 free_percpu(chan->local); dma_async_device_register()
895 chan->local = NULL; dma_async_device_register()
896 kfree(chan->dev); dma_async_device_register()
900 chan->client_count = 0; dma_async_device_register()
907 list_for_each_entry(chan, &device->channels, device_node) { dma_async_device_register()
911 if (dma_chan_get(chan) == -ENODEV) { dma_async_device_register()
939 list_for_each_entry(chan, &device->channels, device_node) { dma_async_device_register()
940 if (chan->local == NULL) dma_async_device_register()
943 chan->dev->chan = NULL; dma_async_device_register()
945 device_unregister(&chan->dev->device); dma_async_device_register()
946 free_percpu(chan->local); dma_async_device_register()
961 struct dma_chan *chan; dma_async_device_unregister() local
968 list_for_each_entry(chan, &device->channels, device_node) { dma_async_device_unregister()
969 WARN_ONCE(chan->client_count, dma_async_device_unregister()
971 __func__, chan->client_count); dma_async_device_unregister()
973 chan->dev->chan = NULL; dma_async_device_unregister()
975 device_unregister(&chan->dev->device); dma_async_device_unregister()
976 free_percpu(chan->local); dma_async_device_unregister()
1111 struct dma_chan *chan) dma_async_tx_descriptor_init()
1113 tx->chan = chan; dma_async_tx_descriptor_init()
1139 return dma_sync_wait(tx->chan, tx->cookie); dma_wait_for_async_tx()
1151 struct dma_chan *chan; dma_run_dependencies() local
1158 chan = dep->chan; dma_run_dependencies()
1168 if (dep_next && dep_next->chan == chan) dma_run_dependencies()
1177 chan->device->device_issue_pending(chan); dma_run_dependencies()
1110 dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx, struct dma_chan *chan) dma_async_tx_descriptor_init() argument
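From the client side, the dmaengine core excerpts above (dma_chan_get()/dma_chan_put() refcounting, __dma_request_channel(), dma_release_channel()) are driven through a short request/use/release sequence. A minimal sketch; the descriptor work between request and release is elided:

	static int sketch_dma_client(void)
	{
		dma_cap_mask_t mask;
		struct dma_chan *chan;

		dma_cap_zero(mask);
		dma_cap_set(DMA_MEMCPY, mask);

		/* NULL filter: accept any capable channel */
		chan = dma_request_channel(mask, NULL, NULL);
		if (!chan)
			return -ENODEV;

		/* ...prep and submit descriptors via chan->device ops... */
		dma_async_issue_pending(chan);	/* flush the pending queue */

		dma_release_channel(chan);	/* drops the client reference */
		return 0;
	}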
H A Dmmp_pdma.c96 struct dma_chan chan; member in struct:mmp_pdma_chan
140 container_of(dchan, struct mmp_pdma_chan, chan)
242 struct mmp_pdma_device *pdev = to_mmp_pdma_dev(pchan->chan.device); lookup_phy()
275 struct mmp_pdma_device *pdev = to_mmp_pdma_dev(pchan->chan.device); mmp_pdma_free_phy()
296 static void start_pending_queue(struct mmp_pdma_chan *chan) start_pending_queue() argument
301 if (!chan->idle) { start_pending_queue()
302 dev_dbg(chan->dev, "DMA controller still busy\n"); start_pending_queue()
306 if (list_empty(&chan->chain_pending)) { start_pending_queue()
308 mmp_pdma_free_phy(chan); start_pending_queue()
309 dev_dbg(chan->dev, "no pending list\n"); start_pending_queue()
313 if (!chan->phy) { start_pending_queue()
314 chan->phy = lookup_phy(chan); start_pending_queue()
315 if (!chan->phy) { start_pending_queue()
316 dev_dbg(chan->dev, "no free dma channel\n"); start_pending_queue()
325 desc = list_first_entry(&chan->chain_pending, start_pending_queue()
327 list_splice_tail_init(&chan->chain_pending, &chan->chain_running); start_pending_queue()
333 set_desc(chan->phy, desc->async_tx.phys); start_pending_queue()
334 enable_chan(chan->phy); start_pending_queue()
335 chan->idle = false; start_pending_queue()
342 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(tx->chan); mmp_pdma_tx_submit() local
348 spin_lock_irqsave(&chan->desc_lock, flags); mmp_pdma_tx_submit()
355 list_splice_tail_init(&desc->tx_list, &chan->chain_pending); mmp_pdma_tx_submit()
357 spin_unlock_irqrestore(&chan->desc_lock, flags); mmp_pdma_tx_submit()
363 mmp_pdma_alloc_descriptor(struct mmp_pdma_chan *chan) mmp_pdma_alloc_descriptor() argument
368 desc = dma_pool_alloc(chan->desc_pool, GFP_ATOMIC, &pdesc); mmp_pdma_alloc_descriptor()
370 dev_err(chan->dev, "out of memory for link descriptor\n"); mmp_pdma_alloc_descriptor()
376 dma_async_tx_descriptor_init(&desc->async_tx, &chan->chan); mmp_pdma_alloc_descriptor()
394 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); mmp_pdma_alloc_chan_resources() local
396 if (chan->desc_pool) mmp_pdma_alloc_chan_resources()
399 chan->desc_pool = dma_pool_create(dev_name(&dchan->dev->device), mmp_pdma_alloc_chan_resources()
400 chan->dev, mmp_pdma_alloc_chan_resources()
404 if (!chan->desc_pool) { mmp_pdma_alloc_chan_resources()
405 dev_err(chan->dev, "unable to allocate descriptor pool\n"); mmp_pdma_alloc_chan_resources()
409 mmp_pdma_free_phy(chan); mmp_pdma_alloc_chan_resources()
410 chan->idle = true; mmp_pdma_alloc_chan_resources()
411 chan->dev_addr = 0; mmp_pdma_alloc_chan_resources()
415 static void mmp_pdma_free_desc_list(struct mmp_pdma_chan *chan, mmp_pdma_free_desc_list() argument
422 dma_pool_free(chan->desc_pool, desc, desc->async_tx.phys); list_for_each_entry_safe()
428 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); mmp_pdma_free_chan_resources() local
431 spin_lock_irqsave(&chan->desc_lock, flags); mmp_pdma_free_chan_resources()
432 mmp_pdma_free_desc_list(chan, &chan->chain_pending); mmp_pdma_free_chan_resources()
433 mmp_pdma_free_desc_list(chan, &chan->chain_running); mmp_pdma_free_chan_resources()
434 spin_unlock_irqrestore(&chan->desc_lock, flags); mmp_pdma_free_chan_resources()
436 dma_pool_destroy(chan->desc_pool); mmp_pdma_free_chan_resources()
437 chan->desc_pool = NULL; mmp_pdma_free_chan_resources()
438 chan->idle = true; mmp_pdma_free_chan_resources()
439 chan->dev_addr = 0; mmp_pdma_free_chan_resources()
440 mmp_pdma_free_phy(chan); mmp_pdma_free_chan_resources()
449 struct mmp_pdma_chan *chan; mmp_pdma_prep_memcpy() local
459 chan = to_mmp_pdma_chan(dchan); mmp_pdma_prep_memcpy()
460 chan->byte_align = false; mmp_pdma_prep_memcpy()
462 if (!chan->dir) { mmp_pdma_prep_memcpy()
463 chan->dir = DMA_MEM_TO_MEM; mmp_pdma_prep_memcpy()
464 chan->dcmd = DCMD_INCTRGADDR | DCMD_INCSRCADDR; mmp_pdma_prep_memcpy()
465 chan->dcmd |= DCMD_BURST32; mmp_pdma_prep_memcpy()
470 new = mmp_pdma_alloc_descriptor(chan); mmp_pdma_prep_memcpy()
472 dev_err(chan->dev, "no memory for desc\n"); mmp_pdma_prep_memcpy()
478 chan->byte_align = true; mmp_pdma_prep_memcpy()
480 new->desc.dcmd = chan->dcmd | (DCMD_LENGTH & copy); mmp_pdma_prep_memcpy()
495 if (chan->dir == DMA_MEM_TO_DEV) { mmp_pdma_prep_memcpy()
497 } else if (chan->dir == DMA_DEV_TO_MEM) { mmp_pdma_prep_memcpy()
499 } else if (chan->dir == DMA_MEM_TO_MEM) { mmp_pdma_prep_memcpy()
515 chan->cyclic_first = NULL; mmp_pdma_prep_memcpy()
521 mmp_pdma_free_desc_list(chan, &first->tx_list); mmp_pdma_prep_memcpy()
530 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); mmp_pdma_prep_slave_sg() local
540 chan->byte_align = false; mmp_pdma_prep_slave_sg()
549 chan->byte_align = true; for_each_sg()
552 new = mmp_pdma_alloc_descriptor(chan); for_each_sg()
554 dev_err(chan->dev, "no memory for desc\n"); for_each_sg()
558 new->desc.dcmd = chan->dcmd | (DCMD_LENGTH & len); for_each_sg()
561 new->desc.dtadr = chan->dev_addr; for_each_sg()
563 new->desc.dsadr = chan->dev_addr; for_each_sg()
592 chan->dir = dir;
593 chan->cyclic_first = NULL;
599 mmp_pdma_free_desc_list(chan, &first->tx_list);
609 struct mmp_pdma_chan *chan; mmp_pdma_prep_dma_cyclic() local
623 chan = to_mmp_pdma_chan(dchan); mmp_pdma_prep_dma_cyclic()
628 dma_dst = chan->dev_addr; mmp_pdma_prep_dma_cyclic()
632 dma_src = chan->dev_addr; mmp_pdma_prep_dma_cyclic()
635 dev_err(chan->dev, "Unsupported direction for cyclic DMA\n"); mmp_pdma_prep_dma_cyclic()
639 chan->dir = direction; mmp_pdma_prep_dma_cyclic()
643 new = mmp_pdma_alloc_descriptor(chan); mmp_pdma_prep_dma_cyclic()
645 dev_err(chan->dev, "no memory for desc\n"); mmp_pdma_prep_dma_cyclic()
649 new->desc.dcmd = (chan->dcmd | DCMD_ENDIRQEN | mmp_pdma_prep_dma_cyclic()
665 if (chan->dir == DMA_MEM_TO_DEV) mmp_pdma_prep_dma_cyclic()
679 chan->cyclic_first = first; mmp_pdma_prep_dma_cyclic()
685 mmp_pdma_free_desc_list(chan, &first->tx_list); mmp_pdma_prep_dma_cyclic()
692 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); mmp_pdma_config() local
700 chan->dcmd = DCMD_INCTRGADDR | DCMD_FLOWSRC; mmp_pdma_config()
705 chan->dcmd = DCMD_INCSRCADDR | DCMD_FLOWTRG; mmp_pdma_config()
712 chan->dcmd |= DCMD_WIDTH1; mmp_pdma_config()
714 chan->dcmd |= DCMD_WIDTH2; mmp_pdma_config()
716 chan->dcmd |= DCMD_WIDTH4; mmp_pdma_config()
719 chan->dcmd |= DCMD_BURST8; mmp_pdma_config()
721 chan->dcmd |= DCMD_BURST16; mmp_pdma_config()
723 chan->dcmd |= DCMD_BURST32; mmp_pdma_config()
725 chan->dir = cfg->direction; mmp_pdma_config()
726 chan->dev_addr = addr; mmp_pdma_config()
732 chan->drcmr = cfg->slave_id; mmp_pdma_config()
739 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); mmp_pdma_terminate_all() local
745 disable_chan(chan->phy); mmp_pdma_terminate_all()
746 mmp_pdma_free_phy(chan); mmp_pdma_terminate_all()
747 spin_lock_irqsave(&chan->desc_lock, flags); mmp_pdma_terminate_all()
748 mmp_pdma_free_desc_list(chan, &chan->chain_pending); mmp_pdma_terminate_all()
749 mmp_pdma_free_desc_list(chan, &chan->chain_running); mmp_pdma_terminate_all()
750 spin_unlock_irqrestore(&chan->desc_lock, flags); mmp_pdma_terminate_all()
751 chan->idle = true; mmp_pdma_terminate_all()
756 static unsigned int mmp_pdma_residue(struct mmp_pdma_chan *chan, mmp_pdma_residue() argument
762 bool cyclic = chan->cyclic_first != NULL; mmp_pdma_residue()
768 if (!chan->phy) mmp_pdma_residue()
771 if (chan->dir == DMA_DEV_TO_MEM) mmp_pdma_residue()
772 curr = readl(chan->phy->base + DTADR(chan->phy->idx)); mmp_pdma_residue()
774 curr = readl(chan->phy->base + DSADR(chan->phy->idx)); mmp_pdma_residue()
776 list_for_each_entry(sw, &chan->chain_running, node) { mmp_pdma_residue()
779 if (chan->dir == DMA_DEV_TO_MEM) mmp_pdma_residue()
834 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); mmp_pdma_tx_status() local
839 dma_set_residue(txstate, mmp_pdma_residue(chan, cookie)); mmp_pdma_tx_status()
850 struct mmp_pdma_chan *chan = to_mmp_pdma_chan(dchan); mmp_pdma_issue_pending() local
853 spin_lock_irqsave(&chan->desc_lock, flags); mmp_pdma_issue_pending()
854 start_pending_queue(chan); mmp_pdma_issue_pending()
855 spin_unlock_irqrestore(&chan->desc_lock, flags); mmp_pdma_issue_pending()
865 struct mmp_pdma_chan *chan = (struct mmp_pdma_chan *)data; dma_do_tasklet() local
870 if (chan->cyclic_first) { dma_do_tasklet()
874 spin_lock_irqsave(&chan->desc_lock, flags); dma_do_tasklet()
875 desc = chan->cyclic_first; dma_do_tasklet()
878 spin_unlock_irqrestore(&chan->desc_lock, flags); dma_do_tasklet()
887 spin_lock_irqsave(&chan->desc_lock, flags); dma_do_tasklet()
889 list_for_each_entry_safe(desc, _desc, &chan->chain_running, node) { dma_do_tasklet()
904 dev_dbg(chan->dev, "completed_cookie=%d\n", cookie); dma_do_tasklet()
913 chan->idle = list_empty(&chan->chain_running); dma_do_tasklet()
916 start_pending_queue(chan); dma_do_tasklet()
917 spin_unlock_irqrestore(&chan->desc_lock, flags); dma_do_tasklet()
929 dma_pool_free(chan->desc_pool, desc, txd->phys); dma_do_tasklet()
944 struct mmp_pdma_chan *chan; mmp_pdma_chan_init() local
947 chan = devm_kzalloc(pdev->dev, sizeof(*chan), GFP_KERNEL); mmp_pdma_chan_init()
948 if (chan == NULL) mmp_pdma_chan_init()
963 spin_lock_init(&chan->desc_lock); mmp_pdma_chan_init()
964 chan->dev = pdev->dev; mmp_pdma_chan_init()
965 chan->chan.device = &pdev->device; mmp_pdma_chan_init()
966 tasklet_init(&chan->tasklet, dma_do_tasklet, (unsigned long)chan); mmp_pdma_chan_init()
967 INIT_LIST_HEAD(&chan->chain_pending); mmp_pdma_chan_init()
968 INIT_LIST_HEAD(&chan->chain_running); mmp_pdma_chan_init()
971 list_add_tail(&chan->chan.device_node, &pdev->device.channels); mmp_pdma_chan_init()
986 struct dma_chan *chan; mmp_pdma_dma_xlate() local
988 chan = dma_get_any_slave_channel(&d->device); mmp_pdma_dma_xlate()
989 if (!chan) mmp_pdma_dma_xlate()
992 to_mmp_pdma_chan(chan)->drcmr = dma_spec->args[0]; mmp_pdma_dma_xlate()
994 return chan; mmp_pdma_dma_xlate()
1045 /* all chan share one irq, demux inside */ mmp_pdma_probe()
1121 bool mmp_pdma_filter_fn(struct dma_chan *chan, void *param) mmp_pdma_filter_fn() argument
1123 struct mmp_pdma_chan *c = to_mmp_pdma_chan(chan); mmp_pdma_filter_fn()
1125 if (chan->device->dev->driver != &mmp_pdma_driver.driver) mmp_pdma_filter_fn()
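mmp_pdma_filter_fn() above is the exported filter for claiming a channel by DRCMR request line. A usage sketch, assuming (as the drcmr assignments above suggest) that the filter takes a pointer to the requestor number as its parameter:

	static struct dma_chan *sketch_request_mmp_pdma(unsigned int drcmr)
	{
		dma_cap_mask_t mask;

		dma_cap_zero(mask);
		dma_cap_set(DMA_SLAVE, mask);

		/* drcmr: hypothetical request-line number for this peripheral */
		return dma_request_channel(mask, mmp_pdma_filter_fn, &drcmr);
	}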
H A Dpch_dma.c103 struct dma_chan chan; member in struct:pch_dma_chan
155 static inline struct pch_dma_chan *to_pd_chan(struct dma_chan *chan) to_pd_chan() argument
157 return container_of(chan, struct pch_dma_chan, chan); to_pd_chan()
165 static inline struct device *chan2dev(struct dma_chan *chan) chan2dev() argument
167 return &chan->dev->device; chan2dev()
170 static inline struct device *chan2parent(struct dma_chan *chan) chan2parent() argument
172 return chan->dev->device.parent; chan2parent()
189 static void pdc_enable_irq(struct dma_chan *chan, int enable) pdc_enable_irq() argument
191 struct pch_dma *pd = to_pd(chan->device); pdc_enable_irq()
195 if (chan->chan_id < 8) pdc_enable_irq()
196 pos = chan->chan_id; pdc_enable_irq()
198 pos = chan->chan_id + 8; pdc_enable_irq()
209 dev_dbg(chan2dev(chan), "pdc_enable_irq: chan %d -> %x\n", pdc_enable_irq()
210 chan->chan_id, val); pdc_enable_irq()
213 static void pdc_set_dir(struct dma_chan *chan) pdc_set_dir() argument
215 struct pch_dma_chan *pd_chan = to_pd_chan(chan); pdc_set_dir()
216 struct pch_dma *pd = to_pd(chan->device); pdc_set_dir()
221 if (chan->chan_id < 8) { pdc_set_dir()
225 (DMA_CTL0_BITS_PER_CH * chan->chan_id); pdc_set_dir()
227 (DMA_CTL0_BITS_PER_CH * chan->chan_id)); pdc_set_dir()
230 val |= 0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id + pdc_set_dir()
233 val &= ~(0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id + pdc_set_dir()
239 int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */ pdc_set_dir()
257 dev_dbg(chan2dev(chan), "pdc_set_dir: chan %d -> %x\n", pdc_set_dir()
258 chan->chan_id, val); pdc_set_dir()
261 static void pdc_set_mode(struct dma_chan *chan, u32 mode) pdc_set_mode() argument
263 struct pch_dma *pd = to_pd(chan->device); pdc_set_mode()
268 if (chan->chan_id < 8) { pdc_set_mode()
270 (DMA_CTL0_BITS_PER_CH * chan->chan_id)); pdc_set_mode()
271 mask_dir = 1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +\ pdc_set_mode()
275 val |= mode << (DMA_CTL0_BITS_PER_CH * chan->chan_id); pdc_set_mode()
279 int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */ pdc_set_mode()
291 dev_dbg(chan2dev(chan), "pdc_set_mode: chan %d -> %x\n", pdc_set_mode()
292 chan->chan_id, val); pdc_set_mode()
297 struct pch_dma *pd = to_pd(pd_chan->chan.device); pdc_get_status0()
302 DMA_STATUS_BITS_PER_CH * pd_chan->chan.chan_id)); pdc_get_status0()
307 struct pch_dma *pd = to_pd(pd_chan->chan.device); pdc_get_status2()
312 DMA_STATUS_BITS_PER_CH * (pd_chan->chan.chan_id - 8))); pdc_get_status2()
319 if (pd_chan->chan.chan_id < 8) pdc_is_idle()
334 dev_err(chan2dev(&pd_chan->chan), pdc_dostart()
339 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> dev_addr: %x\n", pdc_dostart()
340 pd_chan->chan.chan_id, desc->regs.dev_addr); pdc_dostart()
341 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> mem_addr: %x\n", pdc_dostart()
342 pd_chan->chan.chan_id, desc->regs.mem_addr); pdc_dostart()
343 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> size: %x\n", pdc_dostart()
344 pd_chan->chan.chan_id, desc->regs.size); pdc_dostart()
345 dev_dbg(chan2dev(&pd_chan->chan), "chan %d -> next: %x\n", pdc_dostart()
346 pd_chan->chan.chan_id, desc->regs.next); pdc_dostart()
353 pdc_set_mode(&pd_chan->chan, DMA_CTL0_ONESHOT); pdc_dostart()
356 pdc_set_mode(&pd_chan->chan, DMA_CTL0_SG); pdc_dostart()
403 dev_crit(chan2dev(&pd_chan->chan), "Bad descriptor submitted\n"); pdc_handle_error()
404 dev_crit(chan2dev(&pd_chan->chan), "descriptor cookie: %d\n", pdc_handle_error()
424 struct pch_dma_chan *pd_chan = to_pd_chan(txd->chan); pd_tx_submit()
441 static struct pch_dma_desc *pdc_alloc_desc(struct dma_chan *chan, gfp_t flags) pdc_alloc_desc() argument
444 struct pch_dma *pd = to_pd(chan->device); pdc_alloc_desc()
451 dma_async_tx_descriptor_init(&desc->txd, chan); pdc_alloc_desc()
474 dev_dbg(chan2dev(&pd_chan->chan), "desc %p not ACKed\n", desc); pdc_desc_get()
477 dev_dbg(chan2dev(&pd_chan->chan), "scanned %d descriptors\n", i); pdc_desc_get()
480 ret = pdc_alloc_desc(&pd_chan->chan, GFP_ATOMIC); pdc_desc_get()
486 dev_err(chan2dev(&pd_chan->chan), pdc_desc_get()
505 static int pd_alloc_chan_resources(struct dma_chan *chan) pd_alloc_chan_resources() argument
507 struct pch_dma_chan *pd_chan = to_pd_chan(chan); pd_alloc_chan_resources()
513 dev_dbg(chan2dev(chan), "DMA channel not idle?\n"); pd_alloc_chan_resources()
521 desc = pdc_alloc_desc(chan, GFP_KERNEL); pd_alloc_chan_resources()
524 dev_warn(chan2dev(chan), pd_alloc_chan_resources()
535 dma_cookie_init(chan); pd_alloc_chan_resources()
538 pdc_enable_irq(chan, 1); pd_alloc_chan_resources()
543 static void pd_free_chan_resources(struct dma_chan *chan) pd_free_chan_resources() argument
545 struct pch_dma_chan *pd_chan = to_pd_chan(chan); pd_free_chan_resources()
546 struct pch_dma *pd = to_pd(chan->device); pd_free_chan_resources()
562 pdc_enable_irq(chan, 0); pd_free_chan_resources()
565 static enum dma_status pd_tx_status(struct dma_chan *chan, dma_cookie_t cookie, pd_tx_status() argument
568 return dma_cookie_status(chan, cookie, txstate); pd_tx_status()
571 static void pd_issue_pending(struct dma_chan *chan) pd_issue_pending() argument
573 struct pch_dma_chan *pd_chan = to_pd_chan(chan); pd_issue_pending()
582 static struct dma_async_tx_descriptor *pd_prep_slave_sg(struct dma_chan *chan, pd_prep_slave_sg() argument
587 struct pch_dma_chan *pd_chan = to_pd_chan(chan); pd_prep_slave_sg()
588 struct pch_dma_slave *pd_slave = chan->private; pd_prep_slave_sg()
597 dev_info(chan2dev(chan), "prep_slave_sg: length is zero!\n"); pd_prep_slave_sg()
609 pdc_set_dir(chan); pd_prep_slave_sg()
663 dev_err(chan2dev(chan), "failed to get desc or wrong parameters\n");
668 static int pd_device_terminate_all(struct dma_chan *chan) pd_device_terminate_all() argument
670 struct pch_dma_chan *pd_chan = to_pd_chan(chan); pd_device_terminate_all()
676 pdc_set_mode(&pd_chan->chan, DMA_CTL0_DISABLE); pd_device_terminate_all()
695 dev_err(chan2dev(&pd_chan->chan), pdc_tasklet()
758 struct dma_chan *chan, *_c; pch_dma_save_regs() local
766 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { pch_dma_save_regs()
767 pd_chan = to_pd_chan(chan); pch_dma_save_regs()
781 struct dma_chan *chan, *_c; pch_dma_restore_regs() local
789 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { pch_dma_restore_regs()
790 pd_chan = to_pd_chan(chan); pch_dma_restore_regs()
906 pd_chan->chan.device = &pd->dma; pch_dma_probe()
907 dma_cookie_init(&pd_chan->chan); pch_dma_probe()
919 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); pch_dma_probe()
960 struct dma_chan *chan, *_c; pch_dma_remove() local
967 list_for_each_entry_safe(chan, _c, &pd->dma.channels, pch_dma_remove()
969 pd_chan = to_pd_chan(chan); pch_dma_remove()
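pch_dma packs per-channel mode and direction bits into shared control registers, DMA_CTL0_BITS_PER_CH bits per channel for channels 0-7 (channels 8-11 use a second register, hence the `chan->chan_id - 8` remapping above). A sketch of the field arithmetic pdc_set_mode() performs; PDC_MODE_MASK is a hypothetical stand-in for the driver's real mask constant, which the excerpts do not show:

	#define PDC_MODE_MASK	0x3	/* hypothetical width of one mode field */

	static u32 sketch_ctl0_set_mode(u32 ctl0, int chan_id, u32 mode)
	{
		ctl0 &= ~(PDC_MODE_MASK << (DMA_CTL0_BITS_PER_CH * chan_id));
		ctl0 |= mode << (DMA_CTL0_BITS_PER_CH * chan_id);
		return ctl0;
	}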
H A Ddmaengine.h13 * @chan: dma channel to initialize
15 static inline void dma_cookie_init(struct dma_chan *chan) dma_cookie_init() argument
17 chan->cookie = DMA_MIN_COOKIE; dma_cookie_init()
18 chan->completed_cookie = DMA_MIN_COOKIE; dma_cookie_init()
30 struct dma_chan *chan = tx->chan; dma_cookie_assign() local
33 cookie = chan->cookie + 1; dma_cookie_assign()
36 tx->cookie = chan->cookie = cookie; dma_cookie_assign()
54 tx->chan->completed_cookie = tx->cookie; dma_cookie_complete()
60 * @chan: dma channel
67 static inline enum dma_status dma_cookie_status(struct dma_chan *chan, dma_cookie_status() argument
72 used = chan->cookie; dma_cookie_status()
73 complete = chan->completed_cookie; dma_cookie_status()
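The dmaengine.h helpers above give every driver the same cookie lifecycle: dma_cookie_assign() on submit (under the channel lock), dma_cookie_complete() when the descriptor finishes, and dma_cookie_status() to classify a cookie against chan->completed_cookie and chan->cookie. A sketch of how a driver's submit and status callbacks typically wrap them:

	/* called with the channel lock held, as dma_cookie_assign() requires */
	static dma_cookie_t sketch_tx_submit(struct dma_async_tx_descriptor *tx)
	{
		return dma_cookie_assign(tx);
	}

	static enum dma_status sketch_tx_status(struct dma_chan *chan,
						dma_cookie_t cookie,
						struct dma_tx_state *txstate)
	{
		/* compares cookie against chan->completed_cookie / chan->cookie */
		return dma_cookie_status(chan, cookie, txstate);
	}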
H A Dmv_xor.h39 #define XOR_CURR_DESC(chan) (chan->mmr_high_base + 0x10 + (chan->idx * 4))
40 #define XOR_NEXT_DESC(chan) (chan->mmr_high_base + 0x00 + (chan->idx * 4))
41 #define XOR_BYTE_COUNT(chan) (chan->mmr_high_base + 0x20 + (chan->idx * 4))
42 #define XOR_DEST_POINTER(chan) (chan->mmr_high_base + 0xB0 + (chan->idx * 4))
43 #define XOR_BLOCK_SIZE(chan) (chan->mmr_high_base + 0xC0 + (chan->idx * 4))
44 #define XOR_INIT_VALUE_LOW(chan) (chan->mmr_high_base + 0xE0)
45 #define XOR_INIT_VALUE_HIGH(chan) (chan->mmr_high_base + 0xE4)
47 #define XOR_CONFIG(chan) (chan->mmr_base + 0x10 + (chan->idx * 4))
48 #define XOR_ACTIVATION(chan) (chan->mmr_base + 0x20 + (chan->idx * 4))
49 #define XOR_INTR_CAUSE(chan) (chan->mmr_base + 0x30)
50 #define XOR_INTR_MASK(chan) (chan->mmr_base + 0x40)
51 #define XOR_ERROR_CAUSE(chan) (chan->mmr_base + 0x50)
52 #define XOR_ERROR_ADDR(chan) (chan->mmr_base + 0x60)
75 #define WINDOW_BAR_ENABLE(chan) (0x40 + ((chan) << 2))
76 #define WINDOW_OVERRIDE_CTRL(chan) (0xA0 + ((chan) << 2))
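The mv_xor.h macros above compute per-channel MMIO addresses from the two shared register windows (mmr_base, mmr_high_base) plus a 4-byte stride per channel index. A sketch of how they end up being used, assuming the bases are ioremapped and writel() is the accessor; the activation value is illustrative:

	static void sketch_mv_xor_kick(struct mv_xor_chan *chan, u32 next_desc)
	{
		/* mmr_high_base + 0x00 + idx * 4: next descriptor pointer */
		writel(next_desc, XOR_NEXT_DESC(chan));
		/* mmr_base + 0x20 + idx * 4: start the channel */
		writel(1, XOR_ACTIVATION(chan));
	}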
H A Dxgene-dma.c235 #define chan_dbg(chan, fmt, arg...) \
236 dev_dbg(chan->dev, "%s: " fmt, chan->name, ##arg)
237 #define chan_err(chan, fmt, arg...) \
238 dev_err(chan->dev, "%s: " fmt, chan->name, ##arg)
339 * @chan: reference to X-Gene DMA channels
351 struct xgene_dma_chan chan[XGENE_DMA_MAX_CHANNEL]; member in struct:xgene_dma
457 static void xgene_dma_prep_cpy_desc(struct xgene_dma_chan *chan, xgene_dma_prep_cpy_desc() argument
467 xgene_dma_init_desc(desc1, chan->tx_ring.dst_ring_num); xgene_dma_prep_cpy_desc()
507 static void xgene_dma_prep_xor_desc(struct xgene_dma_chan *chan, xgene_dma_prep_xor_desc() argument
521 xgene_dma_init_desc(desc1, chan->tx_ring.dst_ring_num); xgene_dma_prep_xor_desc()
557 struct xgene_dma_chan *chan; xgene_dma_tx_submit() local
563 chan = to_dma_chan(tx->chan); xgene_dma_tx_submit()
566 spin_lock_bh(&chan->lock); xgene_dma_tx_submit()
571 list_splice_tail_init(&desc->tx_list, &chan->ld_pending); xgene_dma_tx_submit()
573 spin_unlock_bh(&chan->lock); xgene_dma_tx_submit()
578 static void xgene_dma_clean_descriptor(struct xgene_dma_chan *chan, xgene_dma_clean_descriptor() argument
582 chan_dbg(chan, "LD %p free\n", desc); xgene_dma_clean_descriptor()
583 dma_pool_free(chan->desc_pool, desc, desc->tx.phys); xgene_dma_clean_descriptor()
587 struct xgene_dma_chan *chan) xgene_dma_alloc_descriptor()
592 desc = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT, &phys); xgene_dma_alloc_descriptor()
594 chan_err(chan, "Failed to allocate LDs\n"); xgene_dma_alloc_descriptor()
603 dma_async_tx_descriptor_init(&desc->tx, &chan->dma_chan); xgene_dma_alloc_descriptor()
605 chan_dbg(chan, "LD %p allocated\n", desc); xgene_dma_alloc_descriptor()
613 * @chan: X-Gene DMA channel
617 static void xgene_dma_clean_completed_descriptor(struct xgene_dma_chan *chan) xgene_dma_clean_completed_descriptor() argument
622 list_for_each_entry_safe(desc, _desc, &chan->ld_completed, node) { xgene_dma_clean_completed_descriptor()
624 xgene_dma_clean_descriptor(chan, desc); xgene_dma_clean_completed_descriptor()
630 * @chan: X-Gene DMA channel
636 static void xgene_dma_run_tx_complete_actions(struct xgene_dma_chan *chan, xgene_dma_run_tx_complete_actions() argument
666 * @chan: X-Gene DMA channel
672 static void xgene_dma_clean_running_descriptor(struct xgene_dma_chan *chan, xgene_dma_clean_running_descriptor() argument
687 list_add_tail(&desc->node, &chan->ld_completed); xgene_dma_clean_running_descriptor()
691 chan_dbg(chan, "LD %p free\n", desc); xgene_dma_clean_running_descriptor()
692 dma_pool_free(chan->desc_pool, desc, desc->tx.phys); xgene_dma_clean_running_descriptor()
739 * @chan : X-Gene DMA channel
741 * LOCKING: must hold chan->desc_lock
743 static void xgene_chan_xfer_ld_pending(struct xgene_dma_chan *chan) xgene_chan_xfer_ld_pending() argument
752 if (list_empty(&chan->ld_pending)) { xgene_chan_xfer_ld_pending()
753 chan_dbg(chan, "No pending LDs\n"); xgene_chan_xfer_ld_pending()
761 list_for_each_entry_safe(desc_sw, _desc_sw, &chan->ld_pending, node) { xgene_chan_xfer_ld_pending()
768 if (chan->pending >= chan->max_outstanding) xgene_chan_xfer_ld_pending()
771 ret = xgene_chan_xfer_request(&chan->tx_ring, desc_sw); xgene_chan_xfer_ld_pending()
779 list_move_tail(&desc_sw->node, &chan->ld_running); xgene_chan_xfer_ld_pending()
782 chan->pending++; xgene_chan_xfer_ld_pending()
789 * @chan: X-Gene DMA channel
795 static void xgene_dma_cleanup_descriptors(struct xgene_dma_chan *chan) xgene_dma_cleanup_descriptors() argument
797 struct xgene_dma_ring *ring = &chan->rx_ring; xgene_dma_cleanup_descriptors()
803 xgene_dma_clean_completed_descriptor(chan); xgene_dma_cleanup_descriptors()
806 list_for_each_entry_safe(desc_sw, _desc_sw, &chan->ld_running, node) { xgene_dma_cleanup_descriptors()
825 chan_err(chan, "%s\n", xgene_dma_desc_err[status]); xgene_dma_cleanup_descriptors()
847 xgene_dma_run_tx_complete_actions(chan, desc_sw); xgene_dma_cleanup_descriptors()
849 xgene_dma_clean_running_descriptor(chan, desc_sw); xgene_dma_cleanup_descriptors()
855 chan->pending--; xgene_dma_cleanup_descriptors()
863 xgene_chan_xfer_ld_pending(chan); xgene_dma_cleanup_descriptors()
868 struct xgene_dma_chan *chan = to_dma_chan(dchan); xgene_dma_alloc_chan_resources() local
871 if (chan->desc_pool) xgene_dma_alloc_chan_resources()
874 chan->desc_pool = dma_pool_create(chan->name, chan->dev, xgene_dma_alloc_chan_resources()
877 if (!chan->desc_pool) { xgene_dma_alloc_chan_resources()
878 chan_err(chan, "Failed to allocate descriptor pool\n"); xgene_dma_alloc_chan_resources()
882 chan_dbg(chan, "Allocate descriptor pool\n"); xgene_dma_alloc_chan_resources()
889 * @chan: X-Gene DMA channel
892 * LOCKING: must hold chan->desc_lock
894 static void xgene_dma_free_desc_list(struct xgene_dma_chan *chan, xgene_dma_free_desc_list() argument
900 xgene_dma_clean_descriptor(chan, desc); xgene_dma_free_desc_list()
903 static void xgene_dma_free_tx_desc_list(struct xgene_dma_chan *chan, xgene_dma_free_tx_desc_list() argument
909 xgene_dma_clean_descriptor(chan, desc); xgene_dma_free_tx_desc_list()
914 struct xgene_dma_chan *chan = to_dma_chan(dchan); xgene_dma_free_chan_resources() local
916 chan_dbg(chan, "Free all resources\n"); xgene_dma_free_chan_resources()
918 if (!chan->desc_pool) xgene_dma_free_chan_resources()
921 spin_lock_bh(&chan->lock); xgene_dma_free_chan_resources()
924 xgene_dma_cleanup_descriptors(chan); xgene_dma_free_chan_resources()
927 xgene_dma_free_desc_list(chan, &chan->ld_pending); xgene_dma_free_chan_resources()
928 xgene_dma_free_desc_list(chan, &chan->ld_running); xgene_dma_free_chan_resources()
929 xgene_dma_free_desc_list(chan, &chan->ld_completed); xgene_dma_free_chan_resources()
931 spin_unlock_bh(&chan->lock); xgene_dma_free_chan_resources()
934 dma_pool_destroy(chan->desc_pool); xgene_dma_free_chan_resources()
935 chan->desc_pool = NULL; xgene_dma_free_chan_resources()
943 struct xgene_dma_chan *chan; xgene_dma_prep_memcpy() local
949 chan = to_dma_chan(dchan); xgene_dma_prep_memcpy()
953 new = xgene_dma_alloc_descriptor(chan); xgene_dma_prep_memcpy()
961 xgene_dma_prep_cpy_desc(chan, new, dst, src, copy); xgene_dma_prep_memcpy()
988 xgene_dma_free_tx_desc_list(chan, &first->tx_list); xgene_dma_prep_memcpy()
998 struct xgene_dma_chan *chan; xgene_dma_prep_sg() local
1012 chan = to_dma_chan(dchan); xgene_dma_prep_sg()
1032 new = xgene_dma_alloc_descriptor(chan); xgene_dma_prep_sg()
1037 xgene_dma_prep_cpy_desc(chan, new, dst, src, len); xgene_dma_prep_sg()
1096 xgene_dma_free_tx_desc_list(chan, &first->tx_list); xgene_dma_prep_sg()
1105 struct xgene_dma_chan *chan; xgene_dma_prep_xor() local
1112 chan = to_dma_chan(dchan); xgene_dma_prep_xor()
1116 new = xgene_dma_alloc_descriptor(chan); xgene_dma_prep_xor()
1121 xgene_dma_prep_xor_desc(chan, new, &dst, src, xgene_dma_prep_xor()
1144 xgene_dma_free_tx_desc_list(chan, &first->tx_list); xgene_dma_prep_xor()
1153 struct xgene_dma_chan *chan; xgene_dma_prep_pq() local
1161 chan = to_dma_chan(dchan); xgene_dma_prep_pq()
1178 new = xgene_dma_alloc_descriptor(chan); xgene_dma_prep_pq()
1196 xgene_dma_prep_xor_desc(chan, new, &dst[0], src, xgene_dma_prep_pq()
1206 xgene_dma_prep_xor_desc(chan, new, &dst[1], _src, xgene_dma_prep_pq()
1221 xgene_dma_free_tx_desc_list(chan, &first->tx_list); xgene_dma_prep_pq()
1227 struct xgene_dma_chan *chan = to_dma_chan(dchan); xgene_dma_issue_pending() local
1229 spin_lock_bh(&chan->lock); xgene_dma_issue_pending()
1230 xgene_chan_xfer_ld_pending(chan); xgene_dma_issue_pending()
1231 spin_unlock_bh(&chan->lock); xgene_dma_issue_pending()
1243 struct xgene_dma_chan *chan = (struct xgene_dma_chan *)data; xgene_dma_tasklet_cb() local
1245 spin_lock_bh(&chan->lock); xgene_dma_tasklet_cb()
1248 xgene_dma_cleanup_descriptors(chan); xgene_dma_tasklet_cb()
1251 enable_irq(chan->rx_irq); xgene_dma_tasklet_cb()
1253 spin_unlock_bh(&chan->lock); xgene_dma_tasklet_cb()
1258 struct xgene_dma_chan *chan = (struct xgene_dma_chan *)id; xgene_dma_chan_ring_isr() local
1260 BUG_ON(!chan); xgene_dma_chan_ring_isr()
1266 disable_irq_nosync(chan->rx_irq); xgene_dma_chan_ring_isr()
1273 tasklet_schedule(&chan->tasklet); xgene_dma_chan_ring_isr()
1401 static int xgene_dma_get_ring_size(struct xgene_dma_chan *chan, xgene_dma_get_ring_size() argument
1423 chan_err(chan, "Unsupported cfg ring size %d\n", cfgsize); xgene_dma_get_ring_size()
1443 static void xgene_dma_delete_chan_rings(struct xgene_dma_chan *chan) xgene_dma_delete_chan_rings() argument
1445 xgene_dma_delete_ring_one(&chan->rx_ring); xgene_dma_delete_chan_rings()
1446 xgene_dma_delete_ring_one(&chan->tx_ring); xgene_dma_delete_chan_rings()
1449 static int xgene_dma_create_ring_one(struct xgene_dma_chan *chan, xgene_dma_create_ring_one() argument
1454 ring->pdma = chan->pdma; xgene_dma_create_ring_one()
1456 ring->num = chan->pdma->ring_num++; xgene_dma_create_ring_one()
1459 ring->size = xgene_dma_get_ring_size(chan, cfgsize); xgene_dma_create_ring_one()
1464 ring->desc_vaddr = dma_zalloc_coherent(chan->dev, ring->size, xgene_dma_create_ring_one()
1467 chan_err(chan, "Failed to allocate ring desc\n"); xgene_dma_create_ring_one()
1478 static int xgene_dma_create_chan_rings(struct xgene_dma_chan *chan) xgene_dma_create_chan_rings() argument
1480 struct xgene_dma_ring *rx_ring = &chan->rx_ring; xgene_dma_create_chan_rings()
1481 struct xgene_dma_ring *tx_ring = &chan->tx_ring; xgene_dma_create_chan_rings()
1486 rx_ring->buf_num = XGENE_DMA_CPU_BUFNUM + chan->id; xgene_dma_create_chan_rings()
1488 ret = xgene_dma_create_ring_one(chan, rx_ring, xgene_dma_create_chan_rings()
1493 chan_dbg(chan, "Rx ring id 0x%X num %d desc 0x%p\n", xgene_dma_create_chan_rings()
1498 tx_ring->buf_num = XGENE_DMA_BUFNUM + chan->id; xgene_dma_create_chan_rings()
1500 ret = xgene_dma_create_ring_one(chan, tx_ring, xgene_dma_create_chan_rings()
1509 chan_dbg(chan, xgene_dma_create_chan_rings()
1514 chan->max_outstanding = rx_ring->slots; xgene_dma_create_chan_rings()
1524 ret = xgene_dma_create_chan_rings(&pdma->chan[i]); xgene_dma_init_rings()
1527 xgene_dma_delete_chan_rings(&pdma->chan[j]); xgene_dma_init_rings()
1695 struct xgene_dma_chan *chan; xgene_dma_request_irqs() local
1709 chan = &pdma->chan[i]; xgene_dma_request_irqs()
1710 ret = devm_request_irq(chan->dev, chan->rx_irq, xgene_dma_request_irqs()
1712 0, chan->name, chan); xgene_dma_request_irqs()
1714 chan_err(chan, "Failed to register Rx IRQ %d\n", xgene_dma_request_irqs()
1715 chan->rx_irq); xgene_dma_request_irqs()
1719 chan = &pdma->chan[i]; xgene_dma_request_irqs()
1720 devm_free_irq(chan->dev, chan->rx_irq, chan); xgene_dma_request_irqs()
1732 struct xgene_dma_chan *chan; xgene_dma_free_irqs() local
1739 chan = &pdma->chan[i]; xgene_dma_free_irqs()
1740 devm_free_irq(chan->dev, chan->rx_irq, chan); xgene_dma_free_irqs()
1744 static void xgene_dma_set_caps(struct xgene_dma_chan *chan, xgene_dma_set_caps() argument
1763 if ((chan->id == XGENE_DMA_PQ_CHANNEL) && xgene_dma_set_caps()
1764 is_pq_enabled(chan->pdma)) { xgene_dma_set_caps()
1767 } else if ((chan->id == XGENE_DMA_XOR_CHANNEL) && xgene_dma_set_caps()
1768 !is_pq_enabled(chan->pdma)) { xgene_dma_set_caps()
1773 dma_dev->dev = chan->dev; xgene_dma_set_caps()
1796 struct xgene_dma_chan *chan = &pdma->chan[id]; xgene_dma_async_register() local
1800 chan->dma_chan.device = dma_dev; xgene_dma_async_register()
1802 spin_lock_init(&chan->lock); xgene_dma_async_register()
1803 INIT_LIST_HEAD(&chan->ld_pending); xgene_dma_async_register()
1804 INIT_LIST_HEAD(&chan->ld_running); xgene_dma_async_register()
1805 INIT_LIST_HEAD(&chan->ld_completed); xgene_dma_async_register()
1806 tasklet_init(&chan->tasklet, xgene_dma_tasklet_cb, xgene_dma_async_register()
1807 (unsigned long)chan); xgene_dma_async_register()
1809 chan->pending = 0; xgene_dma_async_register()
1810 chan->desc_pool = NULL; xgene_dma_async_register()
1811 dma_cookie_init(&chan->dma_chan); xgene_dma_async_register()
1814 xgene_dma_set_caps(chan, dma_dev); xgene_dma_async_register()
1818 list_add_tail(&chan->dma_chan.device_node, &dma_dev->channels); xgene_dma_async_register()
1823 chan_err(chan, "Failed to register async device %d", ret); xgene_dma_async_register()
1824 tasklet_kill(&chan->tasklet); xgene_dma_async_register()
1831 "%s: CAPABILITY ( %s%s%s%s)\n", dma_chan_name(&chan->dma_chan), xgene_dma_async_register()
1849 tasklet_kill(&pdma->chan[j].tasklet); xgene_dma_init_async()
1869 struct xgene_dma_chan *chan; xgene_dma_init_channels() local
1875 chan = &pdma->chan[i]; xgene_dma_init_channels()
1876 chan->dev = pdma->dev; xgene_dma_init_channels()
1877 chan->pdma = pdma; xgene_dma_init_channels()
1878 chan->id = i; xgene_dma_init_channels()
1879 snprintf(chan->name, sizeof(chan->name), "dmachan%d", chan->id); xgene_dma_init_channels()
1962 pdma->chan[i - 1].rx_irq = irq; xgene_dma_get_resources()
2035 xgene_dma_delete_chan_rings(&pdma->chan[i]); xgene_dma_probe()
2047 struct xgene_dma_chan *chan; xgene_dma_remove() local
2058 chan = &pdma->chan[i]; xgene_dma_remove()
2059 tasklet_kill(&chan->tasklet); xgene_dma_remove()
2060 xgene_dma_delete_chan_rings(chan); xgene_dma_remove()
586 xgene_dma_alloc_descriptor( struct xgene_dma_chan *chan) xgene_dma_alloc_descriptor() argument
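The xgene hits above trace a three-list descriptor lifecycle: tx_submit() splices new work onto ld_pending, xgene_chan_xfer_ld_pending() moves it to ld_running once it is pushed to the hardware ring (bounded by max_outstanding), and the cleanup path retires it via ld_completed. A minimal sketch of the pending-to-running handoff, assuming kernel context; the my_chan/my_desc types and the push_to_hw() helper are hypothetical stand-ins for the driver's own:

#include <linux/list.h>
#include <linux/spinlock.h>

struct my_desc {
	struct list_head node;
};

struct my_chan {
	spinlock_t lock;		/* taken with spin_lock_bh(), as above */
	struct list_head ld_pending;	/* submitted, not yet on the ring */
	struct list_head ld_running;	/* currently owned by the hardware */
	unsigned int pending;
	unsigned int max_outstanding;	/* rx ring slots, per create_chan_rings */
};

static int push_to_hw(struct my_desc *d);	/* hypothetical ring write */

/* caller holds chan->lock, mirroring xgene_chan_xfer_ld_pending() */
static void my_xfer_ld_pending(struct my_chan *chan)
{
	struct my_desc *d, *tmp;

	list_for_each_entry_safe(d, tmp, &chan->ld_pending, node) {
		if (chan->pending >= chan->max_outstanding)
			return;	/* ring full; resume after completions */
		if (push_to_hw(d))
			return;
		list_move_tail(&d->node, &chan->ld_running);
		chan->pending++;
	}
}

The cleanup tasklet decrements chan->pending for each retired descriptor and then calls the same function, which is what keeps the ring topped up.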
H A Dtimb_dma.c83 struct dma_chan chan; member in struct:timb_dma_chan
106 static struct device *chan2dev(struct dma_chan *chan) chan2dev() argument
108 return &chan->dev->device; chan2dev()
110 static struct device *chan2dmadev(struct dma_chan *chan) chan2dmadev() argument
112 return chan2dev(chan)->parent->parent; chan2dmadev()
117 int id = td_chan->chan.chan_id; tdchantotd()
125 int id = td_chan->chan.chan_id; __td_enable_chan_irq()
132 dev_dbg(chan2dev(&td_chan->chan), "Enabling irq: %d, IER: 0x%x\n", id, __td_enable_chan_irq()
140 int id = td_chan->chan.chan_id; __td_dma_done_ack()
146 dev_dbg(chan2dev(&td_chan->chan), "Checking irq: %d, td: %p\n", id, td); __td_dma_done_ack()
161 dev_err(chan2dev(&td_chan->chan), "Too big sg element\n"); td_fill_desc()
167 dev_err(chan2dev(&td_chan->chan), "Incorrect length: %d\n", td_fill_desc()
172 dev_dbg(chan2dev(&td_chan->chan), "desc: %p, addr: 0x%llx\n", td_fill_desc()
195 dev_err(chan2dev(&td_chan->chan), __td_start_dma()
203 dev_dbg(chan2dev(&td_chan->chan), __td_start_dma()
204 "td_chan: %p, chan: %d, membase: %p\n", __td_start_dma()
205 td_chan, td_chan->chan.chan_id, td_chan->membase); __td_start_dma()
246 dev_dbg(chan2dev(&td_chan->chan), "descriptor %u complete\n", __td_finish()
302 dev_dbg(chan2dev(&td_chan->chan), "%s: started %u\n", __td_start_next()
313 struct timb_dma_chan *td_chan = container_of(txd->chan, td_tx_submit()
314 struct timb_dma_chan, chan); td_tx_submit()
321 dev_dbg(chan2dev(txd->chan), "%s: started %u\n", __func__, td_tx_submit()
326 dev_dbg(chan2dev(txd->chan), "tx_submit: queued %u\n", td_tx_submit()
339 struct dma_chan *chan = &td_chan->chan; td_alloc_init_desc() local
345 dev_err(chan2dev(chan), "Failed to alloc descriptor\n"); td_alloc_init_desc()
353 dev_err(chan2dev(chan), "Failed to alloc descriptor\n"); td_alloc_init_desc()
357 dma_async_tx_descriptor_init(&td_desc->txd, chan); td_alloc_init_desc()
361 td_desc->txd.phys = dma_map_single(chan2dmadev(chan), td_alloc_init_desc()
364 err = dma_mapping_error(chan2dmadev(chan), td_desc->txd.phys); td_alloc_init_desc()
366 dev_err(chan2dev(chan), "DMA mapping error: %d\n", err); td_alloc_init_desc()
381 dev_dbg(chan2dev(td_desc->txd.chan), "Freeing desc: %p\n", td_desc); td_free_desc()
382 dma_unmap_single(chan2dmadev(td_desc->txd.chan), td_desc->txd.phys, td_free_desc()
392 dev_dbg(chan2dev(&td_chan->chan), "Putting desc: %p\n", td_desc); td_desc_put()
412 dev_dbg(chan2dev(&td_chan->chan), "desc %p not ACKed\n", td_desc_get()
420 static int td_alloc_chan_resources(struct dma_chan *chan) td_alloc_chan_resources() argument
423 container_of(chan, struct timb_dma_chan, chan); td_alloc_chan_resources()
426 dev_dbg(chan2dev(chan), "%s: entry\n", __func__); td_alloc_chan_resources()
435 dev_err(chan2dev(chan), td_alloc_chan_resources()
445 dma_cookie_init(chan); td_alloc_chan_resources()
451 static void td_free_chan_resources(struct dma_chan *chan) td_free_chan_resources() argument
454 container_of(chan, struct timb_dma_chan, chan); td_free_chan_resources()
458 dev_dbg(chan2dev(chan), "%s: Entry\n", __func__); td_free_chan_resources()
469 dev_dbg(chan2dev(chan), "%s: Freeing desc: %p\n", __func__, td_free_chan_resources()
475 static enum dma_status td_tx_status(struct dma_chan *chan, dma_cookie_t cookie, td_tx_status() argument
480 dev_dbg(chan2dev(chan), "%s: Entry\n", __func__); td_tx_status()
482 ret = dma_cookie_status(chan, cookie, txstate); td_tx_status()
484 dev_dbg(chan2dev(chan), "%s: exit, ret: %d\n", __func__, ret); td_tx_status()
489 static void td_issue_pending(struct dma_chan *chan) td_issue_pending() argument
492 container_of(chan, struct timb_dma_chan, chan); td_issue_pending()
494 dev_dbg(chan2dev(chan), "%s: Entry\n", __func__); td_issue_pending()
508 static struct dma_async_tx_descriptor *td_prep_slave_sg(struct dma_chan *chan, td_prep_slave_sg() argument
514 container_of(chan, struct timb_dma_chan, chan); td_prep_slave_sg()
521 dev_err(chan2dev(chan), "%s: No SG list\n", __func__); td_prep_slave_sg()
527 dev_err(chan2dev(chan), td_prep_slave_sg()
534 dev_err(chan2dev(chan), "Not enough descriptors available\n"); td_prep_slave_sg()
543 dev_err(chan2dev(chan), "No descriptor space\n"); for_each_sg()
550 dev_err(chan2dev(chan), "Failed to update desc: %d\n", for_each_sg()
558 dma_sync_single_for_device(chan2dmadev(chan), td_desc->txd.phys,
564 static int td_terminate_all(struct dma_chan *chan) td_terminate_all() argument
567 container_of(chan, struct timb_dma_chan, chan); td_terminate_all()
570 dev_dbg(chan2dev(chan), "%s: Entry\n", __func__); td_terminate_all()
714 td_chan->chan.device = &td->dma; td_probe()
715 dma_cookie_init(&td_chan->chan); td_probe()
734 list_add_tail(&td_chan->chan.device_node, &td->dma.channels); td_probe()
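timb_dma builds its hardware descriptors in ordinary kernel memory and hands them to the engine through the streaming DMA API: map once when the descriptor is allocated, check the mapping, then sync for the device after every CPU-side rewrite. A minimal sketch of that sequence, assuming kernel context; the helpers are hypothetical, and dev stands for the DMA-capable parent device that chan2dmadev() digs out above:

#include <linux/dma-mapping.h>

/* map a CPU-built descriptor once, as td_alloc_init_desc() does */
static int my_map_desc(struct device *dev, void *desc, size_t len,
		       dma_addr_t *phys)
{
	*phys = dma_map_single(dev, desc, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *phys))
		return -ENOMEM;	/* never hand a bad handle to the engine */
	return 0;
}

/* flush CPU writes before the engine may read the descriptor, as
 * td_prep_slave_sg() does just before returning it */
static void my_desc_ready(struct device *dev, dma_addr_t phys, size_t len)
{
	dma_sync_single_for_device(dev, phys, len, DMA_TO_DEVICE);
}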
H A Dat_xdmac.c195 struct dma_chan chan; member in struct:at_xdmac_chan
226 struct at_xdmac_chan chan[0]; member in struct:at_xdmac
268 return container_of(dchan, struct at_xdmac_chan, chan); to_at_xdmac_chan()
271 static struct device *chan2dev(struct dma_chan *chan) chan2dev() argument
273 return &chan->dev->device; chan2dev()
338 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device); at_xdmac_start_xfer()
341 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, first); at_xdmac_start_xfer()
375 dev_vdbg(chan2dev(&atchan->chan), at_xdmac_start_xfer()
397 dev_vdbg(chan2dev(&atchan->chan), at_xdmac_start_xfer()
402 dev_vdbg(chan2dev(&atchan->chan), at_xdmac_start_xfer()
416 struct at_xdmac_chan *atchan = to_at_xdmac_chan(tx->chan); at_xdmac_tx_submit()
423 dev_vdbg(chan2dev(tx->chan), "%s: atchan 0x%p, add desc 0x%p to xfers_list\n", at_xdmac_tx_submit()
433 static struct at_xdmac_desc *at_xdmac_alloc_desc(struct dma_chan *chan, at_xdmac_alloc_desc() argument
437 struct at_xdmac *atxdmac = to_at_xdmac(chan->device); at_xdmac_alloc_desc()
444 dma_async_tx_descriptor_init(&desc->tx_dma_desc, chan); at_xdmac_alloc_desc()
458 desc = at_xdmac_alloc_desc(&atchan->chan, GFP_NOWAIT); at_xdmac_get_desc()
474 struct dma_chan *chan; at_xdmac_xlate() local
482 chan = dma_get_any_slave_channel(&atxdmac->dma); at_xdmac_xlate()
483 if (!chan) { at_xdmac_xlate()
488 atchan = to_at_xdmac_chan(chan); at_xdmac_xlate()
492 dev_dbg(dev, "chan dt cfg: memif=%u perif=%u perid=%u\n", at_xdmac_xlate()
495 return chan; at_xdmac_xlate()
498 static int at_xdmac_compute_chan_conf(struct dma_chan *chan, at_xdmac_compute_chan_conf() argument
501 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_compute_chan_conf()
517 dev_err(chan2dev(chan), "invalid src maxburst value\n"); at_xdmac_compute_chan_conf()
523 dev_err(chan2dev(chan), "invalid src addr width value\n"); at_xdmac_compute_chan_conf()
540 dev_err(chan2dev(chan), "invalid src maxburst value\n"); at_xdmac_compute_chan_conf()
546 dev_err(chan2dev(chan), "invalid dst addr width value\n"); at_xdmac_compute_chan_conf()
552 dev_dbg(chan2dev(chan), "%s: cfg=0x%08x\n", __func__, atchan->cfg); at_xdmac_compute_chan_conf()
575 static int at_xdmac_set_slave_config(struct dma_chan *chan, at_xdmac_set_slave_config() argument
578 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_set_slave_config()
581 dev_err(chan2dev(chan), "invalid slave configuration\n"); at_xdmac_set_slave_config()
591 at_xdmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, at_xdmac_prep_slave_sg() argument
595 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_prep_slave_sg()
607 dev_err(chan2dev(chan), "invalid DMA direction\n"); at_xdmac_prep_slave_sg()
611 dev_dbg(chan2dev(chan), "%s: sg_len=%d, dir=%s, flags=0x%lx\n", at_xdmac_prep_slave_sg()
619 if (at_xdmac_compute_chan_conf(chan, direction)) at_xdmac_prep_slave_sg()
630 dev_err(chan2dev(chan), "sg data length is zero\n"); for_each_sg()
633 dev_dbg(chan2dev(chan), "%s: * sg%d len=%u, mem=0x%08x\n", for_each_sg()
638 dev_err(chan2dev(chan), "can't get descriptor\n"); for_each_sg()
663 dev_dbg(chan2dev(chan), for_each_sg()
670 dev_dbg(chan2dev(chan), for_each_sg()
679 dev_dbg(chan2dev(chan), "%s: add desc 0x%p to descs_list 0x%p\n", for_each_sg()
697 at_xdmac_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, at_xdmac_prep_dma_cyclic() argument
702 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_prep_dma_cyclic()
708 dev_dbg(chan2dev(chan), "%s: buf_addr=%pad, buf_len=%zd, period_len=%zd, dir=%s, flags=0x%lx\n", at_xdmac_prep_dma_cyclic()
713 dev_err(chan2dev(chan), "invalid DMA direction\n"); at_xdmac_prep_dma_cyclic()
718 dev_err(chan2dev(chan), "channel currently used\n"); at_xdmac_prep_dma_cyclic()
722 if (at_xdmac_compute_chan_conf(chan, direction)) at_xdmac_prep_dma_cyclic()
731 dev_err(chan2dev(chan), "can't get descriptor\n"); at_xdmac_prep_dma_cyclic()
738 dev_dbg(chan2dev(chan), at_xdmac_prep_dma_cyclic()
756 dev_dbg(chan2dev(chan), at_xdmac_prep_dma_cyclic()
763 dev_dbg(chan2dev(chan), at_xdmac_prep_dma_cyclic()
772 dev_dbg(chan2dev(chan), "%s: add desc 0x%p to descs_list 0x%p\n", at_xdmac_prep_dma_cyclic()
778 dev_dbg(chan2dev(chan), at_xdmac_prep_dma_cyclic()
789 at_xdmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src, at_xdmac_prep_dma_memcpy() argument
792 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_prep_dma_memcpy()
813 dev_dbg(chan2dev(chan), "%s: src=%pad, dest=%pad, len=%zd, flags=0x%lx\n", at_xdmac_prep_dma_memcpy()
826 dev_dbg(chan2dev(chan), "%s: dwidth: double word\n", __func__); at_xdmac_prep_dma_memcpy()
829 dev_dbg(chan2dev(chan), "%s: dwidth: word\n", __func__); at_xdmac_prep_dma_memcpy()
832 dev_dbg(chan2dev(chan), "%s: dwidth: half word\n", __func__); at_xdmac_prep_dma_memcpy()
835 dev_dbg(chan2dev(chan), "%s: dwidth: byte\n", __func__); at_xdmac_prep_dma_memcpy()
842 dev_dbg(chan2dev(chan), "%s: remaining_size=%zu\n", __func__, remaining_size); at_xdmac_prep_dma_memcpy()
848 dev_err(chan2dev(chan), "can't get descriptor\n"); at_xdmac_prep_dma_memcpy()
863 dev_dbg(chan2dev(chan), "%s: xfer_size=%zu\n", __func__, xfer_size); at_xdmac_prep_dma_memcpy()
868 dev_dbg(chan2dev(chan), "%s: dwidth: double word\n", __func__); at_xdmac_prep_dma_memcpy()
871 dev_dbg(chan2dev(chan), "%s: dwidth: word\n", __func__); at_xdmac_prep_dma_memcpy()
874 dev_dbg(chan2dev(chan), "%s: dwidth: half word\n", __func__); at_xdmac_prep_dma_memcpy()
877 dev_dbg(chan2dev(chan), "%s: dwidth: byte\n", __func__); at_xdmac_prep_dma_memcpy()
893 dev_dbg(chan2dev(chan), at_xdmac_prep_dma_memcpy()
900 dev_dbg(chan2dev(chan), at_xdmac_prep_dma_memcpy()
909 dev_dbg(chan2dev(chan), "%s: add desc 0x%p to descs_list 0x%p\n", at_xdmac_prep_dma_memcpy()
921 at_xdmac_tx_status(struct dma_chan *chan, dma_cookie_t cookie, at_xdmac_tx_status() argument
924 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_tx_status()
925 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device); at_xdmac_tx_status()
934 ret = dma_cookie_status(chan, cookie, txstate); at_xdmac_tx_status()
1019 dev_dbg(chan2dev(chan),
1032 dev_dbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); at_xdmac_remove_xfer()
1057 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); at_xdmac_advance_work()
1083 dev_dbg(chan2dev(&atchan->chan), "%s: status=0x%08lx\n", at_xdmac_tasklet()
1097 dev_err(chan2dev(&atchan->chan), "read bus error!!!"); at_xdmac_tasklet()
1099 dev_err(chan2dev(&atchan->chan), "write bus error!!!"); at_xdmac_tasklet()
1101 dev_err(chan2dev(&atchan->chan), "request overflow error!!!"); at_xdmac_tasklet()
1107 dev_vdbg(chan2dev(&atchan->chan), "%s: desc 0x%p\n", __func__, desc); at_xdmac_tasklet()
1152 atchan = &atxdmac->chan[i]; at_xdmac_interrupt()
1157 "%s: chan%d: imr=0x%x, status=0x%x\n", at_xdmac_interrupt()
1159 dev_vdbg(chan2dev(&atchan->chan), at_xdmac_interrupt()
1181 static void at_xdmac_issue_pending(struct dma_chan *chan) at_xdmac_issue_pending() argument
1183 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_issue_pending()
1185 dev_dbg(chan2dev(&atchan->chan), "%s\n", __func__); at_xdmac_issue_pending()
1193 static int at_xdmac_device_config(struct dma_chan *chan, at_xdmac_device_config() argument
1196 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_device_config()
1200 dev_dbg(chan2dev(chan), "%s\n", __func__); at_xdmac_device_config()
1203 ret = at_xdmac_set_slave_config(chan, config); at_xdmac_device_config()
1209 static int at_xdmac_device_pause(struct dma_chan *chan) at_xdmac_device_pause() argument
1211 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_device_pause()
1212 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device); at_xdmac_device_pause()
1215 dev_dbg(chan2dev(chan), "%s\n", __func__); at_xdmac_device_pause()
1230 static int at_xdmac_device_resume(struct dma_chan *chan) at_xdmac_device_resume() argument
1232 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_device_resume()
1233 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device); at_xdmac_device_resume()
1236 dev_dbg(chan2dev(chan), "%s\n", __func__); at_xdmac_device_resume()
1251 static int at_xdmac_device_terminate_all(struct dma_chan *chan) at_xdmac_device_terminate_all() argument
1254 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_device_terminate_all()
1255 struct at_xdmac *atxdmac = to_at_xdmac(atchan->chan.device); at_xdmac_device_terminate_all()
1258 dev_dbg(chan2dev(chan), "%s\n", __func__); at_xdmac_device_terminate_all()
1276 static int at_xdmac_alloc_chan_resources(struct dma_chan *chan) at_xdmac_alloc_chan_resources() argument
1278 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_alloc_chan_resources()
1286 dev_err(chan2dev(chan), at_xdmac_alloc_chan_resources()
1293 dev_err(chan2dev(chan), at_xdmac_alloc_chan_resources()
1300 desc = at_xdmac_alloc_desc(chan, GFP_ATOMIC); at_xdmac_alloc_chan_resources()
1302 dev_warn(chan2dev(chan), at_xdmac_alloc_chan_resources()
1309 dma_cookie_init(chan); at_xdmac_alloc_chan_resources()
1311 dev_dbg(chan2dev(chan), "%s: allocated %d descriptors\n", __func__, i); at_xdmac_alloc_chan_resources()
1318 static void at_xdmac_free_chan_resources(struct dma_chan *chan) at_xdmac_free_chan_resources() argument
1320 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); at_xdmac_free_chan_resources()
1321 struct at_xdmac *atxdmac = to_at_xdmac(chan->device); at_xdmac_free_chan_resources()
1325 dev_dbg(chan2dev(chan), "%s: freeing descriptor %p\n", __func__, desc); at_xdmac_free_chan_resources()
1338 struct dma_chan *chan, *_chan; atmel_xdmac_prepare() local
1340 list_for_each_entry_safe(chan, _chan, &atxdmac->dma.channels, device_node) { atmel_xdmac_prepare()
1341 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); atmel_xdmac_prepare()
1358 struct dma_chan *chan, *_chan; atmel_xdmac_suspend() local
1360 list_for_each_entry_safe(chan, _chan, &atxdmac->dma.channels, device_node) { atmel_xdmac_suspend()
1361 struct at_xdmac_chan *atchan = to_at_xdmac_chan(chan); atmel_xdmac_suspend()
1366 at_xdmac_device_pause(chan); atmel_xdmac_suspend()
1384 struct dma_chan *chan, *_chan; atmel_xdmac_resume() local
1391 atchan = &atxdmac->chan[i]; atmel_xdmac_resume()
1398 list_for_each_entry_safe(chan, _chan, &atxdmac->dma.channels, device_node) { atmel_xdmac_resume()
1399 atchan = to_at_xdmac_chan(chan); atmel_xdmac_resume()
1403 at_xdmac_device_resume(chan); atmel_xdmac_resume()
1518 struct at_xdmac_chan *atchan = &atxdmac->chan[i]; at_xdmac_probe()
1520 atchan->chan.device = &atxdmac->dma; at_xdmac_probe()
1521 list_add_tail(&atchan->chan.device_node, at_xdmac_probe()
1581 struct at_xdmac_chan *atchan = &atxdmac->chan[i]; at_xdmac_remove()
1584 at_xdmac_free_chan_resources(&atchan->chan); at_xdmac_remove()
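Nearly every hit in these drivers funnels through a to_xxx_chan() helper, and all of them are the same container_of() idiom: recover the driver-private channel from the embedded struct dma_chan the core hands back. A runnable userspace illustration of the pointer arithmetic (the struct names are made up; no kernel headers needed):

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct dma_chan { int chan_id; };

struct my_xdmac_chan {		/* stand-in for at_xdmac_chan */
	unsigned int cfg;
	struct dma_chan chan;	/* embedded mid-struct, as in the hits above */
};

int main(void)
{
	struct my_xdmac_chan atchan = { .cfg = 0x42, .chan = { .chan_id = 3 } };
	struct dma_chan *dchan = &atchan.chan;	/* what the core passes around */
	struct my_xdmac_chan *back =
		container_of(dchan, struct my_xdmac_chan, chan);

	printf("cfg=0x%x chan_id=%d same=%d\n",
	       back->cfg, back->chan.chan_id, back == &atchan);
	return 0;
}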
H A Dmv_xor.c33 static void mv_xor_issue_pending(struct dma_chan *chan);
35 #define to_mv_xor_chan(chan) \
36 container_of(chan, struct mv_xor_chan, dmachan)
41 #define mv_chan_to_devp(chan) \
42 ((chan)->dmadev.dev)
82 static u32 mv_chan_get_current_desc(struct mv_xor_chan *chan) mv_chan_get_current_desc() argument
84 return readl_relaxed(XOR_CURR_DESC(chan)); mv_chan_get_current_desc()
87 static void mv_chan_set_next_descriptor(struct mv_xor_chan *chan, mv_chan_set_next_descriptor() argument
90 writel_relaxed(next_desc_addr, XOR_NEXT_DESC(chan)); mv_chan_set_next_descriptor()
93 static void mv_chan_unmask_interrupts(struct mv_xor_chan *chan) mv_chan_unmask_interrupts() argument
95 u32 val = readl_relaxed(XOR_INTR_MASK(chan)); mv_chan_unmask_interrupts()
96 val |= XOR_INTR_MASK_VALUE << (chan->idx * 16); mv_chan_unmask_interrupts()
97 writel_relaxed(val, XOR_INTR_MASK(chan)); mv_chan_unmask_interrupts()
100 static u32 mv_chan_get_intr_cause(struct mv_xor_chan *chan) mv_chan_get_intr_cause() argument
102 u32 intr_cause = readl_relaxed(XOR_INTR_CAUSE(chan)); mv_chan_get_intr_cause()
103 intr_cause = (intr_cause >> (chan->idx * 16)) & 0xFFFF; mv_chan_get_intr_cause()
107 static void mv_xor_device_clear_eoc_cause(struct mv_xor_chan *chan) mv_xor_device_clear_eoc_cause() argument
112 val = ~(val << (chan->idx * 16)); mv_xor_device_clear_eoc_cause()
113 dev_dbg(mv_chan_to_devp(chan), "%s, val 0x%08x\n", __func__, val); mv_xor_device_clear_eoc_cause()
114 writel_relaxed(val, XOR_INTR_CAUSE(chan)); mv_xor_device_clear_eoc_cause()
117 static void mv_xor_device_clear_err_status(struct mv_xor_chan *chan) mv_xor_device_clear_err_status() argument
119 u32 val = 0xFFFF0000 >> (chan->idx * 16); mv_xor_device_clear_err_status()
120 writel_relaxed(val, XOR_INTR_CAUSE(chan)); mv_xor_device_clear_err_status()
123 static void mv_set_mode(struct mv_xor_chan *chan, mv_set_mode() argument
127 u32 config = readl_relaxed(XOR_CONFIG(chan)); mv_set_mode()
137 dev_err(mv_chan_to_devp(chan), mv_set_mode()
153 writel_relaxed(config, XOR_CONFIG(chan)); mv_set_mode()
154 chan->current_type = type; mv_set_mode()
157 static void mv_chan_activate(struct mv_xor_chan *chan) mv_chan_activate() argument
159 dev_dbg(mv_chan_to_devp(chan), " activate chan.\n"); mv_chan_activate()
162 writel(BIT(0), XOR_ACTIVATION(chan)); mv_chan_activate()
165 static char mv_chan_is_busy(struct mv_xor_chan *chan) mv_chan_is_busy() argument
167 u32 state = readl_relaxed(XOR_ACTIVATION(chan)); mv_chan_is_busy()
348 struct mv_xor_chan *chan = (struct mv_xor_chan *) data; mv_xor_tasklet() local
350 spin_lock_bh(&chan->lock); mv_xor_tasklet()
351 mv_xor_slot_cleanup(chan); mv_xor_tasklet()
352 spin_unlock_bh(&chan->lock); mv_xor_tasklet()
413 struct mv_xor_chan *mv_chan = to_mv_xor_chan(tx->chan); mv_xor_tx_submit()
462 static int mv_xor_alloc_chan_resources(struct dma_chan *chan) mv_xor_alloc_chan_resources() argument
467 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); mv_xor_alloc_chan_resources()
484 dma_async_tx_descriptor_init(&slot->async_tx, chan); mv_xor_alloc_chan_resources()
511 mv_xor_prep_dma_xor(struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, mv_xor_prep_dma_xor() argument
514 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); mv_xor_prep_dma_xor()
543 mv_xor_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src, mv_xor_prep_dma_memcpy() argument
550 return mv_xor_prep_dma_xor(chan, dest, &src, 1, len, flags); mv_xor_prep_dma_memcpy()
554 mv_xor_prep_dma_interrupt(struct dma_chan *chan, unsigned long flags) mv_xor_prep_dma_interrupt() argument
556 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); mv_xor_prep_dma_interrupt()
568 return mv_xor_prep_dma_xor(chan, dest, &src, 1, len, flags); mv_xor_prep_dma_interrupt()
571 static void mv_xor_free_chan_resources(struct dma_chan *chan) mv_xor_free_chan_resources() argument
573 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); mv_xor_free_chan_resources()
610 * @chan: XOR channel handle
614 static enum dma_status mv_xor_status(struct dma_chan *chan, mv_xor_status() argument
618 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); mv_xor_status()
621 ret = dma_cookie_status(chan, cookie, txstate); mv_xor_status()
629 return dma_cookie_status(chan, cookie, txstate); mv_xor_status()
632 static void mv_dump_xor_regs(struct mv_xor_chan *chan) mv_dump_xor_regs() argument
636 val = readl_relaxed(XOR_CONFIG(chan)); mv_dump_xor_regs()
637 dev_err(mv_chan_to_devp(chan), "config 0x%08x\n", val); mv_dump_xor_regs()
639 val = readl_relaxed(XOR_ACTIVATION(chan)); mv_dump_xor_regs()
640 dev_err(mv_chan_to_devp(chan), "activation 0x%08x\n", val); mv_dump_xor_regs()
642 val = readl_relaxed(XOR_INTR_CAUSE(chan)); mv_dump_xor_regs()
643 dev_err(mv_chan_to_devp(chan), "intr cause 0x%08x\n", val); mv_dump_xor_regs()
645 val = readl_relaxed(XOR_INTR_MASK(chan)); mv_dump_xor_regs()
646 dev_err(mv_chan_to_devp(chan), "intr mask 0x%08x\n", val); mv_dump_xor_regs()
648 val = readl_relaxed(XOR_ERROR_CAUSE(chan)); mv_dump_xor_regs()
649 dev_err(mv_chan_to_devp(chan), "error cause 0x%08x\n", val); mv_dump_xor_regs()
651 val = readl_relaxed(XOR_ERROR_ADDR(chan)); mv_dump_xor_regs()
652 dev_err(mv_chan_to_devp(chan), "error addr 0x%08x\n", val); mv_dump_xor_regs()
655 static void mv_xor_err_interrupt_handler(struct mv_xor_chan *chan, mv_xor_err_interrupt_handler() argument
659 dev_dbg(mv_chan_to_devp(chan), "ignoring address decode error\n"); mv_xor_err_interrupt_handler()
663 dev_err(mv_chan_to_devp(chan), "error on chan %d. intr cause 0x%08x\n", mv_xor_err_interrupt_handler()
664 chan->idx, intr_cause); mv_xor_err_interrupt_handler()
666 mv_dump_xor_regs(chan); mv_xor_err_interrupt_handler()
672 struct mv_xor_chan *chan = data; mv_xor_interrupt_handler() local
673 u32 intr_cause = mv_chan_get_intr_cause(chan); mv_xor_interrupt_handler()
675 dev_dbg(mv_chan_to_devp(chan), "intr cause %x\n", intr_cause); mv_xor_interrupt_handler()
678 mv_xor_err_interrupt_handler(chan, intr_cause); mv_xor_interrupt_handler()
680 tasklet_schedule(&chan->irq_tasklet); mv_xor_interrupt_handler()
682 mv_xor_device_clear_eoc_cause(chan); mv_xor_interrupt_handler()
687 static void mv_xor_issue_pending(struct dma_chan *chan) mv_xor_issue_pending() argument
689 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); mv_xor_issue_pending()
949 struct dma_chan *chan, *_chan; mv_xor_channel_remove() local
961 list_for_each_entry_safe(chan, _chan, &mv_chan->dmadev.channels, mv_xor_channel_remove()
963 list_del(&chan->device_node); mv_xor_channel_remove()
1173 struct mv_xor_chan *chan; mv_xor_probe() local
1191 chan = mv_xor_channel_add(xordev, pdev, i, mv_xor_probe()
1193 if (IS_ERR(chan)) { mv_xor_probe()
1194 ret = PTR_ERR(chan); mv_xor_probe()
1199 xordev->channels[i] = chan; mv_xor_probe()
1205 struct mv_xor_chan *chan; mv_xor_probe() local
1220 chan = mv_xor_channel_add(xordev, pdev, i, mv_xor_probe()
1222 if (IS_ERR(chan)) { mv_xor_probe()
1223 ret = PTR_ERR(chan); mv_xor_probe()
1227 xordev->channels[i] = chan; mv_xor_probe()
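mv_xor packs two channels' interrupt state into shared registers and isolates one channel's slice by shifting with chan->idx * 16, as mv_chan_unmask_interrupts() and mv_chan_get_intr_cause() show. A minimal sketch of that read-modify-write idiom, assuming kernel context; reg and mask_value are placeholders for the driver's XOR_INTR_MASK window and mask bits:

#include <linux/io.h>

/* set one channel's mask bits without disturbing its sibling's */
static void my_unmask(void __iomem *reg, int idx, u32 mask_value)
{
	u32 val = readl_relaxed(reg);

	val |= mask_value << (idx * 16);	/* each channel owns 16 bits */
	writel_relaxed(val, reg);
}

/* and the matching read side, as mv_chan_get_intr_cause() does */
static u32 my_intr_cause(void __iomem *reg, int idx)
{
	return (readl_relaxed(reg) >> (idx * 16)) & 0xFFFF;
}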
H A Dfsl_raid.c85 /* Add descriptors into per chan software queue - submit_q */ fsl_re_tx_submit()
94 re_chan = container_of(tx->chan, struct fsl_re_chan, chan); fsl_re_tx_submit()
104 /* Copy descriptor from per chan software queue into hardware job ring */ fsl_re_issue_pending()
105 static void fsl_re_issue_pending(struct dma_chan *chan) fsl_re_issue_pending() argument
112 re_chan = container_of(chan, struct fsl_re_chan, chan); fsl_re_issue_pending()
162 fsl_re_issue_pending(&re_chan->chan); fsl_re_cleanup_descs()
229 dev_err(re_chan->dev, "chan error irqstate: %x, status: %x\n", fsl_re_isr()
241 static enum dma_status fsl_re_tx_status(struct dma_chan *chan, fsl_re_tx_status() argument
245 return dma_cookie_status(chan, cookie, txstate); fsl_re_tx_status()
265 dma_async_tx_descriptor_init(&desc->async_tx, &re_chan->chan); fsl_re_init_desc()
325 struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, fsl_re_prep_dma_genq()
338 re_chan = container_of(chan, struct fsl_re_chan, chan); fsl_re_prep_dma_genq()
400 struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, fsl_re_prep_dma_xor()
404 return fsl_re_prep_dma_genq(chan, dest, src, src_cnt, NULL, len, flags); fsl_re_prep_dma_xor()
412 struct dma_chan *chan, dma_addr_t *dest, dma_addr_t *src, fsl_re_prep_dma_pq()
425 re_chan = container_of(chan, struct fsl_re_chan, chan); fsl_re_prep_dma_pq()
446 tx = fsl_re_prep_dma_genq(chan, dest[1], dma_src, 2, coef, len, fsl_re_prep_dma_pq()
463 return fsl_re_prep_dma_genq(chan, dest[1], src, src_cnt, fsl_re_prep_dma_pq()
535 struct dma_chan *chan, dma_addr_t dest, dma_addr_t src, fsl_re_prep_dma_memcpy()
545 re_chan = container_of(chan, struct fsl_re_chan, chan); fsl_re_prep_dma_memcpy()
581 static int fsl_re_alloc_chan_resources(struct dma_chan *chan) fsl_re_alloc_chan_resources() argument
589 re_chan = container_of(chan, struct fsl_re_chan, chan); fsl_re_alloc_chan_resources()
611 static void fsl_re_free_chan_resources(struct dma_chan *chan) fsl_re_free_chan_resources() argument
616 re_chan = container_of(chan, struct fsl_re_chan, chan); fsl_re_free_chan_resources()
629 dev_err(re_chan->dev, "chan resource cannot be cleaned!\n"); fsl_re_free_chan_resources()
637 struct fsl_re_chan *chan; fsl_re_chan_probe() local
648 chan = devm_kzalloc(dev, sizeof(*chan), GFP_KERNEL); fsl_re_chan_probe()
649 if (!chan) fsl_re_chan_probe()
652 /* create platform device for chan node */ fsl_re_chan_probe()
668 chan->jrregs = (struct fsl_re_chan_cfg *)((u8 *)re_priv->re_regs + fsl_re_chan_probe()
672 chan->irq = irq_of_parse_and_map(np, 0); fsl_re_chan_probe()
673 if (chan->irq == NO_IRQ) { fsl_re_chan_probe()
679 snprintf(chan->name, sizeof(chan->name), "re_jr%02d", q); fsl_re_chan_probe()
682 tasklet_init(&chan->irqtask, fsl_re_dequeue, (unsigned long)chandev); fsl_re_chan_probe()
684 ret = request_irq(chan->irq, fsl_re_isr, 0, chan->name, chandev); fsl_re_chan_probe()
691 re_priv->re_jrs[q] = chan; fsl_re_chan_probe()
692 chan->chan.device = dma_dev; fsl_re_chan_probe()
693 chan->chan.private = chan; fsl_re_chan_probe()
694 chan->dev = chandev; fsl_re_chan_probe()
695 chan->re_dev = re_priv; fsl_re_chan_probe()
697 spin_lock_init(&chan->desc_lock); fsl_re_chan_probe()
698 INIT_LIST_HEAD(&chan->ack_q); fsl_re_chan_probe()
699 INIT_LIST_HEAD(&chan->active_q); fsl_re_chan_probe()
700 INIT_LIST_HEAD(&chan->submit_q); fsl_re_chan_probe()
701 INIT_LIST_HEAD(&chan->free_q); fsl_re_chan_probe()
703 chan->inb_ring_virt_addr = dma_pool_alloc(chan->re_dev->hw_desc_pool, fsl_re_chan_probe()
704 GFP_KERNEL, &chan->inb_phys_addr); fsl_re_chan_probe()
705 if (!chan->inb_ring_virt_addr) { fsl_re_chan_probe()
711 chan->oub_ring_virt_addr = dma_pool_alloc(chan->re_dev->hw_desc_pool, fsl_re_chan_probe()
712 GFP_KERNEL, &chan->oub_phys_addr); fsl_re_chan_probe()
713 if (!chan->oub_ring_virt_addr) { fsl_re_chan_probe()
720 out_be32(&chan->jrregs->inbring_base_h, fsl_re_chan_probe()
721 chan->inb_phys_addr & FSL_RE_ADDR_BIT_MASK); fsl_re_chan_probe()
722 out_be32(&chan->jrregs->oubring_base_h, fsl_re_chan_probe()
723 chan->oub_phys_addr & FSL_RE_ADDR_BIT_MASK); fsl_re_chan_probe()
724 out_be32(&chan->jrregs->inbring_base_l, fsl_re_chan_probe()
725 chan->inb_phys_addr >> FSL_RE_ADDR_BIT_SHIFT); fsl_re_chan_probe()
726 out_be32(&chan->jrregs->oubring_base_l, fsl_re_chan_probe()
727 chan->oub_phys_addr >> FSL_RE_ADDR_BIT_SHIFT); fsl_re_chan_probe()
728 out_be32(&chan->jrregs->inbring_size, fsl_re_chan_probe()
730 out_be32(&chan->jrregs->oubring_size, fsl_re_chan_probe()
734 status = in_be32(&chan->jrregs->jr_config_1) & FSL_RE_REG_LIODN_MASK; fsl_re_chan_probe()
737 out_be32(&chan->jrregs->jr_config_1, fsl_re_chan_probe()
740 dev_set_drvdata(chandev, chan); fsl_re_chan_probe()
743 out_be32(&chan->jrregs->jr_command, FSL_RE_ENABLE); fsl_re_chan_probe()
748 dma_pool_free(chan->re_dev->hw_desc_pool, chan->inb_ring_virt_addr, fsl_re_chan_probe()
749 chan->inb_phys_addr); fsl_re_chan_probe()
857 static void fsl_re_remove_chan(struct fsl_re_chan *chan) fsl_re_remove_chan() argument
859 dma_pool_free(chan->re_dev->hw_desc_pool, chan->inb_ring_virt_addr, fsl_re_remove_chan()
860 chan->inb_phys_addr); fsl_re_remove_chan()
862 dma_pool_free(chan->re_dev->hw_desc_pool, chan->oub_ring_virt_addr, fsl_re_remove_chan()
863 chan->oub_phys_addr); fsl_re_remove_chan()
875 /* Cleanup chan related memory areas */ fsl_re_remove()
324 fsl_re_prep_dma_genq( struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) fsl_re_prep_dma_genq() argument
399 fsl_re_prep_dma_xor( struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, size_t len, unsigned long flags) fsl_re_prep_dma_xor() argument
411 fsl_re_prep_dma_pq( struct dma_chan *chan, dma_addr_t *dest, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) fsl_re_prep_dma_pq() argument
534 fsl_re_prep_dma_memcpy( struct dma_chan *chan, dma_addr_t dest, dma_addr_t src, size_t len, unsigned long flags) fsl_re_prep_dma_memcpy() argument
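fsl_re_chan_probe() initializes the per-channel tasklet before requesting the IRQ, so the handler can schedule it from the very first interrupt. A minimal sketch of that ordering, assuming kernel context; the my_* names are hypothetical:

#include <linux/interrupt.h>

struct my_chan {
	struct tasklet_struct irqtask;
};

/* bottom half: drain the completion ring outside hard-IRQ context */
static void my_dequeue(unsigned long data)
{
	/* struct my_chan *chan = (struct my_chan *)data; ... */
}

static irqreturn_t my_isr(int irq, void *dev_id)
{
	struct my_chan *chan = dev_id;

	tasklet_schedule(&chan->irqtask);	/* defer the heavy work */
	return IRQ_HANDLED;
}

static int my_chan_setup(struct my_chan *chan, unsigned int irq,
			 const char *name)
{
	/* tasklet first: the IRQ can fire as soon as request_irq() returns */
	tasklet_init(&chan->irqtask, my_dequeue, (unsigned long)chan);
	return request_irq(irq, my_isr, 0, name, chan);
}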
H A Dmxs-dma.c113 struct dma_chan chan; member in struct:mxs_dma_chan
200 static struct mxs_dma_chan *to_mxs_dma_chan(struct dma_chan *chan) to_mxs_dma_chan() argument
202 return container_of(chan, struct mxs_dma_chan, chan); to_mxs_dma_chan()
205 static void mxs_dma_reset_chan(struct dma_chan *chan) mxs_dma_reset_chan() argument
207 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_reset_chan()
209 int chan_id = mxs_chan->chan.chan_id; mxs_dma_reset_chan()
254 static void mxs_dma_enable_chan(struct dma_chan *chan) mxs_dma_enable_chan() argument
256 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_enable_chan()
258 int chan_id = mxs_chan->chan.chan_id; mxs_dma_enable_chan()
277 static void mxs_dma_disable_chan(struct dma_chan *chan) mxs_dma_disable_chan() argument
279 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_disable_chan()
284 static int mxs_dma_pause_chan(struct dma_chan *chan) mxs_dma_pause_chan() argument
286 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_pause_chan()
288 int chan_id = mxs_chan->chan.chan_id; mxs_dma_pause_chan()
302 static int mxs_dma_resume_chan(struct dma_chan *chan) mxs_dma_resume_chan() argument
304 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_resume_chan()
306 int chan_id = mxs_chan->chan.chan_id; mxs_dma_resume_chan()
350 int chan = mxs_dma_irq_to_chan(mxs_dma, irq); mxs_dma_int_handler() local
352 if (chan < 0) mxs_dma_int_handler()
357 completed = (completed >> chan) & 0x1; mxs_dma_int_handler()
360 writel((1 << chan), mxs_dma_int_handler()
365 err &= (1 << (MXS_DMA_CHANNELS + chan)) | (1 << chan); mxs_dma_int_handler()
372 err = (err >> (MXS_DMA_CHANNELS + chan)) + (err >> chan); mxs_dma_int_handler()
375 writel((1 << chan), mxs_dma_int_handler()
387 mxs_chan = &mxs_dma->mxs_chans[chan]; mxs_dma_int_handler()
392 chan); mxs_dma_int_handler()
394 mxs_dma_reset_chan(&mxs_chan->chan); mxs_dma_int_handler()
400 HW_APBHX_CHn_SEMA(mxs_dma, chan)); mxs_dma_int_handler()
418 static int mxs_dma_alloc_chan_resources(struct dma_chan *chan) mxs_dma_alloc_chan_resources() argument
420 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_alloc_chan_resources()
443 mxs_dma_reset_chan(chan); mxs_dma_alloc_chan_resources()
445 dma_async_tx_descriptor_init(&mxs_chan->desc, chan); mxs_dma_alloc_chan_resources()
462 static void mxs_dma_free_chan_resources(struct dma_chan *chan) mxs_dma_free_chan_resources() argument
464 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_free_chan_resources()
467 mxs_dma_disable_chan(chan); mxs_dma_free_chan_resources()
500 struct dma_chan *chan, struct scatterlist *sgl, mxs_dma_prep_slave_sg()
504 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_prep_slave_sg()
598 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, mxs_dma_prep_dma_cyclic()
602 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_prep_dma_cyclic()
662 static int mxs_dma_terminate_all(struct dma_chan *chan) mxs_dma_terminate_all() argument
664 mxs_dma_reset_chan(chan); mxs_dma_terminate_all()
665 mxs_dma_disable_chan(chan); mxs_dma_terminate_all()
670 static enum dma_status mxs_dma_tx_status(struct dma_chan *chan, mxs_dma_tx_status() argument
673 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_tx_status()
686 HW_APBHX_CHn_BAR(mxs_dma, chan->chan_id)); mxs_dma_tx_status()
690 dma_set_tx_state(txstate, chan->completed_cookie, chan->cookie, mxs_dma_tx_status()
730 static bool mxs_dma_filter_fn(struct dma_chan *chan, void *fn_param) mxs_dma_filter_fn() argument
733 struct mxs_dma_chan *mxs_chan = to_mxs_dma_chan(chan); mxs_dma_filter_fn()
740 if (chan->chan_id != param->chan_id) mxs_dma_filter_fn()
820 mxs_chan->chan.device = &mxs_dma->dma_device; mxs_dma_probe()
821 dma_cookie_init(&mxs_chan->chan); mxs_dma_probe()
828 list_add_tail(&mxs_chan->chan.device_node, mxs_dma_probe()
499 mxs_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) mxs_dma_prep_slave_sg() argument
597 mxs_dma_prep_dma_cyclic( struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, size_t period_len, enum dma_transfer_direction direction, unsigned long flags) mxs_dma_prep_dma_cyclic() argument
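mxs_dma_filter_fn() has the classic dma_request_channel() filter shape: reject channels that belong to another controller, match the channel id, then stash the request line in the channel before accepting it. A minimal sketch of such a filter, assuming kernel context; my_filter_param is hypothetical:

#include <linux/dmaengine.h>

struct my_filter_param {
	struct dma_device *owner;	/* only accept our controller's channels */
	int chan_id;
};

static bool my_filter_fn(struct dma_chan *chan, void *fn_param)
{
	struct my_filter_param *p = fn_param;

	if (chan->device != p->owner)
		return false;
	return chan->chan_id == p->chan_id;
}

A caller would build a dma_cap_mask_t with dma_cap_zero()/dma_cap_set(DMA_SLAVE, ...) and pass the filter to dma_request_channel(mask, my_filter_fn, &param).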
H A Dmmp_tdma.c108 struct dma_chan chan; member in struct:mmp_tdma_chan
141 #define to_mmp_tdma_chan(dchan) container_of(dchan, struct mmp_tdma_chan, chan)
160 /* enable dma chan */ mmp_tdma_enable_chan()
166 static int mmp_tdma_disable_chan(struct dma_chan *chan) mmp_tdma_disable_chan() argument
168 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_disable_chan()
181 static int mmp_tdma_resume_chan(struct dma_chan *chan) mmp_tdma_resume_chan() argument
183 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_resume_chan()
192 static int mmp_tdma_pause_chan(struct dma_chan *chan) mmp_tdma_pause_chan() argument
194 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_pause_chan()
203 static int mmp_tdma_config_chan(struct dma_chan *chan) mmp_tdma_config_chan() argument
205 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_config_chan()
208 mmp_tdma_disable_chan(chan); mmp_tdma_config_chan()
374 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(tx->chan); mmp_tdma_tx_submit()
381 static int mmp_tdma_alloc_chan_resources(struct dma_chan *chan) mmp_tdma_alloc_chan_resources() argument
383 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_alloc_chan_resources()
386 dma_async_tx_descriptor_init(&tdmac->desc, chan); mmp_tdma_alloc_chan_resources()
398 static void mmp_tdma_free_chan_resources(struct dma_chan *chan) mmp_tdma_free_chan_resources() argument
400 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_free_chan_resources()
423 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, mmp_tdma_prep_dma_cyclic()
427 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_prep_dma_cyclic()
485 static int mmp_tdma_terminate_all(struct dma_chan *chan) mmp_tdma_terminate_all() argument
487 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_terminate_all()
489 mmp_tdma_disable_chan(chan); mmp_tdma_terminate_all()
496 static int mmp_tdma_config(struct dma_chan *chan, mmp_tdma_config() argument
499 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_config()
512 return mmp_tdma_config_chan(chan); mmp_tdma_config()
515 static enum dma_status mmp_tdma_tx_status(struct dma_chan *chan, mmp_tdma_tx_status() argument
518 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_tx_status()
521 dma_set_tx_state(txstate, chan->completed_cookie, chan->cookie, mmp_tdma_tx_status()
527 static void mmp_tdma_issue_pending(struct dma_chan *chan) mmp_tdma_issue_pending() argument
529 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_issue_pending()
562 tdmac->chan.device = &tdev->device; mmp_tdma_chan_init()
572 list_add_tail(&tdmac->chan.device_node, mmp_tdma_chan_init()
582 static bool mmp_tdma_filter_fn(struct dma_chan *chan, void *fn_param) mmp_tdma_filter_fn() argument
585 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); mmp_tdma_filter_fn()
586 struct dma_device *pdma_device = tdmac->chan.device; mmp_tdma_filter_fn()
591 if (chan->chan_id != param->chan_id) mmp_tdma_filter_fn()
422 mmp_tdma_prep_dma_cyclic( struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len, size_t period_len, enum dma_transfer_direction direction, unsigned long flags) mmp_tdma_prep_dma_cyclic() argument
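mmp_tdma_tx_status() fills the transfer state by hand with dma_set_tx_state() instead of deferring to dma_cookie_status(), reporting the last completed cookie, the last issued cookie, and the bytes still in flight. A hedged sketch of that shape, assuming kernel context; bytes_left() is a hypothetical hardware readback (the real driver computes it from the descriptor position):

#include <linux/dmaengine.h>

static u32 bytes_left(struct dma_chan *chan);	/* hypothetical HW query */

static enum dma_status my_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
				    struct dma_tx_state *txstate)
{
	dma_set_tx_state(txstate, chan->completed_cookie, chan->cookie,
			 bytes_left(chan));
	return DMA_IN_PROGRESS;	/* the real driver returns its cached status */
}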
H A Dtxx9dmac.c22 static struct txx9dmac_chan *to_txx9dmac_chan(struct dma_chan *chan) to_txx9dmac_chan() argument
24 return container_of(chan, struct txx9dmac_chan, chan); to_txx9dmac_chan()
132 static struct device *chan2dev(struct dma_chan *chan) chan2dev() argument
134 return &chan->dev->device; chan2dev()
136 static struct device *chan2parent(struct dma_chan *chan) chan2parent() argument
138 return chan->dev->device.parent; chan2parent()
202 dma_async_tx_descriptor_init(&desc->txd, &dc->chan); txx9dmac_desc_alloc()
206 desc->txd.phys = dma_map_single(chan2parent(&dc->chan), &desc->hwdesc, txx9dmac_desc_alloc()
224 dev_dbg(chan2dev(&dc->chan), "desc %p not ACKed\n", desc); txx9dmac_desc_get()
229 dev_vdbg(chan2dev(&dc->chan), "scanned %u descriptors on freelist\n", txx9dmac_desc_get()
238 dev_err(chan2dev(&dc->chan), txx9dmac_desc_get()
251 dma_sync_single_for_cpu(chan2parent(&dc->chan), txx9dmac_sync_desc_for_cpu()
254 dma_sync_single_for_cpu(chan2parent(&dc->chan), txx9dmac_sync_desc_for_cpu()
273 dev_vdbg(chan2dev(&dc->chan), txx9dmac_desc_put()
277 dev_vdbg(chan2dev(&dc->chan), "moving desc %p to freelist\n", txx9dmac_desc_put()
289 dev_err(chan2dev(&dc->chan), txx9dmac_dump_regs()
301 dev_err(chan2dev(&dc->chan), txx9dmac_dump_regs()
337 struct txx9dmac_slave *ds = dc->chan.private; txx9dmac_dostart()
340 dev_vdbg(chan2dev(&dc->chan), "dostart %u %p\n", txx9dmac_dostart()
344 dev_err(chan2dev(&dc->chan), txx9dmac_dostart()
410 dev_vdbg(chan2dev(&dc->chan), "descriptor %u %p complete\n", txx9dmac_descriptor_complete()
442 dma_sync_single_for_device(chan2parent(&dc->chan), txx9dmac_dequeue()
479 dev_crit(chan2dev(&dc->chan), txx9dmac_dump_desc()
483 dev_crit(chan2dev(&dc->chan), txx9dmac_dump_desc()
492 dev_crit(chan2dev(&dc->chan), txx9dmac_dump_desc()
496 dev_crit(chan2dev(&dc->chan), txx9dmac_dump_desc()
516 dev_crit(chan2dev(&dc->chan), "Abnormal Chain Completion\n"); txx9dmac_handle_error()
533 dev_crit(chan2dev(&dc->chan), txx9dmac_handle_error()
568 dev_vdbg(chan2dev(&dc->chan), "scan_descriptors: char=%#llx\n", txx9dmac_scan_descriptors()
599 dev_err(chan2dev(&dc->chan), txx9dmac_scan_descriptors()
619 dev_vdbg(chan2dev(&dc->chan), "tasklet: status=%x\n", csr); txx9dmac_chan_tasklet()
635 dev_vdbg(chan2dev(&dc->chan), "interrupt: status=%#x\n", txx9dmac_chan_interrupt()
659 dev_vdbg(ddev->chan[0]->dma.dev, "tasklet: mcr=%x\n", mcr); txx9dmac_tasklet()
662 dc = ddev->chan[i]; txx9dmac_tasklet()
664 dev_vdbg(chan2dev(&dc->chan), "tasklet: status=%x\n", txx9dmac_tasklet()
682 dev_vdbg(ddev->chan[0]->dma.dev, "interrupt: status=%#x\n", txx9dmac_interrupt()
700 struct txx9dmac_chan *dc = to_txx9dmac_chan(tx->chan); txx9dmac_tx_submit()
706 dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %u %p\n", txx9dmac_tx_submit()
716 txx9dmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src, txx9dmac_prep_dma_memcpy() argument
719 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); txx9dmac_prep_dma_memcpy()
727 dev_vdbg(chan2dev(chan), "prep_dma_memcpy d%#llx s%#llx l%#zx f%#lx\n", txx9dmac_prep_dma_memcpy()
731 dev_dbg(chan2dev(chan), "prep_dma_memcpy: length is zero!\n"); txx9dmac_prep_dma_memcpy()
786 dma_sync_single_for_device(chan2parent(&dc->chan), txx9dmac_prep_dma_memcpy()
799 dma_sync_single_for_device(chan2parent(&dc->chan), txx9dmac_prep_dma_memcpy()
810 txx9dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, txx9dmac_prep_slave_sg() argument
814 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); txx9dmac_prep_slave_sg()
816 struct txx9dmac_slave *ds = chan->private; txx9dmac_prep_slave_sg()
822 dev_vdbg(chan2dev(chan), "prep_dma_slave\n"); txx9dmac_prep_slave_sg()
880 dma_sync_single_for_device(chan2parent(&dc->chan), for_each_sg()
894 dma_sync_single_for_device(chan2parent(&dc->chan),
904 static int txx9dmac_terminate_all(struct dma_chan *chan) txx9dmac_terminate_all() argument
906 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); txx9dmac_terminate_all()
910 dev_vdbg(chan2dev(chan), "terminate_all\n"); txx9dmac_terminate_all()
929 txx9dmac_tx_status(struct dma_chan *chan, dma_cookie_t cookie, txx9dmac_tx_status() argument
932 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); txx9dmac_tx_status()
935 ret = dma_cookie_status(chan, cookie, txstate); txx9dmac_tx_status()
943 return dma_cookie_status(chan, cookie, txstate); txx9dmac_tx_status()
957 dma_sync_single_for_device(chan2parent(&dc->chan), txx9dmac_chain_dynamic()
968 static void txx9dmac_issue_pending(struct dma_chan *chan) txx9dmac_issue_pending() argument
970 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); txx9dmac_issue_pending()
992 static int txx9dmac_alloc_chan_resources(struct dma_chan *chan) txx9dmac_alloc_chan_resources() argument
994 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); txx9dmac_alloc_chan_resources()
995 struct txx9dmac_slave *ds = chan->private; txx9dmac_alloc_chan_resources()
999 dev_vdbg(chan2dev(chan), "alloc_chan_resources\n"); txx9dmac_alloc_chan_resources()
1003 dev_dbg(chan2dev(chan), "DMA channel not idle?\n"); txx9dmac_alloc_chan_resources()
1007 dma_cookie_init(chan); txx9dmac_alloc_chan_resources()
1013 if (chan->device->device_prep_dma_memcpy) { txx9dmac_alloc_chan_resources()
1033 dev_info(chan2dev(chan), txx9dmac_alloc_chan_resources()
1045 dev_dbg(chan2dev(chan), txx9dmac_alloc_chan_resources()
1051 static void txx9dmac_free_chan_resources(struct dma_chan *chan) txx9dmac_free_chan_resources() argument
1053 struct txx9dmac_chan *dc = to_txx9dmac_chan(chan); txx9dmac_free_chan_resources()
1058 dev_dbg(chan2dev(chan), "free_chan_resources (descs allocated=%u)\n", txx9dmac_free_chan_resources()
1072 dev_vdbg(chan2dev(chan), " freeing descriptor %p\n", desc); txx9dmac_free_chan_resources()
1073 dma_unmap_single(chan2parent(chan), desc->txd.phys, txx9dmac_free_chan_resources()
1078 dev_vdbg(chan2dev(chan), "free_chan_resources done\n"); txx9dmac_free_chan_resources()
1134 dc->ddev->chan[ch] = dc; txx9dmac_chan_probe()
1135 dc->chan.device = &dc->dma; txx9dmac_chan_probe()
1136 list_add_tail(&dc->chan.device_node, &dc->chan.device->channels); txx9dmac_chan_probe()
1137 dma_cookie_init(&dc->chan); txx9dmac_chan_probe()
1171 dc->ddev->chan[pdev->id % TXX9_DMA_MAX_NR_CHANNELS] = NULL; txx9dmac_chan_remove()
1275 .name = "txx9dmac-chan",
1314 MODULE_ALIAS("platform:txx9dmac-chan");
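txx9dmac_desc_get() recycles descriptors from a free list but skips any whose DMA_CTRL_ACK bit the client has not set yet, since an un-ACKed descriptor may still be referenced by async_tx. A minimal sketch of that scan, assuming kernel context; my_desc is hypothetical and the caller is expected to hold the channel lock:

#include <linux/dmaengine.h>
#include <linux/list.h>

struct my_desc {
	struct list_head node;
	struct dma_async_tx_descriptor txd;
};

static struct my_desc *my_desc_get(struct list_head *free_list)
{
	struct my_desc *d;

	list_for_each_entry(d, free_list, node) {
		if (async_tx_test_ack(&d->txd)) {	/* client done with it? */
			list_del(&d->node);
			return d;
		}
	}
	return NULL;	/* caller falls back to allocating a fresh one */
}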
H A Dmoxart-dma.c164 static struct device *chan2dev(struct dma_chan *chan) chan2dev() argument
166 return &chan->dev->device; chan2dev()
171 return container_of(c, struct moxart_chan, vc.chan); to_moxart_dma_chan()
185 static int moxart_terminate_all(struct dma_chan *chan) moxart_terminate_all() argument
187 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_terminate_all()
192 dev_dbg(chan2dev(chan), "%s: ch=%p\n", __func__, ch); moxart_terminate_all()
212 static int moxart_slave_config(struct dma_chan *chan, moxart_slave_config() argument
215 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_slave_config()
269 struct dma_chan *chan, struct scatterlist *sgl, moxart_prep_slave_sg()
273 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_prep_slave_sg()
282 dev_err(chan2dev(chan), "%s: invalid DMA direction\n", moxart_prep_slave_sg()
306 dev_err(chan2dev(chan), "%s: unsupported data width (%u)\n", moxart_prep_slave_sg()
335 struct dma_chan *chan; moxart_of_xlate() local
338 chan = dma_get_any_slave_channel(&mdc->dma_slave); moxart_of_xlate()
339 if (!chan) moxart_of_xlate()
342 ch = to_moxart_dma_chan(chan); moxart_of_xlate()
345 return chan; moxart_of_xlate()
348 static int moxart_alloc_chan_resources(struct dma_chan *chan) moxart_alloc_chan_resources() argument
350 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_alloc_chan_resources()
352 dev_dbg(chan2dev(chan), "%s: allocating channel #%u\n", moxart_alloc_chan_resources()
359 static void moxart_free_chan_resources(struct dma_chan *chan) moxart_free_chan_resources() argument
361 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_free_chan_resources()
365 dev_dbg(chan2dev(chan), "%s: freeing channel #%u\n", moxart_free_chan_resources()
390 dev_dbg(chan2dev(&ch->vc.chan), "%s: set %u DMA cycles (len=%u)\n", moxart_set_transfer_params()
418 static void moxart_dma_start_desc(struct dma_chan *chan) moxart_dma_start_desc() argument
420 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_dma_start_desc()
438 static void moxart_issue_pending(struct dma_chan *chan) moxart_issue_pending() argument
440 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_issue_pending()
445 moxart_dma_start_desc(chan); moxart_issue_pending()
471 dev_dbg(chan2dev(&ch->vc.chan), "%s: size=%zu\n", __func__, size); moxart_dma_desc_size_in_flight()
476 static enum dma_status moxart_tx_status(struct dma_chan *chan, moxart_tx_status() argument
480 struct moxart_chan *ch = to_moxart_dma_chan(chan); moxart_tx_status()
489 ret = dma_cookie_status(chan, cookie, txstate); moxart_tx_status()
529 dev_dbg(chan2dev(&ch->vc.chan), "%s\n", __func__); moxart_dma_interrupt()
537 dev_dbg(chan2dev(&ch->vc.chan), "%s: ch=%p ch->base=%p ctrl=%x\n", moxart_dma_interrupt()
548 moxart_dma_start_desc(&ch->vc.chan); moxart_dma_interrupt()
268 moxart_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long tx_flags, void *context) moxart_prep_slave_sg() argument
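moxart_tx_status() follows the other common tx_status shape: start from dma_cookie_status(), and only compute a residue when the cookie is still in flight (contrast mmp_tdma's manual dma_set_tx_state() earlier in these results). A hedged sketch, assuming kernel context; my_residue() is a hypothetical stand-in for moxart_dma_desc_size_in_flight():

#include <linux/dmaengine.h>

static u32 my_residue(struct dma_chan *chan, dma_cookie_t cookie);

static enum dma_status my_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
				    struct dma_tx_state *txstate)
{
	enum dma_status ret = dma_cookie_status(chan, cookie, txstate);

	if (ret == DMA_COMPLETE || !txstate)
		return ret;	/* done, or caller doesn't want details */

	dma_set_residue(txstate, my_residue(chan, cookie));
	return ret;
}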
H A Dvirt-dma.c24 struct virt_dma_chan *vc = to_virt_chan(tx->chan); vchan_tx_submit()
35 dev_dbg(vc->chan.device->dev, "vchan %p: txd %p[%x]: submitted\n", vchan_tx_submit()
100 dev_dbg(vc->chan.device->dev, "txd %p: freeing\n", vd); vchan_dma_desc_free_list()
108 dma_cookie_init(&vc->chan); vchan_init()
117 vc->chan.device = dmadev; vchan_init()
118 list_add_tail(&vc->chan.device_node, &dmadev->channels); vchan_init()
H A Dvirt-dma.h25 struct dma_chan chan; member in struct:virt_dma_chan
39 static inline struct virt_dma_chan *to_virt_chan(struct dma_chan *chan) to_virt_chan() argument
41 return container_of(chan, struct virt_dma_chan, chan); to_virt_chan()
59 dma_async_tx_descriptor_init(&vd->tx, &vc->chan); vchan_tx_prep()
86 struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan); vchan_cookie_complete()
91 dev_vdbg(vc->chan.device->dev, "txd %p[%x]: marked complete\n", vchan_cookie_complete()
104 struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan); vchan_cyclic_callback()
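virt-dma factors the cookie and list plumbing out of individual drivers: embed struct virt_dma_chan, call vchan_init() at probe, wrap each descriptor with vchan_tx_prep() on the prep path, and retire it from the IRQ path with vchan_cookie_complete(). A hedged sketch of the prep side, assuming kernel context and the in-tree drivers/dma/virt-dma.h; my_desc is hypothetical but must embed virt_dma_desc, as the helpers require:

#include <linux/slab.h>
#include "virt-dma.h"	/* drivers/dma/virt-dma.h */

struct my_desc {
	struct virt_dma_desc vd;	/* must be embedded for the helpers */
	/* ... hardware program for this transfer ... */
};

static struct dma_async_tx_descriptor *
my_prep(struct dma_chan *chan, unsigned long flags)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	struct my_desc *d = kzalloc(sizeof(*d), GFP_NOWAIT);

	if (!d)
		return NULL;

	/* wires up the vchan tx_submit, which queues onto the vchan lists */
	return vchan_tx_prep(vc, &d->vd, flags);
}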
H A Dsirf-dma.c63 struct dma_chan chan; member in struct:sirfsoc_dma_chan
102 return container_of(c, struct sirfsoc_dma_chan, chan); dma_chan_to_sirfsoc_dma_chan()
115 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); sirfsoc_dma_execute()
116 int cid = schan->chan.chan_id; sirfsoc_dma_execute()
227 schan->chan.completed_cookie = last_cookie; sirfsoc_dma_process_completed()
265 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan); sirfsoc_dma_tx_submit()
284 static int sirfsoc_dma_slave_config(struct dma_chan *chan, sirfsoc_dma_slave_config() argument
287 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_slave_config()
301 static int sirfsoc_dma_terminate_all(struct dma_chan *chan) sirfsoc_dma_terminate_all() argument
303 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_terminate_all()
304 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); sirfsoc_dma_terminate_all()
305 int cid = schan->chan.chan_id; sirfsoc_dma_terminate_all()
332 static int sirfsoc_dma_pause_chan(struct dma_chan *chan) sirfsoc_dma_pause_chan() argument
334 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_pause_chan()
335 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); sirfsoc_dma_pause_chan()
336 int cid = schan->chan.chan_id; sirfsoc_dma_pause_chan()
354 static int sirfsoc_dma_resume_chan(struct dma_chan *chan) sirfsoc_dma_resume_chan() argument
356 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_resume_chan()
357 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); sirfsoc_dma_resume_chan()
358 int cid = schan->chan.chan_id; sirfsoc_dma_resume_chan()
377 static int sirfsoc_dma_alloc_chan_resources(struct dma_chan *chan) sirfsoc_dma_alloc_chan_resources() argument
379 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_alloc_chan_resources()
380 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_alloc_chan_resources()
397 dma_async_tx_descriptor_init(&sdesc->desc, chan); sirfsoc_dma_alloc_chan_resources()
417 static void sirfsoc_dma_free_chan_resources(struct dma_chan *chan) sirfsoc_dma_free_chan_resources() argument
419 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_free_chan_resources()
420 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_free_chan_resources()
446 static void sirfsoc_dma_issue_pending(struct dma_chan *chan) sirfsoc_dma_issue_pending() argument
448 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_issue_pending()
461 sirfsoc_dma_tx_status(struct dma_chan *chan, dma_cookie_t cookie, sirfsoc_dma_tx_status() argument
464 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_tx_status()
465 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_tx_status()
469 int cid = schan->chan.chan_id; sirfsoc_dma_tx_status()
481 ret = dma_cookie_status(chan, cookie, txstate); sirfsoc_dma_tx_status()
493 struct dma_chan *chan, struct dma_interleaved_template *xt, sirfsoc_dma_prep_interleaved()
496 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan); sirfsoc_dma_prep_interleaved()
497 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_prep_interleaved()
561 sirfsoc_dma_prep_cyclic(struct dma_chan *chan, dma_addr_t addr, sirfsoc_dma_prep_cyclic() argument
565 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); sirfsoc_dma_prep_cyclic()
612 bool sirfsoc_dma_filter_id(struct dma_chan *chan, void *chan_id) sirfsoc_dma_filter_id() argument
616 if (ch_nr == chan->chan_id + sirfsoc_dma_filter_id()
617 chan->device->dev_id * SIRFSOC_DMA_CHANNELS) sirfsoc_dma_filter_id()
640 return dma_get_slave_channel(&sdma->channels[request].chan); of_dma_sirfsoc_xlate()
730 schan->chan.device = dma; sirfsoc_dma_probe()
731 dma_cookie_init(&schan->chan); sirfsoc_dma_probe()
740 list_add_tail(&schan->chan.device_node, &dma->channels); sirfsoc_dma_probe()
492 sirfsoc_dma_prep_interleaved( struct dma_chan *chan, struct dma_interleaved_template *xt, unsigned long flags) sirfsoc_dma_prep_interleaved() argument
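A hedged usage sketch for the exported sirfsoc_dma_filter_id(): the opaque parameter encodes device id and channel number exactly as the comparison in the filter above expects.

        static struct dma_chan *sirf_request_chan(unsigned int ch_nr)
        {
                dma_cap_mask_t mask;

                dma_cap_zero(mask);
                dma_cap_set(DMA_SLAVE, mask);
                /* ch_nr = chan_id + dev_id * SIRFSOC_DMA_CHANNELS, per the filter */
                return dma_request_channel(mask, sirfsoc_dma_filter_id,
                                           (void *)(unsigned long)ch_nr);
        }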
H A Dbcm2835-dma.c134 return container_of(c, struct bcm2835_chan, vc.chan); to_bcm2835_dma_chan()
146 dma_free_coherent(desc->vd.tx.chan->device->dev, bcm2835_dma_desc_free()
232 static int bcm2835_dma_alloc_chan_resources(struct dma_chan *chan) bcm2835_dma_alloc_chan_resources() argument
234 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); bcm2835_dma_alloc_chan_resources()
236 dev_dbg(c->vc.chan.device->dev, bcm2835_dma_alloc_chan_resources()
243 static void bcm2835_dma_free_chan_resources(struct dma_chan *chan) bcm2835_dma_free_chan_resources() argument
245 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); bcm2835_dma_free_chan_resources()
250 dev_dbg(c->vc.chan.device->dev, "Freeing DMA channel %u\n", c->ch); bcm2835_dma_free_chan_resources()
283 static enum dma_status bcm2835_dma_tx_status(struct dma_chan *chan, bcm2835_dma_tx_status() argument
286 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); bcm2835_dma_tx_status()
291 ret = dma_cookie_status(chan, cookie, txstate); bcm2835_dma_tx_status()
321 static void bcm2835_dma_issue_pending(struct dma_chan *chan) bcm2835_dma_issue_pending() argument
323 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); bcm2835_dma_issue_pending()
336 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, bcm2835_dma_prep_dma_cyclic()
340 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); bcm2835_dma_prep_dma_cyclic()
349 dev_err(chan->device->dev, "%s: bad direction?\n", __func__); bcm2835_dma_prep_dma_cyclic()
382 d->control_block_base = dma_zalloc_coherent(chan->device->dev, bcm2835_dma_prep_dma_cyclic()
439 static int bcm2835_dma_slave_config(struct dma_chan *chan, bcm2835_dma_slave_config() argument
442 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); bcm2835_dma_slave_config()
457 static int bcm2835_dma_terminate_all(struct dma_chan *chan) bcm2835_dma_terminate_all() argument
459 struct bcm2835_chan *c = to_bcm2835_dma_chan(chan); bcm2835_dma_terminate_all()
460 struct bcm2835_dmadev *d = to_bcm2835_dma_dev(c->vc.chan.device); bcm2835_dma_terminate_all()
526 vc.chan.device_node) { bcm2835_dma_free()
527 list_del(&c->vc.chan.device_node); bcm2835_dma_free()
542 struct dma_chan *chan; bcm2835_dma_xlate() local
544 chan = dma_get_any_slave_channel(&d->ddev); bcm2835_dma_xlate()
545 if (!chan) bcm2835_dma_xlate()
549 to_bcm2835_dma_chan(chan)->dreq = spec->args[0]; bcm2835_dma_xlate()
551 return chan; bcm2835_dma_xlate()
335 bcm2835_dma_prep_dma_cyclic( struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, size_t period_len, enum dma_transfer_direction direction, unsigned long flags) bcm2835_dma_prep_dma_cyclic() argument
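From the client side, the cyclic prep above is reached through dmaengine_prep_dma_cyclic(); a hedged sketch of the usual audio-style ring (my_period_done and the buffer layout are assumptions):

        static void my_period_done(void *param)
        {
                /* one period of the ring buffer has drained */
        }

        static int start_cyclic(struct dma_chan *chan, dma_addr_t buf,
                                size_t buf_len, size_t period_len)
        {
                struct dma_async_tx_descriptor *desc;

                desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
                                                 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
                if (!desc)
                        return -EINVAL;

                desc->callback = my_period_done;
                desc->callback_param = NULL;
                dmaengine_submit(desc);
                dma_async_issue_pending(chan);  /* nothing moves until this */
                return 0;
        }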
H A Dat_hdmac.c63 static void atc_issue_pending(struct dma_chan *chan);
97 * @chan: the channel to allocate descriptors for
105 static struct at_desc *atc_alloc_descriptor(struct dma_chan *chan, atc_alloc_descriptor() argument
109 struct at_dma *atdma = to_at_dma(chan->device); atc_alloc_descriptor()
116 dma_async_tx_descriptor_init(&desc->txd, chan); atc_alloc_descriptor()
310 * @chan: DMA channel
313 static int atc_get_bytes_left(struct dma_chan *chan, dma_cookie_t cookie) atc_get_bytes_left() argument
315 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_get_bytes_left()
581 atchan = &atdma->chan[i]; at_dma_interrupt()
614 struct at_dma_chan *atchan = to_at_dma_chan(tx->chan); atc_tx_submit()
622 dev_vdbg(chan2dev(tx->chan), "tx_submit: started %u\n", atc_tx_submit()
627 dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %u\n", atc_tx_submit()
639 * @chan: the channel to prepare operation on
646 atc_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src, atc_prep_dma_memcpy() argument
649 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_prep_dma_memcpy()
660 dev_vdbg(chan2dev(chan), "prep_dma_memcpy: d0x%x s0x%x l0x%zx f0x%lx\n", atc_prep_dma_memcpy()
664 dev_dbg(chan2dev(chan), "prep_dma_memcpy: length is zero!\n"); atc_prep_dma_memcpy()
724 * @chan: DMA channel
732 atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, atc_prep_slave_sg() argument
736 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_prep_slave_sg()
737 struct at_dma_slave *atslave = chan->private; atc_prep_slave_sg()
750 dev_vdbg(chan2dev(chan), "prep_slave_sg (%d): %s f0x%lx\n", atc_prep_slave_sg()
756 dev_dbg(chan2dev(chan), "prep_slave_sg: sg length is zero!\n"); atc_prep_slave_sg()
785 dev_dbg(chan2dev(chan), for_each_sg()
826 dev_dbg(chan2dev(chan), for_each_sg()
867 dev_err(chan2dev(chan), "not enough descriptors available\n");
875 * @chan: the channel to prepare operation on
883 atc_prep_dma_sg(struct dma_chan *chan, atc_prep_dma_sg() argument
888 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_prep_dma_sg()
1036 atc_dma_cyclic_fill_desc(struct dma_chan *chan, struct at_desc *desc, atc_dma_cyclic_fill_desc() argument
1041 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_dma_cyclic_fill_desc()
1086 * @chan: the DMA channel to prepare
1094 atc_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, atc_prep_dma_cyclic() argument
1098 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_prep_dma_cyclic()
1099 struct at_dma_slave *atslave = chan->private; atc_prep_dma_cyclic()
1108 dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@0x%08x - %d (%d/%d)\n", atc_prep_dma_cyclic()
1114 dev_dbg(chan2dev(chan), "prep_dma_cyclic: length is zero!\n"); atc_prep_dma_cyclic()
1120 dev_dbg(chan2dev(chan), "prep_dma_cyclic: channel in use!\n"); atc_prep_dma_cyclic()
1144 if (atc_dma_cyclic_fill_desc(chan, desc, i, buf_addr, atc_prep_dma_cyclic()
1162 dev_err(chan2dev(chan), "not enough descriptors available\n"); atc_prep_dma_cyclic()
1169 static int atc_config(struct dma_chan *chan, atc_config() argument
1172 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_config()
1174 dev_vdbg(chan2dev(chan), "%s\n", __func__); atc_config()
1176 /* Check if chan is configured for slave transfers */ atc_config()
1177 if (!chan->private) atc_config()
1188 static int atc_pause(struct dma_chan *chan) atc_pause() argument
1190 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_pause()
1191 struct at_dma *atdma = to_at_dma(chan->device); atc_pause()
1197 dev_vdbg(chan2dev(chan), "%s\n", __func__); atc_pause()
1209 static int atc_resume(struct dma_chan *chan) atc_resume() argument
1211 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_resume()
1212 struct at_dma *atdma = to_at_dma(chan->device); atc_resume()
1218 dev_vdbg(chan2dev(chan), "%s\n", __func__); atc_resume()
1233 static int atc_terminate_all(struct dma_chan *chan) atc_terminate_all() argument
1235 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_terminate_all()
1236 struct at_dma *atdma = to_at_dma(chan->device); atc_terminate_all()
1243 dev_vdbg(chan2dev(chan), "%s\n", __func__); atc_terminate_all()
1279 * @chan: DMA channel
1288 atc_tx_status(struct dma_chan *chan, atc_tx_status() argument
1292 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_tx_status()
1297 ret = dma_cookie_status(chan, cookie, txstate); atc_tx_status()
1310 bytes = atc_get_bytes_left(chan, cookie); atc_tx_status()
1315 dev_vdbg(chan2dev(chan), "get residual bytes error\n"); atc_tx_status()
1321 dev_vdbg(chan2dev(chan), "tx_status %d: cookie = %d residue = %d\n", atc_tx_status()
1329 * @chan: target DMA channel
1331 static void atc_issue_pending(struct dma_chan *chan) atc_issue_pending() argument
1333 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_issue_pending()
1336 dev_vdbg(chan2dev(chan), "issue_pending\n"); atc_issue_pending()
1349 * @chan: allocate descriptor resources for this channel
1354 static int atc_alloc_chan_resources(struct dma_chan *chan) atc_alloc_chan_resources() argument
1356 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_alloc_chan_resources()
1357 struct at_dma *atdma = to_at_dma(chan->device); atc_alloc_chan_resources()
1365 dev_vdbg(chan2dev(chan), "alloc_chan_resources\n"); atc_alloc_chan_resources()
1369 dev_dbg(chan2dev(chan), "DMA channel not idle?\n"); atc_alloc_chan_resources()
1375 atslave = chan->private; atc_alloc_chan_resources()
1395 desc = atc_alloc_descriptor(chan, GFP_KERNEL); atc_alloc_chan_resources()
1407 dma_cookie_init(chan); atc_alloc_chan_resources()
1413 dev_dbg(chan2dev(chan), atc_alloc_chan_resources()
1422 * @chan: DMA channel
1424 static void atc_free_chan_resources(struct dma_chan *chan) atc_free_chan_resources() argument
1426 struct at_dma_chan *atchan = to_at_dma_chan(chan); atc_free_chan_resources()
1427 struct at_dma *atdma = to_at_dma(chan->device); atc_free_chan_resources()
1431 dev_dbg(chan2dev(chan), "free_chan_resources: (descs allocated=%u)\n", atc_free_chan_resources()
1440 dev_vdbg(chan2dev(chan), " freeing descriptor %p\n", desc); atc_free_chan_resources()
1449 dev_vdbg(chan2dev(chan), "free_chan_resources: done\n"); atc_free_chan_resources()
1453 static bool at_dma_filter(struct dma_chan *chan, void *slave) at_dma_filter() argument
1457 if (atslave->dma_dev == chan->device->dev) { at_dma_filter()
1458 chan->private = atslave; at_dma_filter()
1468 struct dma_chan *chan; at_dma_xlate() local
1513 chan = dma_request_channel(mask, at_dma_filter, atslave); at_dma_xlate()
1514 if (!chan) at_dma_xlate()
1517 atchan = to_at_dma_chan(chan); at_dma_xlate()
1521 return chan; at_dma_xlate()
1686 struct at_dma_chan *atchan = &atdma->chan[i]; at_dma_probe()
1785 struct dma_chan *chan, *_chan; at_dma_remove() local
1794 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, at_dma_remove()
1796 struct at_dma_chan *atchan = to_at_dma_chan(chan); at_dma_remove()
1799 atc_disable_chan_irq(atdma, chan->chan_id); at_dma_remove()
1802 list_del(&chan->device_node); at_dma_remove()
1831 struct dma_chan *chan, *_chan; at_dma_prepare() local
1833 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, at_dma_prepare()
1835 struct at_dma_chan *atchan = to_at_dma_chan(chan); at_dma_prepare()
1845 struct dma_chan *chan = &atchan->chan_common; atc_suspend_cyclic() local
1850 dev_warn(chan2dev(chan), atc_suspend_cyclic()
1852 atc_pause(chan); atc_suspend_cyclic()
1866 struct dma_chan *chan, *_chan; at_dma_suspend_noirq() local
1869 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, at_dma_suspend_noirq()
1871 struct at_dma_chan *atchan = to_at_dma_chan(chan); at_dma_suspend_noirq()
1908 struct dma_chan *chan, *_chan; at_dma_resume_noirq() local
1920 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, at_dma_resume_noirq()
1922 struct at_dma_chan *atchan = to_at_dma_chan(chan); at_dma_resume_noirq()
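at_dma_xlate() above is itself the canonical user of at_dma_filter(): fill a struct at_dma_slave, let the filter match on dma_dev, and the slave data lands in chan->private for atc_config(). A hedged sketch of the same claim path as performed in-file (at_dma_filter() is static to at_hdmac.c, so out-of-file users would supply their own filter):

        static struct dma_chan *at_claim_channel(struct at_dma_slave *sdata)
        {
                dma_cap_mask_t mask;

                dma_cap_zero(mask);
                dma_cap_set(DMA_SLAVE, mask);
                /* at_dma_filter() parks sdata in chan->private on a match */
                return dma_request_channel(mask, at_dma_filter, sdata);
        }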
/linux-4.1.27/drivers/mailbox/
H A Dmailbox.c31 static int add_to_rbuf(struct mbox_chan *chan, void *mssg) add_to_rbuf() argument
36 spin_lock_irqsave(&chan->lock, flags); add_to_rbuf()
39 if (chan->msg_count == MBOX_TX_QUEUE_LEN) { add_to_rbuf()
40 spin_unlock_irqrestore(&chan->lock, flags); add_to_rbuf()
44 idx = chan->msg_free; add_to_rbuf()
45 chan->msg_data[idx] = mssg; add_to_rbuf()
46 chan->msg_count++; add_to_rbuf()
49 chan->msg_free = 0; add_to_rbuf()
51 chan->msg_free++; add_to_rbuf()
53 spin_unlock_irqrestore(&chan->lock, flags); add_to_rbuf()
58 static void msg_submit(struct mbox_chan *chan) msg_submit() argument
65 spin_lock_irqsave(&chan->lock, flags); msg_submit()
67 if (!chan->msg_count || chan->active_req) msg_submit()
70 count = chan->msg_count; msg_submit()
71 idx = chan->msg_free; msg_submit()
77 data = chan->msg_data[idx]; msg_submit()
79 if (chan->cl->tx_prepare) msg_submit()
80 chan->cl->tx_prepare(chan->cl, data); msg_submit()
82 err = chan->mbox->ops->send_data(chan, data); msg_submit()
84 chan->active_req = data; msg_submit()
85 chan->msg_count--; msg_submit()
88 spin_unlock_irqrestore(&chan->lock, flags); msg_submit()
90 if (!err && (chan->txdone_method & TXDONE_BY_POLL)) msg_submit()
91 poll_txdone((unsigned long)chan->mbox); msg_submit()
94 static void tx_tick(struct mbox_chan *chan, int r) tx_tick() argument
99 spin_lock_irqsave(&chan->lock, flags); tx_tick()
100 mssg = chan->active_req; tx_tick()
101 chan->active_req = NULL; tx_tick()
102 spin_unlock_irqrestore(&chan->lock, flags); tx_tick()
105 msg_submit(chan); tx_tick()
108 if (mssg && chan->cl->tx_done) tx_tick()
109 chan->cl->tx_done(chan->cl, mssg, r); tx_tick()
111 if (chan->cl->tx_block) tx_tick()
112 complete(&chan->tx_complete); tx_tick()
122 struct mbox_chan *chan = &mbox->chans[i]; poll_txdone() local
124 if (chan->active_req && chan->cl) { poll_txdone()
125 txdone = chan->mbox->ops->last_tx_done(chan); poll_txdone()
127 tx_tick(chan, 0); poll_txdone()
141 * @chan: Pointer to the mailbox channel on which RX happened.
144 * After startup and before shutdown any data received on the chan
148 void mbox_chan_received_data(struct mbox_chan *chan, void *mssg) mbox_chan_received_data() argument
151 if (chan->cl->rx_callback) mbox_chan_received_data()
152 chan->cl->rx_callback(chan->cl, mssg); mbox_chan_received_data()
159 * @chan: Pointer to the mailbox chan on which TX happened.
166 void mbox_chan_txdone(struct mbox_chan *chan, int r) mbox_chan_txdone() argument
168 if (unlikely(!(chan->txdone_method & TXDONE_BY_IRQ))) { mbox_chan_txdone()
169 dev_err(chan->mbox->dev, mbox_chan_txdone()
174 tx_tick(chan, r); mbox_chan_txdone()
180 * @chan: Mailbox channel assigned to this client.
187 void mbox_client_txdone(struct mbox_chan *chan, int r) mbox_client_txdone() argument
189 if (unlikely(!(chan->txdone_method & TXDONE_BY_ACK))) { mbox_client_txdone()
190 dev_err(chan->mbox->dev, "Client can't run the TX ticker\n"); mbox_client_txdone()
194 tx_tick(chan, r); mbox_client_txdone()
201 * @chan: Mailbox channel assigned to this client.
213 bool mbox_client_peek_data(struct mbox_chan *chan) mbox_client_peek_data() argument
215 if (chan->mbox->ops->peek_data) mbox_client_peek_data()
216 return chan->mbox->ops->peek_data(chan); mbox_client_peek_data()
225 * @chan: Mailbox channel assigned to this client.
238 * over the chan, i.e., tx_done() is made. mbox_send_message()
243 * or transmission over chan (blocking mode).
246 int mbox_send_message(struct mbox_chan *chan, void *mssg) mbox_send_message() argument
250 if (!chan || !chan->cl) mbox_send_message()
253 t = add_to_rbuf(chan, mssg); mbox_send_message()
255 dev_err(chan->mbox->dev, "Try increasing MBOX_TX_QUEUE_LEN\n"); mbox_send_message()
259 msg_submit(chan); mbox_send_message()
261 if (chan->cl->tx_block && chan->active_req) { mbox_send_message()
265 if (!chan->cl->tx_tout) /* wait forever */ mbox_send_message()
268 wait = msecs_to_jiffies(chan->cl->tx_tout); mbox_send_message()
270 ret = wait_for_completion_timeout(&chan->tx_complete, wait); mbox_send_message()
273 tx_tick(chan, -EIO); mbox_send_message()
303 struct mbox_chan *chan; mbox_request_channel() local
321 chan = NULL; mbox_request_channel()
324 chan = mbox->of_xlate(mbox, &spec); mbox_request_channel()
330 if (!chan || chan->cl || !try_module_get(mbox->dev->driver->owner)) { mbox_request_channel()
336 spin_lock_irqsave(&chan->lock, flags); mbox_request_channel()
337 chan->msg_free = 0; mbox_request_channel()
338 chan->msg_count = 0; mbox_request_channel()
339 chan->active_req = NULL; mbox_request_channel()
340 chan->cl = cl; mbox_request_channel()
341 init_completion(&chan->tx_complete); mbox_request_channel()
343 if (chan->txdone_method == TXDONE_BY_POLL && cl->knows_txdone) mbox_request_channel()
344 chan->txdone_method |= TXDONE_BY_ACK; mbox_request_channel()
346 spin_unlock_irqrestore(&chan->lock, flags); mbox_request_channel()
348 ret = chan->mbox->ops->startup(chan); mbox_request_channel()
350 dev_err(dev, "Unable to startup the chan (%d)\n", ret); mbox_request_channel()
351 mbox_free_channel(chan); mbox_request_channel()
352 chan = ERR_PTR(ret); mbox_request_channel()
356 return chan; mbox_request_channel()
363 * @chan: The mailbox channel to be freed.
365 void mbox_free_channel(struct mbox_chan *chan) mbox_free_channel() argument
369 if (!chan || !chan->cl) mbox_free_channel()
372 chan->mbox->ops->shutdown(chan); mbox_free_channel()
375 spin_lock_irqsave(&chan->lock, flags); mbox_free_channel()
376 chan->cl = NULL; mbox_free_channel()
377 chan->active_req = NULL; mbox_free_channel()
378 if (chan->txdone_method == (TXDONE_BY_POLL | TXDONE_BY_ACK)) mbox_free_channel()
379 chan->txdone_method = TXDONE_BY_POLL; mbox_free_channel()
381 module_put(chan->mbox->dev->driver->owner); mbox_free_channel()
382 spin_unlock_irqrestore(&chan->lock, flags); mbox_free_channel()
426 struct mbox_chan *chan = &mbox->chans[i]; mbox_controller_register() local
428 chan->cl = NULL; mbox_controller_register()
429 chan->mbox = mbox; mbox_controller_register()
430 chan->txdone_method = txdone; mbox_controller_register()
431 spin_lock_init(&chan->lock); mbox_controller_register()
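The registration fragments above initialize each chan from a driver-supplied mbox_controller. A minimal controller-side sketch, assuming illustrative foo_* ops and a polled TX-done method so the core's poll_txdone() timer drives tx_tick():

        static int  foo_send_data(struct mbox_chan *chan, void *data);
        static int  foo_startup(struct mbox_chan *chan);
        static void foo_shutdown(struct mbox_chan *chan);
        static bool foo_last_tx_done(struct mbox_chan *chan);

        static struct mbox_chan foo_chans[4];

        static const struct mbox_chan_ops foo_mbox_ops = {
                .send_data      = foo_send_data,        /* kick the hardware */
                .startup        = foo_startup,          /* called from mbox_request_channel() */
                .shutdown       = foo_shutdown,
                .last_tx_done   = foo_last_tx_done,     /* consulted by poll_txdone() */
        };

        static int foo_mbox_probe(struct platform_device *pdev)
        {
                struct mbox_controller *mbox;

                mbox = devm_kzalloc(&pdev->dev, sizeof(*mbox), GFP_KERNEL);
                if (!mbox)
                        return -ENOMEM;

                mbox->dev = &pdev->dev;
                mbox->ops = &foo_mbox_ops;
                mbox->chans = foo_chans;
                mbox->num_chans = ARRAY_SIZE(foo_chans);
                mbox->txdone_poll = true;       /* TXDONE_BY_POLL for every chan */
                mbox->txpoll_period = 10;       /* ms between last_tx_done() checks */

                return mbox_controller_register(mbox);
        }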
H A Dmailbox-altera.c62 static struct altera_mbox *mbox_chan_to_altera_mbox(struct mbox_chan *chan) mbox_chan_to_altera_mbox() argument
64 if (!chan || !chan->con_priv) mbox_chan_to_altera_mbox()
67 return (struct altera_mbox *)chan->con_priv; mbox_chan_to_altera_mbox()
127 static void altera_mbox_rx_data(struct mbox_chan *chan) altera_mbox_rx_data() argument
129 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_rx_data()
137 mbox_chan_received_data(chan, (void *)data); altera_mbox_rx_data()
143 struct mbox_chan *chan = (struct mbox_chan *)data; altera_mbox_poll_rx() local
144 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_poll_rx()
146 altera_mbox_rx_data(chan); altera_mbox_poll_rx()
154 struct mbox_chan *chan = (struct mbox_chan *)p; altera_mbox_tx_interrupt() local
155 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_tx_interrupt()
158 mbox_chan_txdone(chan, 0); altera_mbox_tx_interrupt()
165 struct mbox_chan *chan = (struct mbox_chan *)p; altera_mbox_rx_interrupt() local
167 altera_mbox_rx_data(chan); altera_mbox_rx_interrupt()
171 static int altera_mbox_startup_sender(struct mbox_chan *chan) altera_mbox_startup_sender() argument
174 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_startup_sender()
178 DRIVER_NAME, chan); altera_mbox_startup_sender()
190 static int altera_mbox_startup_receiver(struct mbox_chan *chan) altera_mbox_startup_receiver() argument
193 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_startup_receiver()
197 DRIVER_NAME, chan); altera_mbox_startup_receiver()
210 (unsigned long)chan); altera_mbox_startup_receiver()
217 static int altera_mbox_send_data(struct mbox_chan *chan, void *data) altera_mbox_send_data() argument
219 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_send_data()
244 static bool altera_mbox_last_tx_done(struct mbox_chan *chan) altera_mbox_last_tx_done() argument
246 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_last_tx_done()
252 static bool altera_mbox_peek_data(struct mbox_chan *chan) altera_mbox_peek_data() argument
254 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_peek_data()
259 static int altera_mbox_startup(struct mbox_chan *chan) altera_mbox_startup() argument
261 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_startup()
268 ret = altera_mbox_startup_sender(chan); altera_mbox_startup()
270 ret = altera_mbox_startup_receiver(chan); altera_mbox_startup()
275 static void altera_mbox_shutdown(struct mbox_chan *chan) altera_mbox_shutdown() argument
277 struct altera_mbox *mbox = mbox_chan_to_altera_mbox(chan); altera_mbox_shutdown()
282 free_irq(mbox->irq, chan); altera_mbox_shutdown()
H A Darm_mhu.c47 struct mbox_chan chan[MHU_CHANS]; member in struct:arm_mhu
53 struct mbox_chan *chan = p; mhu_rx_interrupt() local
54 struct mhu_link *mlink = chan->con_priv; mhu_rx_interrupt()
61 mbox_chan_received_data(chan, (void *)&val); mhu_rx_interrupt()
68 static bool mhu_last_tx_done(struct mbox_chan *chan) mhu_last_tx_done() argument
70 struct mhu_link *mlink = chan->con_priv; mhu_last_tx_done()
76 static int mhu_send_data(struct mbox_chan *chan, void *data) mhu_send_data() argument
78 struct mhu_link *mlink = chan->con_priv; mhu_send_data()
86 static int mhu_startup(struct mbox_chan *chan) mhu_startup() argument
88 struct mhu_link *mlink = chan->con_priv; mhu_startup()
96 IRQF_SHARED, "mhu_link", chan); mhu_startup()
98 dev_err(chan->mbox->dev, mhu_startup()
106 static void mhu_shutdown(struct mbox_chan *chan) mhu_shutdown() argument
108 struct mhu_link *mlink = chan->con_priv; mhu_shutdown()
110 free_irq(mlink->irq, chan); mhu_shutdown()
139 mhu->chan[i].con_priv = &mhu->mlink[i]; mhu_probe()
146 mhu->mbox.chans = &mhu->chan[0]; mhu_probe()
H A Dpcc.c113 struct mbox_chan *chan; pcc_mbox_request_channel() local
123 chan = get_pcc_channel(subspace_id); pcc_mbox_request_channel()
125 if (!chan || chan->cl) { pcc_mbox_request_channel()
130 spin_lock_irqsave(&chan->lock, flags); pcc_mbox_request_channel()
131 chan->msg_free = 0; pcc_mbox_request_channel()
132 chan->msg_count = 0; pcc_mbox_request_channel()
133 chan->active_req = NULL; pcc_mbox_request_channel()
134 chan->cl = cl; pcc_mbox_request_channel()
135 init_completion(&chan->tx_complete); pcc_mbox_request_channel()
137 if (chan->txdone_method == TXDONE_BY_POLL && cl->knows_txdone) pcc_mbox_request_channel()
138 chan->txdone_method |= TXDONE_BY_ACK; pcc_mbox_request_channel()
140 spin_unlock_irqrestore(&chan->lock, flags); pcc_mbox_request_channel()
142 return chan; pcc_mbox_request_channel()
149 * @chan: Pointer to the mailbox channel as returned by
152 void pcc_mbox_free_channel(struct mbox_chan *chan) pcc_mbox_free_channel() argument
156 if (!chan || !chan->cl) pcc_mbox_free_channel()
159 spin_lock_irqsave(&chan->lock, flags); pcc_mbox_free_channel()
160 chan->cl = NULL; pcc_mbox_free_channel()
161 chan->active_req = NULL; pcc_mbox_free_channel()
162 if (chan->txdone_method == (TXDONE_BY_POLL | TXDONE_BY_ACK)) pcc_mbox_free_channel()
163 chan->txdone_method = TXDONE_BY_POLL; pcc_mbox_free_channel()
165 spin_unlock_irqrestore(&chan->lock, flags); pcc_mbox_free_channel()
175 * @chan: Pointer to Mailbox channel over which to send data.
181 static int pcc_send_data(struct mbox_chan *chan, void *data) pcc_send_data() argument
183 struct acpi_pcct_hw_reduced *pcct_ss = chan->con_priv; pcc_send_data()
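A hedged client sketch for the PCC path above: the request/free pair mirrors mbox_request_channel()/mbox_free_channel(), and setting knows_txdone lets the client ACK with mbox_client_txdone() once it has seen the command-complete bit in the PCC shared memory (the command payload lives in that shared region; the message pointer itself is not interpreted by the doorbell path):

        static struct mbox_client pcc_cl = {
                .knows_txdone = true,   /* client checks cmd-complete itself */
        };

        static int pcc_ring_doorbell(int subspace_id)
        {
                struct mbox_chan *chan;
                int ret;

                chan = pcc_mbox_request_channel(&pcc_cl, subspace_id);
                if (IS_ERR(chan))
                        return PTR_ERR(chan);

                ret = mbox_send_message(chan, NULL);    /* pcc_send_data() rings the doorbell */
                if (ret >= 0)
                        mbox_client_txdone(chan, 0);    /* TXDONE_BY_ACK, enabled above */

                pcc_mbox_free_channel(chan);
                return ret < 0 ? ret : 0;
        }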
H A Domap-mailbox.c121 struct mbox_chan *chan; member in struct:omap_mbox
132 static struct omap_mbox *mbox_chan_to_omap_mbox(struct mbox_chan *chan) mbox_chan_to_omap_mbox() argument
134 if (!chan || !chan->con_priv) mbox_chan_to_omap_mbox()
137 return (struct omap_mbox *)chan->con_priv; mbox_chan_to_omap_mbox()
205 void omap_mbox_save_ctx(struct mbox_chan *chan) omap_mbox_save_ctx() argument
209 struct omap_mbox *mbox = mbox_chan_to_omap_mbox(chan); omap_mbox_save_ctx()
227 void omap_mbox_restore_ctx(struct mbox_chan *chan) omap_mbox_restore_ctx() argument
231 struct omap_mbox *mbox = mbox_chan_to_omap_mbox(chan); omap_mbox_restore_ctx()
278 void omap_mbox_enable_irq(struct mbox_chan *chan, omap_mbox_irq_t irq) omap_mbox_enable_irq() argument
280 struct omap_mbox *mbox = mbox_chan_to_omap_mbox(chan); omap_mbox_enable_irq()
289 void omap_mbox_disable_irq(struct mbox_chan *chan, omap_mbox_irq_t irq) omap_mbox_disable_irq() argument
291 struct omap_mbox *mbox = mbox_chan_to_omap_mbox(chan); omap_mbox_disable_irq()
314 mbox_chan_received_data(mq->mbox->chan, (void *)msg); mbox_rx_work()
331 mbox_chan_txdone(mbox->chan, 0); __mbox_tx_interrupt()
463 struct mbox_chan *chan; omap_mbox_request_channel() local
484 if (!mbox || !mbox->chan) omap_mbox_request_channel()
487 chan = mbox->chan; omap_mbox_request_channel()
488 spin_lock_irqsave(&chan->lock, flags); omap_mbox_request_channel()
489 chan->msg_free = 0; omap_mbox_request_channel()
490 chan->msg_count = 0; omap_mbox_request_channel()
491 chan->active_req = NULL; omap_mbox_request_channel()
492 chan->cl = cl; omap_mbox_request_channel()
493 init_completion(&chan->tx_complete); omap_mbox_request_channel()
494 spin_unlock_irqrestore(&chan->lock, flags); omap_mbox_request_channel()
496 ret = chan->mbox->ops->startup(chan); omap_mbox_request_channel()
498 pr_err("Unable to startup the chan (%d)\n", ret); omap_mbox_request_channel()
499 mbox_free_channel(chan); omap_mbox_request_channel()
500 chan = ERR_PTR(ret); omap_mbox_request_channel()
503 return chan; omap_mbox_request_channel()
563 static int omap_mbox_chan_startup(struct mbox_chan *chan) omap_mbox_chan_startup() argument
565 struct omap_mbox *mbox = mbox_chan_to_omap_mbox(chan); omap_mbox_chan_startup()
578 static void omap_mbox_chan_shutdown(struct mbox_chan *chan) omap_mbox_chan_shutdown() argument
580 struct omap_mbox *mbox = mbox_chan_to_omap_mbox(chan); omap_mbox_chan_shutdown()
589 static int omap_mbox_chan_send_data(struct mbox_chan *chan, void *data) omap_mbox_chan_send_data() argument
591 struct omap_mbox *mbox = mbox_chan_to_omap_mbox(chan); omap_mbox_chan_send_data()
653 return mbox ? mbox->chan : NULL; omap_mbox_of_xlate()
801 mbox->chan = &chnls[i]; omap_mbox_probe()
/linux-4.1.27/include/linux/
H A Dsirfsoc_dma.h4 bool sirfsoc_dma_filter_id(struct dma_chan *chan, void *chan_id);
H A Domap-mailbox.h24 void omap_mbox_save_ctx(struct mbox_chan *chan);
25 void omap_mbox_restore_ctx(struct mbox_chan *chan);
26 void omap_mbox_enable_irq(struct mbox_chan *chan, omap_mbox_irq_t irq);
27 void omap_mbox_disable_irq(struct mbox_chan *chan, omap_mbox_irq_t irq);
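A minimal sketch of the OMAP-specific helpers above bracketing a power transition; foo_mbox_chan stands for a channel previously obtained from omap_mbox_request_channel():

        static struct mbox_chan *foo_mbox_chan;

        static int foo_suspend(struct device *dev)
        {
                omap_mbox_disable_irq(foo_mbox_chan, IRQ_RX);
                omap_mbox_save_ctx(foo_mbox_chan);
                return 0;
        }

        static int foo_resume(struct device *dev)
        {
                omap_mbox_restore_ctx(foo_mbox_chan);
                omap_mbox_enable_irq(foo_mbox_chan, IRQ_RX);
                return 0;
        }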
H A Ddmaengine.h231 * @device_node: used to add this to the device chan list
255 * @chan: driver channel device
261 struct dma_chan *chan; member in struct:dma_chan_dev
387 static inline const char *dma_chan_name(struct dma_chan *chan) dma_chan_name() argument
389 return dev_name(&chan->dev->device); dma_chan_name()
396 * @chan: channel to be reviewed
405 typedef bool (*dma_filter_fn)(struct dma_chan *chan, void *filter_param);
428 * @chan: target channel for this operation
442 struct dma_chan *chan; member in struct:dma_async_tx_descriptor
633 int (*device_alloc_chan_resources)(struct dma_chan *chan);
634 void (*device_free_chan_resources)(struct dma_chan *chan);
637 struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
640 struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src,
643 struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt,
646 struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src,
650 struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src,
654 struct dma_chan *chan, unsigned long flags);
656 struct dma_chan *chan,
662 struct dma_chan *chan, struct scatterlist *sgl,
666 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len,
670 struct dma_chan *chan, struct dma_interleaved_template *xt,
673 int (*device_config)(struct dma_chan *chan,
675 int (*device_pause)(struct dma_chan *chan);
676 int (*device_resume)(struct dma_chan *chan);
677 int (*device_terminate_all)(struct dma_chan *chan);
679 enum dma_status (*device_tx_status)(struct dma_chan *chan,
682 void (*device_issue_pending)(struct dma_chan *chan);
685 static inline int dmaengine_slave_config(struct dma_chan *chan, dmaengine_slave_config() argument
688 if (chan->device->device_config) dmaengine_slave_config()
689 return chan->device->device_config(chan, config); dmaengine_slave_config()
700 struct dma_chan *chan, dma_addr_t buf, size_t len, dmaengine_prep_slave_single()
708 return chan->device->device_prep_slave_sg(chan, &sg, 1, dmaengine_prep_slave_single()
713 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, dmaengine_prep_slave_sg()
716 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dmaengine_prep_slave_sg()
723 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, dmaengine_prep_rio_sg()
727 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dmaengine_prep_rio_sg()
733 struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, dmaengine_prep_dma_cyclic()
737 return chan->device->device_prep_dma_cyclic(chan, buf_addr, buf_len, dmaengine_prep_dma_cyclic()
742 struct dma_chan *chan, struct dma_interleaved_template *xt, dmaengine_prep_interleaved_dma()
745 return chan->device->device_prep_interleaved_dma(chan, xt, flags); dmaengine_prep_interleaved_dma()
749 struct dma_chan *chan, dmaengine_prep_dma_sg()
754 return chan->device->device_prep_dma_sg(chan, dst_sg, dst_nents, dmaengine_prep_dma_sg()
758 static inline int dmaengine_terminate_all(struct dma_chan *chan) dmaengine_terminate_all() argument
760 if (chan->device->device_terminate_all) dmaengine_terminate_all()
761 return chan->device->device_terminate_all(chan); dmaengine_terminate_all()
766 static inline int dmaengine_pause(struct dma_chan *chan) dmaengine_pause() argument
768 if (chan->device->device_pause) dmaengine_pause()
769 return chan->device->device_pause(chan); dmaengine_pause()
774 static inline int dmaengine_resume(struct dma_chan *chan) dmaengine_resume() argument
776 if (chan->device->device_resume) dmaengine_resume()
777 return chan->device->device_resume(chan); dmaengine_resume()
782 static inline enum dma_status dmaengine_tx_status(struct dma_chan *chan, dmaengine_tx_status() argument
785 return chan->device->device_tx_status(chan, cookie, state); dmaengine_tx_status()
913 struct dma_chan *chan);
962 * @chan: target DMA channel
967 static inline void dma_async_issue_pending(struct dma_chan *chan) dma_async_issue_pending() argument
969 chan->device->device_issue_pending(chan); dma_async_issue_pending()
974 * @chan: DMA channel
983 static inline enum dma_status dma_async_is_tx_complete(struct dma_chan *chan, dma_async_is_tx_complete() argument
989 status = chan->device->device_tx_status(chan, cookie, &state); dma_async_is_tx_complete()
998 * dma_async_is_complete - test a cookie against chan state
1031 enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie);
1039 void dma_release_channel(struct dma_chan *chan);
1040 int dma_get_slave_caps(struct dma_chan *chan, struct dma_slave_caps *caps);
1046 static inline enum dma_status dma_sync_wait(struct dma_chan *chan, dma_cookie_t cookie) dma_sync_wait() argument
1072 static inline void dma_release_channel(struct dma_chan *chan) dma_release_channel() argument
1075 static inline int dma_get_slave_caps(struct dma_chan *chan, dma_get_slave_caps() argument
1087 struct dma_chan *dma_get_slave_channel(struct dma_chan *chan);
1098 struct dma_chan *chan; __dma_request_slave_channel_compat() local
1100 chan = dma_request_slave_channel(dev, name); __dma_request_slave_channel_compat()
1101 if (chan) __dma_request_slave_channel_compat()
1102 return chan; __dma_request_slave_channel_compat()
699 dmaengine_prep_slave_single( struct dma_chan *chan, dma_addr_t buf, size_t len, enum dma_transfer_direction dir, unsigned long flags) dmaengine_prep_slave_single() argument
712 dmaengine_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags) dmaengine_prep_slave_sg() argument
722 dmaengine_prep_rio_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags, struct rio_dma_ext *rio_ext) dmaengine_prep_rio_sg() argument
732 dmaengine_prep_dma_cyclic( struct dma_chan *chan, dma_addr_t buf_addr, size_t buf_len, size_t period_len, enum dma_transfer_direction dir, unsigned long flags) dmaengine_prep_dma_cyclic() argument
741 dmaengine_prep_interleaved_dma( struct dma_chan *chan, struct dma_interleaved_template *xt, unsigned long flags) dmaengine_prep_interleaved_dma() argument
748 dmaengine_prep_dma_sg( struct dma_chan *chan, struct scatterlist *dst_sg, unsigned int dst_nents, struct scatterlist *src_sg, unsigned int src_nents, unsigned long flags) dmaengine_prep_dma_sg() argument
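Tying the inline wrappers above together, a hedged end-to-end slave-transmit sketch (the "tx" channel name, the FIFO address, and the busy-wait are assumptions; real clients install a completion callback instead):

        static int foo_dma_tx(struct device *dev, dma_addr_t buf, size_t len,
                              dma_addr_t fifo)
        {
                struct dma_slave_config cfg = {
                        .direction      = DMA_MEM_TO_DEV,
                        .dst_addr       = fifo,
                        .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                        .dst_maxburst   = 4,
                };
                struct dma_async_tx_descriptor *desc;
                struct dma_chan *chan;
                dma_cookie_t cookie;

                chan = dma_request_slave_channel(dev, "tx");    /* from DT/ACPI map */
                if (!chan)
                        return -ENODEV;

                if (dmaengine_slave_config(chan, &cfg))
                        goto err;

                desc = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
                                                   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
                if (!desc)
                        goto err;

                cookie = dmaengine_submit(desc);
                dma_async_issue_pending(chan);

                /* Busy-wait for completion; sketch only, real code sleeps. */
                while (dmaengine_tx_status(chan, cookie, NULL) == DMA_IN_PROGRESS)
                        cpu_relax();

                dma_release_channel(chan);
                return 0;
        err:
                dma_release_channel(chan);
                return -EIO;    /* sketch collapses the real errno */
        }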
H A Dmailbox_client.h44 int mbox_send_message(struct mbox_chan *chan, void *mssg);
45 void mbox_client_txdone(struct mbox_chan *chan, int r); /* atomic */
46 bool mbox_client_peek_data(struct mbox_chan *chan); /* atomic */
47 void mbox_free_channel(struct mbox_chan *chan); /* may sleep */
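A hedged client sketch around the four calls above, paired with mbox_request_channel() from the same header; the channel index and message format are assumptions:

        static void foo_rx(struct mbox_client *cl, void *mssg)
        {
                /* reached from the controller via mbox_chan_received_data() */
        }

        static int foo_talk(struct device *dev)
        {
                struct mbox_client cl = {
                        .dev            = dev,
                        .rx_callback    = foo_rx,
                        .tx_block       = true,
                        .tx_tout        = 500,  /* ms */
                };
                struct mbox_chan *chan;
                u32 msg = 0xdeadbeef;
                int ret;

                chan = mbox_request_channel(&cl, 0);    /* index into "mboxes" */
                if (IS_ERR(chan))
                        return PTR_ERR(chan);

                ret = mbox_send_message(chan, &msg);
                mbox_free_channel(chan);
                return ret < 0 ? ret : 0;
        }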
/linux-4.1.27/arch/mips/include/asm/mach-au1x00/
H A Dau1000_dma.h156 struct dma_chan *chan = get_dma_chan(dmanr); enable_dma_buffer0() local
158 if (!chan) enable_dma_buffer0()
160 __raw_writel(DMA_BE0, chan->io + DMA_MODE_SET); enable_dma_buffer0()
165 struct dma_chan *chan = get_dma_chan(dmanr); enable_dma_buffer1() local
167 if (!chan) enable_dma_buffer1()
169 __raw_writel(DMA_BE1, chan->io + DMA_MODE_SET); enable_dma_buffer1()
173 struct dma_chan *chan = get_dma_chan(dmanr); enable_dma_buffers() local
175 if (!chan) enable_dma_buffers()
177 __raw_writel(DMA_BE0 | DMA_BE1, chan->io + DMA_MODE_SET); enable_dma_buffers()
182 struct dma_chan *chan = get_dma_chan(dmanr); start_dma() local
184 if (!chan) start_dma()
186 __raw_writel(DMA_GO, chan->io + DMA_MODE_SET); start_dma()
193 struct dma_chan *chan = get_dma_chan(dmanr); halt_dma() local
196 if (!chan) halt_dma()
198 __raw_writel(DMA_GO, chan->io + DMA_MODE_CLEAR); halt_dma()
202 if (__raw_readl(chan->io + DMA_MODE_READ) & DMA_HALT) halt_dma()
210 struct dma_chan *chan = get_dma_chan(dmanr); disable_dma() local
212 if (!chan) disable_dma()
218 __raw_writel(~DMA_GO, chan->io + DMA_MODE_CLEAR); disable_dma()
223 struct dma_chan *chan = get_dma_chan(dmanr); dma_halted() local
225 if (!chan) dma_halted()
227 return (__raw_readl(chan->io + DMA_MODE_READ) & DMA_HALT) ? 1 : 0; dma_halted()
233 struct dma_chan *chan = get_dma_chan(dmanr); init_dma() local
236 if (!chan) init_dma()
242 __raw_writel(CPHYSADDR(chan->fifo_addr), chan->io + DMA_PERIPHERAL_ADDR); init_dma()
244 mode = chan->mode | (chan->dev_id << DMA_DID_BIT); init_dma()
245 if (chan->irq) init_dma()
248 __raw_writel(~mode, chan->io + DMA_MODE_CLEAR); init_dma()
249 __raw_writel(mode, chan->io + DMA_MODE_SET); init_dma()
257 struct dma_chan *chan = get_dma_chan(dmanr); set_dma_mode() local
259 if (!chan) set_dma_mode()
267 chan->mode &= ~(DMA_BE | DMA_DR | DMA_TS8 | DMA_DW_MASK | DMA_NC); set_dma_mode()
268 chan->mode |= mode; set_dma_mode()
273 struct dma_chan *chan = get_dma_chan(dmanr); get_dma_mode() local
275 if (!chan) get_dma_mode()
277 return chan->mode; get_dma_mode()
282 struct dma_chan *chan = get_dma_chan(dmanr); get_dma_active_buffer() local
284 if (!chan) get_dma_active_buffer()
286 return (__raw_readl(chan->io + DMA_MODE_READ) & DMA_AB) ? 1 : 0; get_dma_active_buffer()
296 struct dma_chan *chan = get_dma_chan(dmanr); set_dma_fifo_addr() local
298 if (!chan) set_dma_fifo_addr()
301 if (chan->mode & DMA_DS) /* second bank of device IDs */ set_dma_fifo_addr()
304 if (chan->dev_id != DMA_ID_GP04 && chan->dev_id != DMA_ID_GP05) set_dma_fifo_addr()
307 __raw_writel(CPHYSADDR(a), chan->io + DMA_PERIPHERAL_ADDR); set_dma_fifo_addr()
315 struct dma_chan *chan = get_dma_chan(dmanr); clear_dma_done0() local
317 if (!chan) clear_dma_done0()
319 __raw_writel(DMA_D0, chan->io + DMA_MODE_CLEAR); clear_dma_done0()
324 struct dma_chan *chan = get_dma_chan(dmanr); clear_dma_done1() local
326 if (!chan) clear_dma_done1()
328 __raw_writel(DMA_D1, chan->io + DMA_MODE_CLEAR); clear_dma_done1()
343 struct dma_chan *chan = get_dma_chan(dmanr); set_dma_addr0() local
345 if (!chan) set_dma_addr0()
347 __raw_writel(a, chan->io + DMA_BUFFER0_START); set_dma_addr0()
355 struct dma_chan *chan = get_dma_chan(dmanr); set_dma_addr1() local
357 if (!chan) set_dma_addr1()
359 __raw_writel(a, chan->io + DMA_BUFFER1_START); set_dma_addr1()
368 struct dma_chan *chan = get_dma_chan(dmanr); set_dma_count0() local
370 if (!chan) set_dma_count0()
373 __raw_writel(count, chan->io + DMA_BUFFER0_COUNT); set_dma_count0()
381 struct dma_chan *chan = get_dma_chan(dmanr); set_dma_count1() local
383 if (!chan) set_dma_count1()
386 __raw_writel(count, chan->io + DMA_BUFFER1_COUNT); set_dma_count1()
394 struct dma_chan *chan = get_dma_chan(dmanr); set_dma_count() local
396 if (!chan) set_dma_count()
399 __raw_writel(count, chan->io + DMA_BUFFER0_COUNT); set_dma_count()
400 __raw_writel(count, chan->io + DMA_BUFFER1_COUNT); set_dma_count()
409 struct dma_chan *chan = get_dma_chan(dmanr); get_dma_buffer_done() local
411 if (!chan) get_dma_buffer_done()
413 return __raw_readl(chan->io + DMA_MODE_READ) & (DMA_D0 | DMA_D1); get_dma_buffer_done()
422 struct dma_chan *chan = get_dma_chan(dmanr); get_dma_done_irq() local
424 if (!chan) get_dma_done_irq()
426 return chan->irq; get_dma_done_irq()
435 struct dma_chan *chan = get_dma_chan(dmanr); get_dma_residue() local
437 if (!chan) get_dma_residue()
440 curBufCntReg = (__raw_readl(chan->io + DMA_MODE_READ) & DMA_AB) ? get_dma_residue()
443 count = __raw_readl(chan->io + curBufCntReg) & DMA_COUNT_MASK; get_dma_residue()
445 if ((chan->mode & DMA_DW_MASK) == DMA_DW16) get_dma_residue()
447 else if ((chan->mode & DMA_DW_MASK) == DMA_DW32) get_dma_residue()
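A hedged sketch driving the Au1x00 helpers above: the mode must be staged with set_dma_mode() before init_dma() latches it into the MODE_SET register, and completion shows up in the D0/D1 done bits. The dmanr handle comes from request_au1000_dma(), which is not in this listing.

        static void au1000_dma_one_shot(unsigned int dmanr, unsigned long paddr,
                                        int count)
        {
                set_dma_mode(dmanr, DMA_DW16 | DMA_NC); /* updates chan->mode only */
                init_dma(dmanr);                        /* writes mode to hardware */
                set_dma_addr0(dmanr, paddr);
                set_dma_count0(dmanr, count);
                enable_dma_buffer0(dmanr);
                start_dma(dmanr);                       /* sets DMA_GO */

                while (!(get_dma_buffer_done(dmanr) & DMA_D0))
                        cpu_relax();                    /* poll buffer-0 done */
                clear_dma_done0(dmanr);
        }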
/linux-4.1.27/drivers/dma/xilinx/
H A Dxilinx_vdma.c239 * @chan: Driver specific VDMA channel
247 struct xilinx_vdma_chan *chan[XILINX_VDMA_MAX_CHANS_PER_DEVICE]; member in struct:xilinx_vdma_device
253 #define to_xilinx_chan(chan) \
254 container_of(chan, struct xilinx_vdma_chan, common)
259 static inline u32 vdma_read(struct xilinx_vdma_chan *chan, u32 reg) vdma_read() argument
261 return ioread32(chan->xdev->regs + reg); vdma_read()
264 static inline void vdma_write(struct xilinx_vdma_chan *chan, u32 reg, u32 value) vdma_write() argument
266 iowrite32(value, chan->xdev->regs + reg); vdma_write()
269 static inline void vdma_desc_write(struct xilinx_vdma_chan *chan, u32 reg, vdma_desc_write() argument
272 vdma_write(chan, chan->desc_offset + reg, value); vdma_desc_write()
275 static inline u32 vdma_ctrl_read(struct xilinx_vdma_chan *chan, u32 reg) vdma_ctrl_read() argument
277 return vdma_read(chan, chan->ctrl_offset + reg); vdma_ctrl_read()
280 static inline void vdma_ctrl_write(struct xilinx_vdma_chan *chan, u32 reg, vdma_ctrl_write() argument
283 vdma_write(chan, chan->ctrl_offset + reg, value); vdma_ctrl_write()
286 static inline void vdma_ctrl_clr(struct xilinx_vdma_chan *chan, u32 reg, vdma_ctrl_clr() argument
289 vdma_ctrl_write(chan, reg, vdma_ctrl_read(chan, reg) & ~clr); vdma_ctrl_clr()
292 static inline void vdma_ctrl_set(struct xilinx_vdma_chan *chan, u32 reg, vdma_ctrl_set() argument
295 vdma_ctrl_write(chan, reg, vdma_ctrl_read(chan, reg) | set); vdma_ctrl_set()
304 * @chan: Driver specific VDMA channel
309 xilinx_vdma_alloc_tx_segment(struct xilinx_vdma_chan *chan) xilinx_vdma_alloc_tx_segment() argument
314 segment = dma_pool_alloc(chan->desc_pool, GFP_ATOMIC, &phys); xilinx_vdma_alloc_tx_segment()
326 * @chan: Driver specific VDMA channel
329 static void xilinx_vdma_free_tx_segment(struct xilinx_vdma_chan *chan, xilinx_vdma_free_tx_segment() argument
332 dma_pool_free(chan->desc_pool, segment, segment->phys); xilinx_vdma_free_tx_segment()
337 * @chan: Driver specific VDMA channel
342 xilinx_vdma_alloc_tx_descriptor(struct xilinx_vdma_chan *chan) xilinx_vdma_alloc_tx_descriptor() argument
347 if (chan->allocated_desc) xilinx_vdma_alloc_tx_descriptor()
348 return chan->allocated_desc; xilinx_vdma_alloc_tx_descriptor()
354 spin_lock_irqsave(&chan->lock, flags); xilinx_vdma_alloc_tx_descriptor()
355 chan->allocated_desc = desc; xilinx_vdma_alloc_tx_descriptor()
356 spin_unlock_irqrestore(&chan->lock, flags); xilinx_vdma_alloc_tx_descriptor()
365 * @chan: Driver specific VDMA channel
369 xilinx_vdma_free_tx_descriptor(struct xilinx_vdma_chan *chan, xilinx_vdma_free_tx_descriptor() argument
379 xilinx_vdma_free_tx_segment(chan, segment); xilinx_vdma_free_tx_descriptor()
389 * @chan: Driver specific VDMA channel
392 static void xilinx_vdma_free_desc_list(struct xilinx_vdma_chan *chan, xilinx_vdma_free_desc_list() argument
399 xilinx_vdma_free_tx_descriptor(chan, desc); list_for_each_entry_safe()
405 * @chan: Driver specific VDMA channel
407 static void xilinx_vdma_free_descriptors(struct xilinx_vdma_chan *chan) xilinx_vdma_free_descriptors() argument
411 spin_lock_irqsave(&chan->lock, flags); xilinx_vdma_free_descriptors()
413 xilinx_vdma_free_desc_list(chan, &chan->pending_list); xilinx_vdma_free_descriptors()
414 xilinx_vdma_free_desc_list(chan, &chan->done_list); xilinx_vdma_free_descriptors()
416 xilinx_vdma_free_tx_descriptor(chan, chan->active_desc); xilinx_vdma_free_descriptors()
417 chan->active_desc = NULL; xilinx_vdma_free_descriptors()
419 spin_unlock_irqrestore(&chan->lock, flags); xilinx_vdma_free_descriptors()
428 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); xilinx_vdma_free_chan_resources() local
430 dev_dbg(chan->dev, "Free all channel resources.\n"); xilinx_vdma_free_chan_resources()
432 xilinx_vdma_free_descriptors(chan); xilinx_vdma_free_chan_resources()
433 dma_pool_destroy(chan->desc_pool); xilinx_vdma_free_chan_resources()
434 chan->desc_pool = NULL; xilinx_vdma_free_chan_resources()
439 * @chan: Driver specific VDMA channel
441 static void xilinx_vdma_chan_desc_cleanup(struct xilinx_vdma_chan *chan) xilinx_vdma_chan_desc_cleanup() argument
446 spin_lock_irqsave(&chan->lock, flags); xilinx_vdma_chan_desc_cleanup()
448 list_for_each_entry_safe(desc, next, &chan->done_list, node) { xilinx_vdma_chan_desc_cleanup()
459 spin_unlock_irqrestore(&chan->lock, flags); xilinx_vdma_chan_desc_cleanup()
461 spin_lock_irqsave(&chan->lock, flags); xilinx_vdma_chan_desc_cleanup()
466 xilinx_vdma_free_tx_descriptor(chan, desc); xilinx_vdma_chan_desc_cleanup()
469 spin_unlock_irqrestore(&chan->lock, flags); xilinx_vdma_chan_desc_cleanup()
478 struct xilinx_vdma_chan *chan = (struct xilinx_vdma_chan *)data; xilinx_vdma_do_tasklet() local
480 xilinx_vdma_chan_desc_cleanup(chan); xilinx_vdma_do_tasklet()
491 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); xilinx_vdma_alloc_chan_resources() local
494 if (chan->desc_pool) xilinx_vdma_alloc_chan_resources()
501 chan->desc_pool = dma_pool_create("xilinx_vdma_desc_pool", xilinx_vdma_alloc_chan_resources()
502 chan->dev, xilinx_vdma_alloc_chan_resources()
505 if (!chan->desc_pool) { xilinx_vdma_alloc_chan_resources()
506 dev_err(chan->dev, xilinx_vdma_alloc_chan_resources()
508 chan->id); xilinx_vdma_alloc_chan_resources()
533 * @chan: Driver specific VDMA channel
537 static bool xilinx_vdma_is_running(struct xilinx_vdma_chan *chan) xilinx_vdma_is_running() argument
539 return !(vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR) & xilinx_vdma_is_running()
541 (vdma_ctrl_read(chan, XILINX_VDMA_REG_DMACR) & xilinx_vdma_is_running()
547 * @chan: Driver specific VDMA channel
551 static bool xilinx_vdma_is_idle(struct xilinx_vdma_chan *chan) xilinx_vdma_is_idle() argument
553 return vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR) & xilinx_vdma_is_idle()
559 * @chan: Driver specific VDMA channel
561 static void xilinx_vdma_halt(struct xilinx_vdma_chan *chan) xilinx_vdma_halt() argument
565 vdma_ctrl_clr(chan, XILINX_VDMA_REG_DMACR, XILINX_VDMA_DMACR_RUNSTOP); xilinx_vdma_halt()
569 if (vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR) & xilinx_vdma_halt()
575 dev_err(chan->dev, "Cannot stop channel %p: %x\n", xilinx_vdma_halt()
576 chan, vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR)); xilinx_vdma_halt()
577 chan->err = true; xilinx_vdma_halt()
585 * @chan: Driver specific VDMA channel
587 static void xilinx_vdma_start(struct xilinx_vdma_chan *chan) xilinx_vdma_start() argument
591 vdma_ctrl_set(chan, XILINX_VDMA_REG_DMACR, XILINX_VDMA_DMACR_RUNSTOP); xilinx_vdma_start()
595 if (!(vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR) & xilinx_vdma_start()
601 dev_err(chan->dev, "Cannot start channel %p: %x\n", xilinx_vdma_start()
602 chan, vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR)); xilinx_vdma_start()
604 chan->err = true; xilinx_vdma_start()
612 * @chan: Driver specific channel struct pointer
614 static void xilinx_vdma_start_transfer(struct xilinx_vdma_chan *chan) xilinx_vdma_start_transfer() argument
616 struct xilinx_vdma_config *config = &chan->config; xilinx_vdma_start_transfer()
622 if (chan->err) xilinx_vdma_start_transfer()
625 spin_lock_irqsave(&chan->lock, flags); xilinx_vdma_start_transfer()
628 if (chan->active_desc) xilinx_vdma_start_transfer()
631 if (list_empty(&chan->pending_list)) xilinx_vdma_start_transfer()
634 desc = list_first_entry(&chan->pending_list, xilinx_vdma_start_transfer()
638 if (chan->has_sg && xilinx_vdma_is_running(chan) && xilinx_vdma_start_transfer()
639 !xilinx_vdma_is_idle(chan)) { xilinx_vdma_start_transfer()
640 dev_dbg(chan->dev, "DMA controller still busy\n"); xilinx_vdma_start_transfer()
648 if (chan->has_sg) { xilinx_vdma_start_transfer()
654 vdma_ctrl_write(chan, XILINX_VDMA_REG_CURDESC, head->phys); xilinx_vdma_start_transfer()
658 reg = vdma_ctrl_read(chan, XILINX_VDMA_REG_DMACR); xilinx_vdma_start_transfer()
669 if (chan->has_sg || !config->park) xilinx_vdma_start_transfer()
675 vdma_ctrl_write(chan, XILINX_VDMA_REG_DMACR, reg); xilinx_vdma_start_transfer()
678 (config->park_frm < chan->num_frms)) { xilinx_vdma_start_transfer()
679 if (chan->direction == DMA_MEM_TO_DEV) xilinx_vdma_start_transfer()
680 vdma_write(chan, XILINX_VDMA_REG_PARK_PTR, xilinx_vdma_start_transfer()
684 vdma_write(chan, XILINX_VDMA_REG_PARK_PTR, xilinx_vdma_start_transfer()
690 xilinx_vdma_start(chan); xilinx_vdma_start_transfer()
692 if (chan->err) xilinx_vdma_start_transfer()
696 if (chan->has_sg) { xilinx_vdma_start_transfer()
697 vdma_ctrl_write(chan, XILINX_VDMA_REG_TAILDESC, tail->phys); xilinx_vdma_start_transfer()
703 vdma_desc_write(chan, xilinx_vdma_start_transfer()
713 vdma_desc_write(chan, XILINX_VDMA_REG_HSIZE, last->hw.hsize); xilinx_vdma_start_transfer()
714 vdma_desc_write(chan, XILINX_VDMA_REG_FRMDLY_STRIDE, xilinx_vdma_start_transfer()
716 vdma_desc_write(chan, XILINX_VDMA_REG_VSIZE, last->hw.vsize); xilinx_vdma_start_transfer()
720 chan->active_desc = desc; xilinx_vdma_start_transfer()
723 spin_unlock_irqrestore(&chan->lock, flags); xilinx_vdma_start_transfer()
732 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); xilinx_vdma_issue_pending() local
734 xilinx_vdma_start_transfer(chan); xilinx_vdma_issue_pending()
739 * @chan : xilinx DMA channel
743 static void xilinx_vdma_complete_descriptor(struct xilinx_vdma_chan *chan) xilinx_vdma_complete_descriptor() argument
748 spin_lock_irqsave(&chan->lock, flags); xilinx_vdma_complete_descriptor()
750 desc = chan->active_desc; xilinx_vdma_complete_descriptor()
752 dev_dbg(chan->dev, "no running descriptors\n"); xilinx_vdma_complete_descriptor()
757 list_add_tail(&desc->node, &chan->done_list); xilinx_vdma_complete_descriptor()
759 chan->active_desc = NULL; xilinx_vdma_complete_descriptor()
762 spin_unlock_irqrestore(&chan->lock, flags); xilinx_vdma_complete_descriptor()
767 * @chan: Driver specific VDMA channel
771 static int xilinx_vdma_reset(struct xilinx_vdma_chan *chan) xilinx_vdma_reset() argument
776 vdma_ctrl_set(chan, XILINX_VDMA_REG_DMACR, XILINX_VDMA_DMACR_RESET); xilinx_vdma_reset()
778 tmp = vdma_ctrl_read(chan, XILINX_VDMA_REG_DMACR) & xilinx_vdma_reset()
783 tmp = vdma_ctrl_read(chan, XILINX_VDMA_REG_DMACR) & xilinx_vdma_reset()
788 dev_err(chan->dev, "reset timeout, cr %x, sr %x\n", xilinx_vdma_reset()
789 vdma_ctrl_read(chan, XILINX_VDMA_REG_DMACR), xilinx_vdma_reset()
790 vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR)); xilinx_vdma_reset()
794 chan->err = false; xilinx_vdma_reset()
801 * @chan: Driver specific VDMA channel
805 static int xilinx_vdma_chan_reset(struct xilinx_vdma_chan *chan) xilinx_vdma_chan_reset() argument
810 err = xilinx_vdma_reset(chan); xilinx_vdma_chan_reset()
815 vdma_ctrl_set(chan, XILINX_VDMA_REG_DMACR, xilinx_vdma_chan_reset()
830 struct xilinx_vdma_chan *chan = data; xilinx_vdma_irq_handler() local
834 status = vdma_ctrl_read(chan, XILINX_VDMA_REG_DMASR); xilinx_vdma_irq_handler()
838 vdma_ctrl_write(chan, XILINX_VDMA_REG_DMASR, xilinx_vdma_irq_handler()
850 vdma_ctrl_write(chan, XILINX_VDMA_REG_DMASR, xilinx_vdma_irq_handler()
853 if (!chan->flush_on_fsync || xilinx_vdma_irq_handler()
855 dev_err(chan->dev, xilinx_vdma_irq_handler()
857 chan, errors, xilinx_vdma_irq_handler()
858 vdma_ctrl_read(chan, XILINX_VDMA_REG_CURDESC), xilinx_vdma_irq_handler()
859 vdma_ctrl_read(chan, XILINX_VDMA_REG_TAILDESC)); xilinx_vdma_irq_handler()
860 chan->err = true; xilinx_vdma_irq_handler()
869 dev_dbg(chan->dev, "Inter-packet latency too long\n"); xilinx_vdma_irq_handler()
873 xilinx_vdma_complete_descriptor(chan); xilinx_vdma_irq_handler()
874 xilinx_vdma_start_transfer(chan); xilinx_vdma_irq_handler()
877 tasklet_schedule(&chan->tasklet); xilinx_vdma_irq_handler()
890 struct xilinx_vdma_chan *chan = to_xilinx_chan(tx->chan); xilinx_vdma_tx_submit() local
895 if (chan->err) { xilinx_vdma_tx_submit()
900 err = xilinx_vdma_chan_reset(chan); xilinx_vdma_tx_submit()
905 spin_lock_irqsave(&chan->lock, flags); xilinx_vdma_tx_submit()
910 list_add_tail(&desc->node, &chan->pending_list); xilinx_vdma_tx_submit()
913 chan->allocated_desc = NULL; xilinx_vdma_tx_submit()
915 spin_unlock_irqrestore(&chan->lock, flags); xilinx_vdma_tx_submit()
934 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); xilinx_vdma_dma_prep_interleaved() local
949 desc = xilinx_vdma_alloc_tx_descriptor(chan); xilinx_vdma_dma_prep_interleaved()
953 dma_async_tx_descriptor_init(&desc->async_tx, &chan->common); xilinx_vdma_dma_prep_interleaved()
958 segment = xilinx_vdma_alloc_tx_segment(chan); xilinx_vdma_dma_prep_interleaved()
968 hw->stride |= chan->config.frm_dly << xilinx_vdma_dma_prep_interleaved()
996 xilinx_vdma_free_tx_descriptor(chan, desc); xilinx_vdma_dma_prep_interleaved()
1002 * @chan: Driver specific VDMA Channel pointer
1006 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); xilinx_vdma_terminate_all() local
1009 xilinx_vdma_halt(chan); xilinx_vdma_terminate_all()
1012 xilinx_vdma_free_descriptors(chan); xilinx_vdma_terminate_all()
1033 struct xilinx_vdma_chan *chan = to_xilinx_chan(dchan); xilinx_vdma_channel_set_config() local
1037 return xilinx_vdma_chan_reset(chan); xilinx_vdma_channel_set_config()
1039 dmacr = vdma_ctrl_read(chan, XILINX_VDMA_REG_DMACR); xilinx_vdma_channel_set_config()
1041 chan->config.frm_dly = cfg->frm_dly; xilinx_vdma_channel_set_config()
1042 chan->config.park = cfg->park; xilinx_vdma_channel_set_config()
1045 chan->config.gen_lock = cfg->gen_lock; xilinx_vdma_channel_set_config()
1046 chan->config.master = cfg->master; xilinx_vdma_channel_set_config()
1048 if (cfg->gen_lock && chan->genlock) { xilinx_vdma_channel_set_config()
1053 chan->config.frm_cnt_en = cfg->frm_cnt_en; xilinx_vdma_channel_set_config()
1055 chan->config.park_frm = cfg->park_frm; xilinx_vdma_channel_set_config()
1057 chan->config.park_frm = -1; xilinx_vdma_channel_set_config()
1059 chan->config.coalesc = cfg->coalesc; xilinx_vdma_channel_set_config()
1060 chan->config.delay = cfg->delay; xilinx_vdma_channel_set_config()
1064 chan->config.coalesc = cfg->coalesc; xilinx_vdma_channel_set_config()
1069 chan->config.delay = cfg->delay; xilinx_vdma_channel_set_config()
1076 vdma_ctrl_write(chan, XILINX_VDMA_REG_DMACR, dmacr); xilinx_vdma_channel_set_config()
1088 * @chan: Driver specific VDMA channel
1090 static void xilinx_vdma_chan_remove(struct xilinx_vdma_chan *chan) xilinx_vdma_chan_remove() argument
1093 vdma_ctrl_clr(chan, XILINX_VDMA_REG_DMACR, xilinx_vdma_chan_remove()
1096 if (chan->irq > 0) xilinx_vdma_chan_remove()
1097 free_irq(chan->irq, chan); xilinx_vdma_chan_remove()
1099 tasklet_kill(&chan->tasklet); xilinx_vdma_chan_remove()
1101 list_del(&chan->common.device_node); xilinx_vdma_chan_remove()
1117 struct xilinx_vdma_chan *chan; xilinx_vdma_chan_probe() local
1123 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL); xilinx_vdma_chan_probe()
1124 if (!chan) xilinx_vdma_chan_probe()
1127 chan->dev = xdev->dev; xilinx_vdma_chan_probe()
1128 chan->xdev = xdev; xilinx_vdma_chan_probe()
1129 chan->has_sg = xdev->has_sg; xilinx_vdma_chan_probe()
1131 spin_lock_init(&chan->lock); xilinx_vdma_chan_probe()
1132 INIT_LIST_HEAD(&chan->pending_list); xilinx_vdma_chan_probe()
1133 INIT_LIST_HEAD(&chan->done_list); xilinx_vdma_chan_probe()
1138 chan->genlock = of_property_read_bool(node, "xlnx,genlock-mode"); xilinx_vdma_chan_probe()
1155 chan->direction = DMA_MEM_TO_DEV; xilinx_vdma_chan_probe()
1156 chan->id = 0; xilinx_vdma_chan_probe()
1158 chan->ctrl_offset = XILINX_VDMA_MM2S_CTRL_OFFSET; xilinx_vdma_chan_probe()
1159 chan->desc_offset = XILINX_VDMA_MM2S_DESC_OFFSET; xilinx_vdma_chan_probe()
1163 chan->flush_on_fsync = true; xilinx_vdma_chan_probe()
1166 chan->direction = DMA_DEV_TO_MEM; xilinx_vdma_chan_probe()
1167 chan->id = 1; xilinx_vdma_chan_probe()
1169 chan->ctrl_offset = XILINX_VDMA_S2MM_CTRL_OFFSET; xilinx_vdma_chan_probe()
1170 chan->desc_offset = XILINX_VDMA_S2MM_DESC_OFFSET; xilinx_vdma_chan_probe()
1174 chan->flush_on_fsync = true; xilinx_vdma_chan_probe()
1181 chan->irq = irq_of_parse_and_map(node, 0); xilinx_vdma_chan_probe()
1182 err = request_irq(chan->irq, xilinx_vdma_irq_handler, IRQF_SHARED, xilinx_vdma_chan_probe()
1183 "xilinx-vdma-controller", chan); xilinx_vdma_chan_probe()
1185 dev_err(xdev->dev, "unable to request IRQ %d\n", chan->irq); xilinx_vdma_chan_probe()
1190 tasklet_init(&chan->tasklet, xilinx_vdma_do_tasklet, xilinx_vdma_chan_probe()
1191 (unsigned long)chan); xilinx_vdma_chan_probe()
1197 chan->common.device = &xdev->common; xilinx_vdma_chan_probe()
1199 list_add_tail(&chan->common.device_node, &xdev->common.channels); xilinx_vdma_chan_probe()
1200 xdev->chan[chan->id] = chan; xilinx_vdma_chan_probe()
1203 err = xilinx_vdma_chan_reset(chan); xilinx_vdma_chan_probe()
1228 return dma_get_slave_channel(&xdev->chan[chan_id]->common); of_dma_xilinx_xlate()
1300 if (xdev->chan[i])
1301 xdev->chan[i]->num_frms = num_frames;
1320 if (xdev->chan[i])
1321 xilinx_vdma_chan_remove(xdev->chan[i]);
1342 if (xdev->chan[i]) xilinx_vdma_remove()
1343 xilinx_vdma_chan_remove(xdev->chan[i]); xilinx_vdma_remove()
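A hedged VDMA client sketch: claim a channel through the DT xlate above, then apply the Xilinx-specific parameters that xilinx_vdma_start_transfer() consults (park, frame delay, and so on). The struct and setter are assumed to come from include/linux/amba/xilinx_dma.h in this tree; the "vdma0" name is an assumption.

        static int vdma_client_setup(struct device *dev)
        {
                struct xilinx_vdma_config cfg = {
                        .park           = 1,    /* hold on one frame buffer */
                        .park_frm       = 0,
                };
                struct dma_chan *chan;
                int err;

                chan = dma_request_slave_channel(dev, "vdma0"); /* via of_dma_xilinx_xlate() */
                if (!chan)
                        return -ENODEV;

                err = xilinx_vdma_channel_set_config(chan, &cfg);
                if (err)
                        dma_release_channel(chan);
                return err;
        }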
/linux-4.1.27/include/linux/dma/
H A Dmmp-pdma.h7 bool mmp_pdma_filter_fn(struct dma_chan *chan, void *param);
9 static inline bool mmp_pdma_filter_fn(struct dma_chan *chan, void *param) mmp_pdma_filter_fn() argument
dw.h 53 struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan,
56 void dw_dma_cyclic_free(struct dma_chan *chan);
57 int dw_dma_cyclic_start(struct dma_chan *chan);
58 void dw_dma_cyclic_stop(struct dma_chan *chan);
60 dma_addr_t dw_dma_get_src_addr(struct dma_chan *chan);
62 dma_addr_t dw_dma_get_dst_addr(struct dma_chan *chan);
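These dw_dma cyclic helpers follow a prep/start/stop/free life cycle. A hedged usage sketch (buffer and period sizes are placeholders; real callers size the period to their FIFO):

#include <linux/err.h>
#include <linux/dmaengine.h>
#include <linux/dma/dw.h>

static int demo_dw_cyclic(struct dma_chan *chan, dma_addr_t buf, size_t len)
{
	struct dw_cyclic_desc *cdesc;

	/* Split the buffer into two periods looping memory -> device. */
	cdesc = dw_dma_cyclic_prep(chan, buf, len, len / 2, DMA_MEM_TO_DEV);
	if (IS_ERR(cdesc))
		return PTR_ERR(cdesc);

	dw_dma_cyclic_start(chan);
	/* ... stream runs; dw_dma_get_src_addr(chan) can be polled for
	 * the current position ... */
	dw_dma_cyclic_stop(chan);
	dw_dma_cyclic_free(chan);
	return 0;
}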
/linux-4.1.27/arch/sh/include/asm/
dma.h 57 int (*request)(struct dma_channel *chan);
58 void (*free)(struct dma_channel *chan);
60 int (*get_residue)(struct dma_channel *chan);
61 int (*xfer)(struct dma_channel *chan);
62 int (*configure)(struct dma_channel *chan, unsigned long flags);
63 int (*extend)(struct dma_channel *chan, unsigned long op, void *param);
69 unsigned int chan; /* DMAC channel number */ member in struct:dma_channel
112 extern int dma_xfer(unsigned int chan, unsigned long from,
115 #define dma_write(chan, from, to, size) \
116 dma_xfer(chan, from, to, size, DMA_MODE_WRITE)
117 #define dma_write_page(chan, from, to) \
118 dma_write(chan, from, to, PAGE_SIZE)
120 #define dma_read(chan, from, to, size) \
121 dma_xfer(chan, from, to, size, DMA_MODE_READ)
122 #define dma_read_page(chan, from, to) \
123 dma_read(chan, from, to, PAGE_SIZE)
127 extern int get_dma_residue(unsigned int chan);
128 extern struct dma_info *get_dma_info(unsigned int chan);
129 extern struct dma_channel *get_dma_channel(unsigned int chan);
130 extern void dma_wait_for_completion(unsigned int chan);
131 extern void dma_configure_channel(unsigned int chan, unsigned long flags);
137 extern int dma_extend(unsigned int chan, unsigned long op, void *param);
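A compact sketch of driving this SH DMA API end to end, assuming the usual request_dma()/free_dma() pair declared earlier in the same header; the channel number and addresses are placeholders:

#include <linux/kernel.h>
#include <asm/dma.h>

static int demo_sh_dma_copy(unsigned int chan, unsigned long src,
			    unsigned long dst)
{
	int ret = request_dma(chan, "demo");	/* claim the DMAC channel */
	if (ret)
		return ret;

	dma_configure_channel(chan, 0);		/* default flags */
	ret = dma_write_page(chan, src, dst);	/* one PAGE_SIZE transfer */
	dma_wait_for_completion(chan);		/* block until idle */

	pr_info("dma%u residue: %d\n", chan, get_dma_residue(chan));
	free_dma(chan);
	return ret;
}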
/linux-4.1.27/arch/sh/drivers/pci/
pci-sh7751.c 22 static int __init __area_sdram_check(struct pci_channel *chan, __area_sdram_check() argument
34 pci_write_reg(chan, word, SH4_PCIBCR1); __area_sdram_check()
43 pci_write_reg(chan, word, SH4_PCIBCR2); __area_sdram_check()
80 struct pci_channel *chan = &sh7751_pci_controller; sh7751_pci_init() local
86 chan->reg_base = 0xfe200000; sh7751_pci_init()
89 id = pci_read_reg(chan, SH7751_PCICONF0); sh7751_pci_init()
102 pci_write_reg(chan, 0, SH4_PCICLKR); sh7751_pci_init()
105 pci_write_reg(chan, word, SH4_PCIPINT); sh7751_pci_init()
113 pci_write_reg(chan, word, SH7751_PCICONF1); sh7751_pci_init()
117 pci_write_reg(chan, word, SH7751_PCICONF2); sh7751_pci_init()
123 pci_write_reg(chan, word, SH4_PCILSR0); sh7751_pci_init()
126 pci_write_reg(chan, word, SH4_PCILAR0); sh7751_pci_init()
127 pci_write_reg(chan, word, SH7751_PCICONF5); sh7751_pci_init()
132 word = chan->resources[1].start & SH4_PCIMBR_MASK; sh7751_pci_init()
134 pci_write_reg(chan, word , SH4_PCIMBR); sh7751_pci_init()
138 word = chan->resources[0].start & SH4_PCIIOBR_MASK; sh7751_pci_init()
140 pci_write_reg(chan, word, SH4_PCIIOBR); sh7751_pci_init()
146 case SH7751_CS0_BASE_ADDR: word = __area_sdram_check(chan, 0); break; sh7751_pci_init()
147 case SH7751_CS1_BASE_ADDR: word = __area_sdram_check(chan, 1); break; sh7751_pci_init()
148 case SH7751_CS2_BASE_ADDR: word = __area_sdram_check(chan, 2); break; sh7751_pci_init()
149 case SH7751_CS3_BASE_ADDR: word = __area_sdram_check(chan, 3); break; sh7751_pci_init()
150 case SH7751_CS4_BASE_ADDR: word = __area_sdram_check(chan, 4); break; sh7751_pci_init()
151 case SH7751_CS5_BASE_ADDR: word = __area_sdram_check(chan, 5); break; sh7751_pci_init()
152 case SH7751_CS6_BASE_ADDR: word = __area_sdram_check(chan, 6); break; sh7751_pci_init()
160 pci_write_reg(chan, word, SH4_PCIWCR1); sh7751_pci_init()
162 pci_write_reg(chan, word, SH4_PCIWCR2); sh7751_pci_init()
164 pci_write_reg(chan, word, SH4_PCIWCR3); sh7751_pci_init()
166 pci_write_reg(chan, word, SH4_PCIMCR); sh7751_pci_init()
173 pci_fixup_pcic(chan); sh7751_pci_init()
178 pci_write_reg(chan, word, SH4_PCICR); sh7751_pci_init()
180 return register_pci_controller(chan); sh7751_pci_init()
fixups-rts7751r2d.c 42 int pci_fixup_pcic(struct pci_channel *chan) pci_fixup_pcic() argument
48 pci_write_reg(chan, bcr1, SH4_PCIBCR1); pci_fixup_pcic()
51 pci_write_reg(chan, 0x0000c3ff, SH4_PCIINTM); pci_fixup_pcic()
52 pci_write_reg(chan, 0x0000380f, SH4_PCIAINTM); pci_fixup_pcic()
54 pci_write_reg(chan, 0xfb900047, SH7751_PCICONF1); pci_fixup_pcic()
55 pci_write_reg(chan, 0xab000001, SH7751_PCICONF4); pci_fixup_pcic()
59 pci_write_reg(chan, mcr, SH4_PCIMCR); pci_fixup_pcic()
61 pci_write_reg(chan, 0x0c000000, SH7751_PCICONF5); pci_fixup_pcic()
62 pci_write_reg(chan, 0xd0000000, SH7751_PCICONF6); pci_fixup_pcic()
63 pci_write_reg(chan, 0x0c000000, SH4_PCILAR0); pci_fixup_pcic()
64 pci_write_reg(chan, 0x00000000, SH4_PCILAR1); pci_fixup_pcic()
pcie-sh7786.c 153 static int __init phy_wait_for_ack(struct pci_channel *chan) phy_wait_for_ack() argument
158 if (pci_read_reg(chan, SH4A_PCIEPHYADRR) & (1 << BITS_ACK)) phy_wait_for_ack()
167 static int __init pci_wait_for_irq(struct pci_channel *chan, unsigned int mask) pci_wait_for_irq() argument
172 if ((pci_read_reg(chan, SH4A_PCIEINTR) & mask) == mask) pci_wait_for_irq()
181 static void __init phy_write_reg(struct pci_channel *chan, unsigned int addr, phy_write_reg() argument
190 pci_write_reg(chan, data, SH4A_PCIEPHYDOUTR); phy_write_reg()
191 pci_write_reg(chan, phyaddr, SH4A_PCIEPHYADRR); phy_write_reg()
193 phy_wait_for_ack(chan); phy_write_reg()
196 pci_write_reg(chan, 0, SH4A_PCIEPHYDOUTR); phy_write_reg()
197 pci_write_reg(chan, 0, SH4A_PCIEPHYADRR); phy_write_reg()
199 phy_wait_for_ack(chan); phy_write_reg()
204 struct pci_channel *chan = port->hose; pcie_clk_init() local
239 clk->enable_reg = (void __iomem *)(chan->reg_base + SH4A_PCIEPHYCTLR); pcie_clk_init()
259 struct pci_channel *chan = port->hose; phy_init() local
265 phy_write_reg(chan, 0x60, 0xf, 0x004b008b); phy_init()
266 phy_write_reg(chan, 0x61, 0xf, 0x00007b41); phy_init()
267 phy_write_reg(chan, 0x64, 0xf, 0x00ff4f00); phy_init()
268 phy_write_reg(chan, 0x65, 0xf, 0x09070907); phy_init()
269 phy_write_reg(chan, 0x66, 0xf, 0x00000010); phy_init()
270 phy_write_reg(chan, 0x74, 0xf, 0x0007001c); phy_init()
271 phy_write_reg(chan, 0x79, 0xf, 0x01fc000d); phy_init()
272 phy_write_reg(chan, 0xb0, 0xf, 0x00000610); phy_init()
275 phy_write_reg(chan, 0x67, 0x1, 0x00000400); phy_init()
281 if (pci_read_reg(chan, SH4A_PCIEPHYSR)) phy_init()
292 struct pci_channel *chan = port->hose; pcie_reset() local
294 pci_write_reg(chan, 1, SH4A_PCIESRSTR); pcie_reset()
295 pci_write_reg(chan, 0, SH4A_PCIETCTLR); pcie_reset()
296 pci_write_reg(chan, 0, SH4A_PCIESRSTR); pcie_reset()
297 pci_write_reg(chan, 0, SH4A_PCIETXVC0SR); pcie_reset()
302 struct pci_channel *chan = port->hose; pcie_init() local
316 pci_write_reg(chan, PCI_CLASS_BRIDGE_PCI << 16, SH4A_PCIEIDSETR1); pcie_init()
319 data = pci_read_reg(chan, SH4A_PCIEEXPCAP0); pcie_init()
328 pci_write_reg(chan, data, SH4A_PCIEEXPCAP0); pcie_init()
331 pci_write_reg(chan, PCI_EXP_LNKCAP_DLLLARC, SH4A_PCIEEXPCAP3); pcie_init()
334 data = pci_read_reg(chan, SH4A_PCIEEXPCAP4); pcie_init()
337 pci_write_reg(chan, data, SH4A_PCIEEXPCAP4); pcie_init()
340 data = pci_read_reg(chan, SH4A_PCIEEXPCAP5); pcie_init()
343 pci_write_reg(chan, data, SH4A_PCIEEXPCAP5); pcie_init()
346 data = pci_read_reg(chan, SH4A_PCIETLCTLR); pcie_init()
349 pci_write_reg(chan, data, SH4A_PCIETLCTLR); pcie_init()
355 data = pci_read_reg(chan, SH4A_PCIEMACCTLR); pcie_init()
358 pci_write_reg(chan, data, SH4A_PCIEMACCTLR); pcie_init()
368 pci_write_reg(chan, memphys + SZ_512M, SH4A_PCIELAR1); pcie_init()
369 pci_write_reg(chan, ((memsize - SZ_512M) - SZ_256) | 1, pcie_init()
376 pci_write_reg(chan, 0, SH4A_PCIELAR1); pcie_init()
377 pci_write_reg(chan, 0, SH4A_PCIELAMR1); pcie_init()
384 pci_write_reg(chan, memphys, SH4A_PCIELAR0); pcie_init()
385 pci_write_reg(chan, (memsize - SZ_256) | 1, SH4A_PCIELAMR0); pcie_init()
388 data = pci_read_reg(chan, SH4A_PCIETCTLR); pcie_init()
390 pci_write_reg(chan, data, SH4A_PCIETCTLR); pcie_init()
396 data = pci_read_reg(chan, SH4A_PCIEDLINTENR); pcie_init()
398 pci_write_reg(chan, data, SH4A_PCIEDLINTENR); pcie_init()
401 data = pci_read_reg(chan, SH4A_PCIEMACCTLR); pcie_init()
403 pci_write_reg(chan, data, SH4A_PCIEMACCTLR); pcie_init()
410 ret = pci_wait_for_irq(chan, MASK_INT_TX_CTRL); pcie_init()
412 data = pci_read_reg(chan, SH4A_PCIEPCICONF1); pcie_init()
416 pci_write_reg(chan, data, SH4A_PCIEPCICONF1); pcie_init()
418 pci_write_reg(chan, 0x80888000, SH4A_PCIETXVC0DCTLR); pcie_init()
419 pci_write_reg(chan, 0x00222000, SH4A_PCIERXVC0DCTLR); pcie_init()
424 data = pci_read_reg(chan, SH4A_PCIEMACSR); pcie_init()
431 for (i = win = 0; i < chan->nr_resources; i++) { pcie_init()
432 struct resource *res = chan->resources + i; pcie_init()
443 pci_write_reg(chan, 0x00000000, SH4A_PCIEPTCTLR(win)); pcie_init()
451 pci_write_reg(chan, mask << 18, SH4A_PCIEPAMR(win)); pcie_init()
453 pci_write_reg(chan, upper_32_bits(res->start), pcie_init()
455 pci_write_reg(chan, lower_32_bits(res->start), pcie_init()
462 pci_write_reg(chan, mask, SH4A_PCIEPTCTLR(win)); pcie_init()
fixups-landisk.c 41 int pci_fixup_pcic(struct pci_channel *chan) pci_fixup_pcic() argument
47 pci_write_reg(chan, bcr1, SH4_PCIBCR1); pci_fixup_pcic()
51 pci_write_reg(chan, mcr, SH4_PCIMCR); pci_fixup_pcic()
53 pci_write_reg(chan, 0x0c000000, SH7751_PCICONF5); pci_fixup_pcic()
54 pci_write_reg(chan, 0xd0000000, SH7751_PCICONF6); pci_fixup_pcic()
55 pci_write_reg(chan, 0x0c000000, SH4_PCILAR0); pci_fixup_pcic()
56 pci_write_reg(chan, 0x00000000, SH4_PCILAR1); pci_fixup_pcic()
ops-sh4.c 28 struct pci_channel *chan = bus->sysdata; sh4_pci_read() local
37 pci_write_reg(chan, CONFIG_CMD(bus, devfn, where), SH4_PCIPAR); sh4_pci_read()
38 data = pci_read_reg(chan, SH4_PCIPDR); sh4_pci_read()
66 struct pci_channel *chan = bus->sysdata; sh4_pci_write() local
72 pci_write_reg(chan, CONFIG_CMD(bus, devfn, where), SH4_PCIPAR); sh4_pci_write()
73 data = pci_read_reg(chan, SH4_PCIPDR); sh4_pci_write()
94 pci_write_reg(chan, data, SH4_PCIPDR); sh4_pci_write()
104 int __attribute__((weak)) pci_fixup_pcic(struct pci_channel *chan) pci_fixup_pcic() argument
pci-sh7780.c 249 struct pci_channel *chan = &sh7780_pci_controller; sh7780_pci_init() local
258 chan->reg_base = 0xfe040000; sh7780_pci_init()
265 chan->reg_base + SH4_PCICR); sh7780_pci_init()
274 id = __raw_readw(chan->reg_base + PCI_VENDOR_ID); sh7780_pci_init()
280 id = __raw_readw(chan->reg_base + PCI_DEVICE_ID); sh7780_pci_init()
294 __raw_readb(chan->reg_base + PCI_REVISION_ID)); sh7780_pci_init()
301 chan->reg_base + SH4_PCICR); sh7780_pci_init()
311 __raw_writel(memphys + SZ_512M, chan->reg_base + SH4_PCILAR1); sh7780_pci_init()
313 chan->reg_base + SH4_PCILSR1); sh7780_pci_init()
319 __raw_writel(0, chan->reg_base + SH4_PCILAR1); sh7780_pci_init()
320 __raw_writel(0, chan->reg_base + SH4_PCILSR1); sh7780_pci_init()
327 __raw_writel(memphys, chan->reg_base + SH4_PCILAR0); sh7780_pci_init()
329 chan->reg_base + SH4_PCILSR0); sh7780_pci_init()
334 ret = sh7780_pci_setup_irqs(chan); sh7780_pci_init()
341 __raw_writel(0, chan->reg_base + SH7780_PCICSCR0); sh7780_pci_init()
342 __raw_writel(0, chan->reg_base + SH7780_PCICSAR0); sh7780_pci_init()
343 __raw_writel(0, chan->reg_base + SH7780_PCICSCR1); sh7780_pci_init()
344 __raw_writel(0, chan->reg_base + SH7780_PCICSAR1); sh7780_pci_init()
349 for (i = 1; i < chan->nr_resources; i++) { sh7780_pci_init()
350 struct resource *res = chan->resources + i; sh7780_pci_init()
361 chan->nr_resources--; sh7780_pci_init()
372 chan->reg_base + SH7780_PCIMBMR(i - 1)); sh7780_pci_init()
373 __raw_writel(res->start, chan->reg_base + SH7780_PCIMBR(i - 1)); sh7780_pci_init()
379 __raw_writel(0, chan->reg_base + PCI_BASE_ADDRESS_0); sh7780_pci_init()
380 __raw_writel(0, chan->reg_base + SH7780_PCIIOBR); sh7780_pci_init()
381 __raw_writel(0, chan->reg_base + SH7780_PCIIOBMR); sh7780_pci_init()
385 PCI_COMMAND_MEMORY, chan->reg_base + PCI_COMMAND); sh7780_pci_init()
393 chan->reg_base + SH4_PCICR); sh7780_pci_init()
395 ret = register_pci_controller(chan); sh7780_pci_init()
399 sh7780_pci66_init(chan); sh7780_pci_init()
402 (__raw_readw(chan->reg_base + PCI_STATUS) & PCI_STATUS_66MHZ) ? sh7780_pci_init()
408 sh7780_pci_teardown_irqs(chan); sh7780_pci_init()
ops-sh7786.c 25 struct pci_channel *chan = bus->sysdata; sh7786_pcie_config_access() local
54 *data = pci_read_reg(chan, PCI_REG(reg)); sh7786_pcie_config_access()
56 pci_write_reg(chan, *data, PCI_REG(reg)); sh7786_pcie_config_access()
64 pci_write_reg(chan, pci_read_reg(chan, SH4A_PCIEERRFR), SH4A_PCIEERRFR); sh7786_pcie_config_access()
67 pci_write_reg(chan, (bus->number << 24) | (dev << 19) | sh7786_pcie_config_access()
71 pci_write_reg(chan, (1 << 31) | (type << 8), SH4A_PCIEPCTLR); sh7786_pcie_config_access()
74 if (pci_read_reg(chan, SH4A_PCIEERRFR) & 0x10) sh7786_pcie_config_access()
78 if (pci_read_reg(chan, SH4A_PCIEPCICONF1) & ((1 << 29) | (1 << 28))) sh7786_pcie_config_access()
82 *data = pci_read_reg(chan, SH4A_PCIEPDR); sh7786_pcie_config_access()
84 pci_write_reg(chan, *data, SH4A_PCIEPDR); sh7786_pcie_config_access()
87 pci_write_reg(chan, 0, SH4A_PCIEPCTLR); sh7786_pcie_config_access()
/linux-4.1.27/arch/arm/kernel/
dma-isa.c 47 static int isa_get_dma_residue(unsigned int chan, dma_t *dma) isa_get_dma_residue() argument
49 unsigned int io_port = isa_dma_port[chan][ISA_DMA_COUNT]; isa_get_dma_residue()
55 return chan < 4 ? count : (count << 1); isa_get_dma_residue()
58 static void isa_enable_dma(unsigned int chan, dma_t *dma) isa_enable_dma() argument
65 mode = (chan & 3) | dma->dma_mode; isa_enable_dma()
100 outb(address >> 16, isa_dma_port[chan][ISA_DMA_PGLO]); isa_enable_dma()
101 outb(address >> 24, isa_dma_port[chan][ISA_DMA_PGHI]); isa_enable_dma()
103 if (chan >= 4) { isa_enable_dma()
108 outb(0, isa_dma_port[chan][ISA_DMA_CLRFF]); isa_enable_dma()
110 outb(address, isa_dma_port[chan][ISA_DMA_ADDR]); isa_enable_dma()
111 outb(address >> 8, isa_dma_port[chan][ISA_DMA_ADDR]); isa_enable_dma()
113 outb(length, isa_dma_port[chan][ISA_DMA_COUNT]); isa_enable_dma()
114 outb(length >> 8, isa_dma_port[chan][ISA_DMA_COUNT]); isa_enable_dma()
116 outb(mode, isa_dma_port[chan][ISA_DMA_MODE]); isa_enable_dma()
119 outb(chan & 3, isa_dma_port[chan][ISA_DMA_MASK]); isa_enable_dma()
122 static void isa_disable_dma(unsigned int chan, dma_t *dma) isa_disable_dma() argument
124 outb(chan | 4, isa_dma_port[chan][ISA_DMA_MASK]); isa_disable_dma()
175 unsigned int chan, i; isa_init_dma() local
177 for (chan = 0; chan < 8; chan++) { isa_init_dma()
178 isa_dma[chan].d_ops = &isa_dma_ops; isa_init_dma()
179 isa_disable_dma(chan, NULL); isa_init_dma()
213 for (chan = 0; chan < 8; chan++) { isa_init_dma()
214 int ret = isa_dma_add(chan, &isa_dma[chan]); isa_init_dma()
217 chan, ret); isa_init_dma()
dma.c 31 static inline dma_t *dma_channel(unsigned int chan) dma_channel() argument
33 if (chan >= MAX_DMA_CHANNELS) dma_channel()
36 return dma_chan[chan]; dma_channel()
39 int __init isa_dma_add(unsigned int chan, dma_t *dma) isa_dma_add() argument
46 if (dma_chan[chan]) isa_dma_add()
48 dma_chan[chan] = dma; isa_dma_add()
57 int request_dma(unsigned int chan, const char *device_id) request_dma() argument
59 dma_t *dma = dma_channel(chan); request_dma()
74 ret = dma->d_ops->request(chan, dma); request_dma()
82 pr_err("dma: trying to allocate DMA%d\n", chan); request_dma()
95 void free_dma(unsigned int chan) free_dma() argument
97 dma_t *dma = dma_channel(chan); free_dma()
103 pr_err("dma%d: freeing active DMA\n", chan); free_dma()
104 dma->d_ops->disable(chan, dma); free_dma()
110 dma->d_ops->free(chan, dma); free_dma()
114 pr_err("dma%d: trying to free free DMA\n", chan); free_dma()
118 pr_err("dma: trying to free DMA%d\n", chan); free_dma()
124 void set_dma_sg (unsigned int chan, struct scatterlist *sg, int nr_sg) set_dma_sg() argument
126 dma_t *dma = dma_channel(chan); set_dma_sg()
129 pr_err("dma%d: altering DMA SG while DMA active\n", chan); set_dma_sg()
141 void __set_dma_addr (unsigned int chan, void *addr) __set_dma_addr() argument
143 dma_t *dma = dma_channel(chan); __set_dma_addr()
146 pr_err("dma%d: altering DMA address while DMA active\n", chan); __set_dma_addr()
158 void set_dma_count (unsigned int chan, unsigned long count) set_dma_count() argument
160 dma_t *dma = dma_channel(chan); set_dma_count()
163 pr_err("dma%d: altering DMA count while DMA active\n", chan); set_dma_count()
173 void set_dma_mode (unsigned int chan, unsigned int mode) set_dma_mode() argument
175 dma_t *dma = dma_channel(chan); set_dma_mode()
178 pr_err("dma%d: altering DMA mode while DMA active\n", chan); set_dma_mode()
187 void enable_dma (unsigned int chan) enable_dma() argument
189 dma_t *dma = dma_channel(chan); enable_dma()
196 dma->d_ops->enable(chan, dma); enable_dma()
201 pr_err("dma%d: trying to enable free DMA\n", chan); enable_dma()
208 void disable_dma (unsigned int chan) disable_dma() argument
210 dma_t *dma = dma_channel(chan); disable_dma()
217 dma->d_ops->disable(chan, dma); disable_dma()
222 pr_err("dma%d: trying to disable free DMA\n", chan); disable_dma()
230 int dma_channel_active(unsigned int chan) dma_channel_active() argument
232 dma_t *dma = dma_channel(chan); dma_channel_active()
237 void set_dma_page(unsigned int chan, char pagenr) set_dma_page() argument
239 pr_err("dma%d: trying to set_dma_page\n", chan); set_dma_page()
243 void set_dma_speed(unsigned int chan, int cycle_ns) set_dma_speed() argument
245 dma_t *dma = dma_channel(chan); set_dma_speed()
249 ret = dma->d_ops->setspeed(chan, dma, cycle_ns); set_dma_speed()
254 int get_dma_residue(unsigned int chan) get_dma_residue() argument
256 dma_t *dma = dma_channel(chan); get_dma_residue()
260 ret = dma->d_ops->residue(chan, dma); get_dma_residue()
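Put together, the claim/program/enable sequence for this legacy ARM DMA API looks roughly like the sketch below; the channel number, buffer, and polling loop are illustrative (real users block on the device interrupt instead):

#include <linux/kernel.h>
#include <asm/dma.h>

static int demo_arm_dma_read(unsigned int chan, void *buf, unsigned long len)
{
	int ret = request_dma(chan, "demo-device");
	if (ret)
		return ret;

	__set_dma_addr(chan, buf);		/* refused while DMA active */
	set_dma_count(chan, len);
	set_dma_mode(chan, DMA_MODE_READ);	/* device -> memory */
	enable_dma(chan);

	while (dma_channel_active(chan))	/* backed by d_ops->residue() */
		cpu_relax();

	disable_dma(chan);
	free_dma(chan);
	return 0;
}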
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
base.c 63 struct nvkm_fifo_chan *chan; nvkm_fifo_channel_create_() local
71 chan = *ptr; nvkm_fifo_channel_create_()
76 chan->pushdma = (void *)nvkm_handle_ref(parent, pushbuf); nvkm_fifo_channel_create_()
77 if (!chan->pushdma) nvkm_fifo_channel_create_()
80 dmaeng = (void *)chan->pushdma->base.engine; nvkm_fifo_channel_create_()
81 switch (chan->pushdma->base.oclass->handle) { nvkm_fifo_channel_create_()
89 ret = dmaeng->bind(chan->pushdma, parent, &chan->pushgpu); nvkm_fifo_channel_create_()
95 for (chan->chid = priv->min; chan->chid < priv->max; chan->chid++) { nvkm_fifo_channel_create_()
96 if (!priv->channel[chan->chid]) { nvkm_fifo_channel_create_()
97 priv->channel[chan->chid] = nv_object(chan); nvkm_fifo_channel_create_()
103 if (chan->chid == priv->max) { nvkm_fifo_channel_create_()
108 chan->addr = nv_device_resource_start(device, bar) + nvkm_fifo_channel_create_()
109 addr + size * chan->chid; nvkm_fifo_channel_create_()
110 chan->size = size; nvkm_fifo_channel_create_()
116 nvkm_fifo_channel_destroy(struct nvkm_fifo_chan *chan) nvkm_fifo_channel_destroy() argument
118 struct nvkm_fifo *priv = (void *)nv_object(chan)->engine; nvkm_fifo_channel_destroy()
121 if (chan->user) nvkm_fifo_channel_destroy()
122 iounmap(chan->user); nvkm_fifo_channel_destroy()
125 priv->channel[chan->chid] = NULL; nvkm_fifo_channel_destroy()
128 nvkm_gpuobj_ref(NULL, &chan->pushgpu); nvkm_fifo_channel_destroy()
129 nvkm_object_ref(NULL, (struct nvkm_object **)&chan->pushdma); nvkm_fifo_channel_destroy()
130 nvkm_namedb_destroy(&chan->namedb); nvkm_fifo_channel_destroy()
136 struct nvkm_fifo_chan *chan = (void *)object; _nvkm_fifo_channel_dtor() local
137 nvkm_fifo_channel_destroy(chan); _nvkm_fifo_channel_dtor()
143 struct nvkm_fifo_chan *chan = (void *)object; _nvkm_fifo_channel_map() local
144 *addr = chan->addr; _nvkm_fifo_channel_map()
145 *size = chan->size; _nvkm_fifo_channel_map()
152 struct nvkm_fifo_chan *chan = (void *)object; _nvkm_fifo_channel_rd32() local
153 if (unlikely(!chan->user)) { _nvkm_fifo_channel_rd32()
154 chan->user = ioremap(chan->addr, chan->size); _nvkm_fifo_channel_rd32()
155 if (WARN_ON_ONCE(chan->user == NULL)) _nvkm_fifo_channel_rd32()
158 return ioread32_native(chan->user + addr); _nvkm_fifo_channel_rd32()
164 struct nvkm_fifo_chan *chan = (void *)object; _nvkm_fifo_channel_wr32() local
165 if (unlikely(!chan->user)) { _nvkm_fifo_channel_wr32()
166 chan->user = ioremap(chan->addr, chan->size); _nvkm_fifo_channel_wr32()
167 if (WARN_ON_ONCE(chan->user == NULL)) _nvkm_fifo_channel_wr32()
170 iowrite32_native(data, chan->user + addr); _nvkm_fifo_channel_wr32()
235 struct nvkm_fifo_chan *chan = NULL; nvkm_client_name_for_fifo_chid() local
240 chan = (void *)fifo->channel[chid]; nvkm_client_name_for_fifo_chid()
243 return nvkm_client_name(chan); nvkm_client_name_for_fifo_chid()
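The rd32/wr32 accessors above use a lazy-map idiom: the channel's USER window is ioremapped on first register access and cached in chan->user. The same pattern in isolation, with a hypothetical demo_chan type standing in for nvkm_fifo_chan:

#include <linux/kernel.h>
#include <linux/io.h>

struct demo_chan {
	u64 addr;		/* bus address of the USER window */
	u32 size;
	void __iomem *user;	/* NULL until first use */
};

static u32 demo_rd32(struct demo_chan *c, u32 reg)
{
	if (unlikely(!c->user)) {	/* map once, on demand */
		c->user = ioremap(c->addr, c->size);
		if (WARN_ON_ONCE(!c->user))
			return 0;
	}
	return ioread32(c->user + reg);
}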
nv50.c 108 struct nv50_fifo_chan *chan = (void *)parent; nv50_fifo_context_detach() local
138 chan->base.chid, nvkm_client_name(chan)); nv50_fifo_context_detach()
161 struct nv50_fifo_chan *chan = (void *)parent; nv50_fifo_object_attach() local
178 return nvkm_ramht_insert(chan->ramht, 0, handle, context); nv50_fifo_object_attach()
184 struct nv50_fifo_chan *chan = (void *)parent; nv50_fifo_object_detach() local
185 nvkm_ramht_remove(chan->ramht, cookie); nv50_fifo_object_detach()
198 struct nv50_fifo_chan *chan; nv50_fifo_chan_ctor_dma() local
214 (1ULL << NVDEV_ENGINE_MPEG), &chan); nv50_fifo_chan_ctor_dma()
215 *pobject = nv_object(chan); nv50_fifo_chan_ctor_dma()
219 args->v0.chid = chan->base.chid; nv50_fifo_chan_ctor_dma()
221 nv_parent(chan)->context_attach = nv50_fifo_context_attach; nv50_fifo_chan_ctor_dma()
222 nv_parent(chan)->context_detach = nv50_fifo_context_detach; nv50_fifo_chan_ctor_dma()
223 nv_parent(chan)->object_attach = nv50_fifo_object_attach; nv50_fifo_chan_ctor_dma()
224 nv_parent(chan)->object_detach = nv50_fifo_object_detach; nv50_fifo_chan_ctor_dma()
226 ret = nvkm_ramht_new(nv_object(chan), nv_object(chan), 0x8000, 16, nv50_fifo_chan_ctor_dma()
227 &chan->ramht); nv50_fifo_chan_ctor_dma()
237 nv_wo32(base->ramfc, 0x48, chan->base.pushgpu->node->offset >> 4); nv50_fifo_chan_ctor_dma()
242 nv_wo32(base->ramfc, 0x80, ((chan->ramht->bits - 9) << 27) | nv50_fifo_chan_ctor_dma()
244 (chan->ramht->gpuobj.node->offset >> 4)); nv50_fifo_chan_ctor_dma()
259 struct nv50_fifo_chan *chan; nv50_fifo_chan_ctor_ind() local
277 (1ULL << NVDEV_ENGINE_MPEG), &chan); nv50_fifo_chan_ctor_ind()
278 *pobject = nv_object(chan); nv50_fifo_chan_ctor_ind()
282 args->v0.chid = chan->base.chid; nv50_fifo_chan_ctor_ind()
284 nv_parent(chan)->context_attach = nv50_fifo_context_attach; nv50_fifo_chan_ctor_ind()
285 nv_parent(chan)->context_detach = nv50_fifo_context_detach; nv50_fifo_chan_ctor_ind()
286 nv_parent(chan)->object_attach = nv50_fifo_object_attach; nv50_fifo_chan_ctor_ind()
287 nv_parent(chan)->object_detach = nv50_fifo_object_detach; nv50_fifo_chan_ctor_ind()
289 ret = nvkm_ramht_new(nv_object(chan), nv_object(chan), 0x8000, 16, nv50_fifo_chan_ctor_ind()
290 &chan->ramht); nv50_fifo_chan_ctor_ind()
299 nv_wo32(base->ramfc, 0x48, chan->base.pushgpu->node->offset >> 4); nv50_fifo_chan_ctor_ind()
305 nv_wo32(base->ramfc, 0x80, ((chan->ramht->bits - 9) << 27) | nv50_fifo_chan_ctor_ind()
307 (chan->ramht->gpuobj.node->offset >> 4)); nv50_fifo_chan_ctor_ind()
315 struct nv50_fifo_chan *chan = (void *)object; nv50_fifo_chan_dtor() local
316 nvkm_ramht_ref(NULL, &chan->ramht); nv50_fifo_chan_dtor()
317 nvkm_fifo_channel_destroy(&chan->base); nv50_fifo_chan_dtor()
325 struct nv50_fifo_chan *chan = (void *)object; nv50_fifo_chan_init() local
327 u32 chid = chan->base.chid; nv50_fifo_chan_init()
330 ret = nvkm_fifo_channel_init(&chan->base); nv50_fifo_chan_init()
343 struct nv50_fifo_chan *chan = (void *)object; nv50_fifo_chan_fini() local
344 u32 chid = chan->base.chid; nv50_fifo_chan_fini()
351 return nvkm_fifo_channel_fini(&chan->base, suspend); nv50_fifo_chan_fini()
nv40.c 72 struct nv04_fifo_chan *chan = (void *)parent; nv40_fifo_object_attach() local
73 u32 context, chid = chan->base.chid; nv40_fifo_object_attach()
108 struct nv04_fifo_chan *chan = (void *)parent; nv40_fifo_context_attach() local
131 if ((nv_rd32(priv, 0x003204) & priv->base.max) == chan->base.chid) nv40_fifo_context_attach()
133 nv_wo32(priv->ramfc, chan->ramfc + ctx, nv_engctx(engctx)->addr); nv40_fifo_context_attach()
145 struct nv04_fifo_chan *chan = (void *)parent; nv40_fifo_context_detach() local
167 if ((nv_rd32(priv, 0x003204) & priv->base.max) == chan->base.chid) nv40_fifo_context_detach()
169 nv_wo32(priv->ramfc, chan->ramfc + ctx, 0x00000000); nv40_fifo_context_detach()
185 struct nv04_fifo_chan *chan; nv40_fifo_chan_ctor() local
201 (1ULL << NVDEV_ENGINE_MPEG), &chan); nv40_fifo_chan_ctor()
202 *pobject = nv_object(chan); nv40_fifo_chan_ctor()
206 args->v0.chid = chan->base.chid; nv40_fifo_chan_ctor()
208 nv_parent(chan)->context_attach = nv40_fifo_context_attach; nv40_fifo_chan_ctor()
209 nv_parent(chan)->context_detach = nv40_fifo_context_detach; nv40_fifo_chan_ctor()
210 nv_parent(chan)->object_attach = nv40_fifo_object_attach; nv40_fifo_chan_ctor()
211 nv_parent(chan)->object_detach = nv04_fifo_object_detach; nv40_fifo_chan_ctor()
212 chan->ramfc = chan->base.chid * 128; nv40_fifo_chan_ctor()
214 nv_wo32(priv->ramfc, chan->ramfc + 0x00, args->v0.offset); nv40_fifo_chan_ctor()
215 nv_wo32(priv->ramfc, chan->ramfc + 0x04, args->v0.offset); nv40_fifo_chan_ctor()
216 nv_wo32(priv->ramfc, chan->ramfc + 0x0c, chan->base.pushgpu->addr >> 4); nv40_fifo_chan_ctor()
217 nv_wo32(priv->ramfc, chan->ramfc + 0x18, 0x30000000 | nv40_fifo_chan_ctor()
224 nv_wo32(priv->ramfc, chan->ramfc + 0x3c, 0x0001ffff); nv40_fifo_chan_ctor()
nv10.c 62 struct nv04_fifo_chan *chan; nv10_fifo_chan_ctor() local
77 (1ULL << NVDEV_ENGINE_GR), &chan); nv10_fifo_chan_ctor()
78 *pobject = nv_object(chan); nv10_fifo_chan_ctor()
82 args->v0.chid = chan->base.chid; nv10_fifo_chan_ctor()
84 nv_parent(chan)->object_attach = nv04_fifo_object_attach; nv10_fifo_chan_ctor()
85 nv_parent(chan)->object_detach = nv04_fifo_object_detach; nv10_fifo_chan_ctor()
86 nv_parent(chan)->context_attach = nv04_fifo_context_attach; nv10_fifo_chan_ctor()
87 chan->ramfc = chan->base.chid * 32; nv10_fifo_chan_ctor()
89 nv_wo32(priv->ramfc, chan->ramfc + 0x00, args->v0.offset); nv10_fifo_chan_ctor()
90 nv_wo32(priv->ramfc, chan->ramfc + 0x04, args->v0.offset); nv10_fifo_chan_ctor()
91 nv_wo32(priv->ramfc, chan->ramfc + 0x0c, chan->base.pushgpu->addr >> 4); nv10_fifo_chan_ctor()
92 nv_wo32(priv->ramfc, chan->ramfc + 0x14, nv10_fifo_chan_ctor()
nv17.c 67 struct nv04_fifo_chan *chan; nv17_fifo_chan_ctor() local
84 &chan); nv17_fifo_chan_ctor()
85 *pobject = nv_object(chan); nv17_fifo_chan_ctor()
89 args->v0.chid = chan->base.chid; nv17_fifo_chan_ctor()
91 nv_parent(chan)->object_attach = nv04_fifo_object_attach; nv17_fifo_chan_ctor()
92 nv_parent(chan)->object_detach = nv04_fifo_object_detach; nv17_fifo_chan_ctor()
93 nv_parent(chan)->context_attach = nv04_fifo_context_attach; nv17_fifo_chan_ctor()
94 chan->ramfc = chan->base.chid * 64; nv17_fifo_chan_ctor()
96 nv_wo32(priv->ramfc, chan->ramfc + 0x00, args->v0.offset); nv17_fifo_chan_ctor()
97 nv_wo32(priv->ramfc, chan->ramfc + 0x04, args->v0.offset); nv17_fifo_chan_ctor()
98 nv_wo32(priv->ramfc, chan->ramfc + 0x0c, chan->base.pushgpu->addr >> 4); nv17_fifo_chan_ctor()
99 nv_wo32(priv->ramfc, chan->ramfc + 0x14, nv17_fifo_chan_ctor()
g84.c 86 struct nv50_fifo_chan *chan = (void *)parent; g84_fifo_context_detach() local
112 chan->base.chid, nvkm_client_name(chan)); g84_fifo_context_detach()
131 struct nv50_fifo_chan *chan = (void *)parent; g84_fifo_object_attach() local
158 return nvkm_ramht_insert(chan->ramht, 0, handle, context); g84_fifo_object_attach()
171 struct nv50_fifo_chan *chan; g84_fifo_chan_ctor_dma() local
197 (1ULL << NVDEV_ENGINE_VIC), &chan); g84_fifo_chan_ctor_dma()
198 *pobject = nv_object(chan); g84_fifo_chan_ctor_dma()
202 args->v0.chid = chan->base.chid; g84_fifo_chan_ctor_dma()
204 ret = nvkm_ramht_new(nv_object(chan), nv_object(chan), 0x8000, 16, g84_fifo_chan_ctor_dma()
205 &chan->ramht); g84_fifo_chan_ctor_dma()
209 nv_parent(chan)->context_attach = g84_fifo_context_attach; g84_fifo_chan_ctor_dma()
210 nv_parent(chan)->context_detach = g84_fifo_context_detach; g84_fifo_chan_ctor_dma()
211 nv_parent(chan)->object_attach = g84_fifo_object_attach; g84_fifo_chan_ctor_dma()
212 nv_parent(chan)->object_detach = nv50_fifo_object_detach; g84_fifo_chan_ctor_dma()
220 nv_wo32(base->ramfc, 0x48, chan->base.pushgpu->node->offset >> 4); g84_fifo_chan_ctor_dma()
225 nv_wo32(base->ramfc, 0x80, ((chan->ramht->bits - 9) << 27) | g84_fifo_chan_ctor_dma()
227 (chan->ramht->gpuobj.node->offset >> 4)); g84_fifo_chan_ctor_dma()
244 struct nv50_fifo_chan *chan; g84_fifo_chan_ctor_ind() local
272 (1ULL << NVDEV_ENGINE_VIC), &chan); g84_fifo_chan_ctor_ind()
273 *pobject = nv_object(chan); g84_fifo_chan_ctor_ind()
277 args->v0.chid = chan->base.chid; g84_fifo_chan_ctor_ind()
279 ret = nvkm_ramht_new(nv_object(chan), nv_object(chan), 0x8000, 16, g84_fifo_chan_ctor_ind()
280 &chan->ramht); g84_fifo_chan_ctor_ind()
284 nv_parent(chan)->context_attach = g84_fifo_context_attach; g84_fifo_chan_ctor_ind()
285 nv_parent(chan)->context_detach = g84_fifo_context_detach; g84_fifo_chan_ctor_ind()
286 nv_parent(chan)->object_attach = g84_fifo_object_attach; g84_fifo_chan_ctor_ind()
287 nv_parent(chan)->object_detach = nv50_fifo_object_detach; g84_fifo_chan_ctor_ind()
294 nv_wo32(base->ramfc, 0x48, chan->base.pushgpu->node->offset >> 4); g84_fifo_chan_ctor_ind()
300 nv_wo32(base->ramfc, 0x80, ((chan->ramht->bits - 9) << 27) | g84_fifo_chan_ctor_ind()
302 (chan->ramht->gpuobj.node->offset >> 4)); g84_fifo_chan_ctor_ind()
314 struct nv50_fifo_chan *chan = (void *)object; g84_fifo_chan_init() local
316 u32 chid = chan->base.chid; g84_fifo_chan_init()
319 ret = nvkm_fifo_channel_init(&chan->base); g84_fifo_chan_init()
nv04.c 59 struct nv04_fifo_chan *chan = (void *)parent; nv04_fifo_object_attach() local
60 u32 context, chid = chan->base.chid; nv04_fifo_object_attach()
119 struct nv04_fifo_chan *chan; nv04_fifo_chan_ctor() local
134 (1ULL << NVDEV_ENGINE_GR), &chan); nv04_fifo_chan_ctor()
135 *pobject = nv_object(chan); nv04_fifo_chan_ctor()
139 args->v0.chid = chan->base.chid; nv04_fifo_chan_ctor()
141 nv_parent(chan)->object_attach = nv04_fifo_object_attach; nv04_fifo_chan_ctor()
142 nv_parent(chan)->object_detach = nv04_fifo_object_detach; nv04_fifo_chan_ctor()
143 nv_parent(chan)->context_attach = nv04_fifo_context_attach; nv04_fifo_chan_ctor()
144 chan->ramfc = chan->base.chid * 32; nv04_fifo_chan_ctor()
146 nv_wo32(priv->ramfc, chan->ramfc + 0x00, args->v0.offset); nv04_fifo_chan_ctor()
147 nv_wo32(priv->ramfc, chan->ramfc + 0x04, args->v0.offset); nv04_fifo_chan_ctor()
148 nv_wo32(priv->ramfc, chan->ramfc + 0x08, chan->base.pushgpu->addr >> 4); nv04_fifo_chan_ctor()
149 nv_wo32(priv->ramfc, chan->ramfc + 0x10, nv04_fifo_chan_ctor()
163 struct nv04_fifo_chan *chan = (void *)object; nv04_fifo_chan_dtor() local
167 nv_wo32(priv->ramfc, chan->ramfc + c->ctxp, 0x00000000); nv04_fifo_chan_dtor()
170 nvkm_fifo_channel_destroy(&chan->base); nv04_fifo_chan_dtor()
177 struct nv04_fifo_chan *chan = (void *)object; nv04_fifo_chan_init() local
178 u32 mask = 1 << chan->base.chid; nv04_fifo_chan_init()
182 ret = nvkm_fifo_channel_init(&chan->base); nv04_fifo_chan_init()
196 struct nv04_fifo_chan *chan = (void *)object; nv04_fifo_chan_fini() local
200 u32 data = chan->ramfc; nv04_fifo_chan_fini()
209 if (chid == chan->base.chid) { nv04_fifo_chan_fini()
236 nv_mask(priv, NV04_PFIFO_MODE, 1 << chan->base.chid, 0); nv04_fifo_chan_fini()
240 return nvkm_fifo_channel_fini(&chan->base, suspend); nv04_fifo_chan_fini()
359 struct nv04_fifo_chan *chan = NULL; nv04_fifo_swmthd() local
369 chan = (void *)priv->base.channel[chid]; nv04_fifo_swmthd()
370 if (unlikely(!chan)) nv04_fifo_swmthd()
375 bind = nvkm_namedb_get(nv_namedb(chan), data); nv04_fifo_swmthd()
381 chan->subc[subc] = data; nv04_fifo_swmthd()
394 bind = nvkm_namedb_get(nv_namedb(chan), chan->subc[subc]); nv04_fifo_swmthd()
/linux-4.1.27/sound/core/seq/
seq_midi_emul.c 48 struct snd_midi_channel *chan,
52 struct snd_midi_channel *chan,
54 static void rpn(struct snd_midi_op *ops, void *drv, struct snd_midi_channel *chan,
56 static void nrpn(struct snd_midi_op *ops, void *drv, struct snd_midi_channel *chan,
61 struct snd_midi_channel *chan);
63 struct snd_midi_channel *chan);
64 static void snd_midi_reset_controllers(struct snd_midi_channel *chan);
75 * GM - You can use all gm_ prefixed elements of chan. Controls, RPN, NRPN,
77 * GS - You can use all gs_ prefixed elements of chan. Codes for GS will be
79 * XG - You can use all xg_ prefixed elements of chan. Codes for XG will
87 struct snd_midi_channel *chan; snd_midi_process_event() local
107 chan = chanset->channels + dest_channel; snd_midi_process_event()
129 if (chan->note[ev->data.note.note] & SNDRV_MIDI_NOTE_ON) { snd_midi_process_event()
131 ops->note_off(drv, ev->data.note.note, 0, chan); snd_midi_process_event()
133 chan->note[ev->data.note.note] = SNDRV_MIDI_NOTE_ON; snd_midi_process_event()
135 ops->note_on(drv, ev->data.note.note, ev->data.note.velocity, chan); snd_midi_process_event()
138 if (! (chan->note[ev->data.note.note] & SNDRV_MIDI_NOTE_ON)) snd_midi_process_event()
141 note_off(ops, drv, chan, ev->data.note.note, ev->data.note.velocity); snd_midi_process_event()
145 ops->key_press(drv, ev->data.note.note, ev->data.note.velocity, chan); snd_midi_process_event()
148 do_control(ops, drv, chanset, chan, snd_midi_process_event()
152 chan->midi_program = ev->data.control.value; snd_midi_process_event()
155 chan->midi_pitchbend = ev->data.control.value; snd_midi_process_event()
157 ops->control(drv, MIDI_CTL_PITCHBEND, chan); snd_midi_process_event()
160 chan->midi_pressure = ev->data.control.value; snd_midi_process_event()
162 ops->control(drv, MIDI_CTL_CHAN_PRESSURE, chan); snd_midi_process_event()
168 chan->control[ev->data.control.param + 32] = snd_midi_process_event()
170 do_control(ops, drv, chanset, chan, snd_midi_process_event()
174 do_control(ops, drv, chanset, chan, snd_midi_process_event()
180 chan->param_type = SNDRV_MIDI_PARAM_TYPE_NONREGISTERED; snd_midi_process_event()
181 chan->control[MIDI_CTL_MSB_DATA_ENTRY] snd_midi_process_event()
183 chan->control[MIDI_CTL_LSB_DATA_ENTRY] snd_midi_process_event()
185 chan->control[MIDI_CTL_NONREG_PARM_NUM_MSB] snd_midi_process_event()
187 chan->control[MIDI_CTL_NONREG_PARM_NUM_LSB] snd_midi_process_event()
189 nrpn(ops, drv, chan, chanset); snd_midi_process_event()
193 chan->param_type = SNDRV_MIDI_PARAM_TYPE_REGISTERED; snd_midi_process_event()
194 chan->control[MIDI_CTL_MSB_DATA_ENTRY] snd_midi_process_event()
196 chan->control[MIDI_CTL_LSB_DATA_ENTRY] snd_midi_process_event()
198 chan->control[MIDI_CTL_REGIST_PARM_NUM_MSB] snd_midi_process_event()
200 chan->control[MIDI_CTL_REGIST_PARM_NUM_LSB] snd_midi_process_event()
202 rpn(ops, drv, chan, chanset); snd_midi_process_event()
245 note_off(struct snd_midi_op *ops, void *drv, struct snd_midi_channel *chan, note_off() argument
248 if (chan->gm_hold) { note_off()
250 chan->note[note] |= SNDRV_MIDI_NOTE_RELEASED; note_off()
251 } else if (chan->note[note] & SNDRV_MIDI_NOTE_SOSTENUTO) { note_off()
254 chan->note[note] |= SNDRV_MIDI_NOTE_RELEASED; note_off()
256 chan->note[note] = 0; note_off()
258 ops->note_off(drv, note, vel, chan); note_off()
267 do_control(struct snd_midi_op *ops, void *drv, struct snd_midi_channel_set *chset, do_control() argument
268 struct snd_midi_channel *chan, int control, int value) do_control()
272 if (control >= ARRAY_SIZE(chan->control)) do_control()
280 chan->control[control] = value; do_control()
287 if (chan->note[i] & SNDRV_MIDI_NOTE_RELEASED) { do_control()
288 chan->note[i] = SNDRV_MIDI_NOTE_OFF; do_control()
290 ops->note_off(drv, i, 0, chan); do_control()
301 if (chan->note[i] & SNDRV_MIDI_NOTE_ON) do_control()
302 chan->note[i] |= SNDRV_MIDI_NOTE_SOSTENUTO; do_control()
307 if (chan->note[i] & SNDRV_MIDI_NOTE_SOSTENUTO) { do_control()
308 chan->note[i] &= ~SNDRV_MIDI_NOTE_SOSTENUTO; do_control()
309 if (chan->note[i] & SNDRV_MIDI_NOTE_RELEASED) { do_control()
310 chan->note[i] = SNDRV_MIDI_NOTE_OFF; do_control()
312 ops->note_off(drv, i, 0, chan); do_control()
319 chan->control[MIDI_CTL_LSB_DATA_ENTRY] = 0; do_control()
322 if (chan->param_type == SNDRV_MIDI_PARAM_TYPE_REGISTERED) do_control()
323 rpn(ops, drv, chan, chset); do_control()
325 nrpn(ops, drv, chan, chset); do_control()
329 chan->param_type = SNDRV_MIDI_PARAM_TYPE_REGISTERED; do_control()
333 chan->param_type = SNDRV_MIDI_PARAM_TYPE_NONREGISTERED; do_control()
337 all_sounds_off(ops, drv, chan); do_control()
341 all_notes_off(ops, drv, chan); do_control()
347 chan->drum_channel = 1; do_control()
349 chan->drum_channel = 0; do_control()
356 snd_midi_reset_controllers(chan); do_control()
376 ops->control(drv, control, chan); do_control()
394 struct snd_midi_channel *chan = chset->channels + i; snd_midi_channel_set_clear() local
395 memset(chan->note, 0, sizeof(chan->note)); snd_midi_channel_set_clear()
397 chan->midi_aftertouch = 0; snd_midi_channel_set_clear()
398 chan->midi_pressure = 0; snd_midi_channel_set_clear()
399 chan->midi_program = 0; snd_midi_channel_set_clear()
400 chan->midi_pitchbend = 0; snd_midi_channel_set_clear()
401 snd_midi_reset_controllers(chan); snd_midi_channel_set_clear()
402 chan->gm_rpn_pitch_bend_range = 256; /* 2 semitones */ snd_midi_channel_set_clear()
403 chan->gm_rpn_fine_tuning = 0; snd_midi_channel_set_clear()
404 chan->gm_rpn_coarse_tuning = 0; snd_midi_channel_set_clear()
407 chan->drum_channel = 1; snd_midi_channel_set_clear()
409 chan->drum_channel = 0; snd_midi_channel_set_clear()
417 rpn(struct snd_midi_op *ops, void *drv, struct snd_midi_channel *chan, rpn() argument
424 type = (chan->control[MIDI_CTL_REGIST_PARM_NUM_MSB] << 8) | rpn()
425 chan->control[MIDI_CTL_REGIST_PARM_NUM_LSB]; rpn()
426 val = (chan->control[MIDI_CTL_MSB_DATA_ENTRY] << 7) | rpn()
427 chan->control[MIDI_CTL_LSB_DATA_ENTRY]; rpn()
432 chan->gm_rpn_pitch_bend_range = val; rpn()
437 chan->gm_rpn_fine_tuning = val - 8192; rpn()
442 chan->gm_rpn_coarse_tuning = val - 8192; rpn()
457 nrpn(struct snd_midi_op *ops, void *drv, struct snd_midi_channel *chan, nrpn() argument
462 ops->nrpn(drv, chan, chset); nrpn()
599 all_sounds_off(struct snd_midi_op *ops, void *drv, struct snd_midi_channel *chan) all_sounds_off() argument
606 if (chan->note[n]) { all_sounds_off()
607 ops->note_terminate(drv, n, chan); all_sounds_off()
608 chan->note[n] = 0; all_sounds_off()
617 all_notes_off(struct snd_midi_op *ops, void *drv, struct snd_midi_channel *chan) all_notes_off() argument
624 if (chan->note[n] == SNDRV_MIDI_NOTE_ON) all_notes_off()
625 note_off(ops, drv, chan, n, 0); all_notes_off()
655 struct snd_midi_channel *chan; snd_midi_channel_init_set() local
658 chan = kmalloc(n * sizeof(struct snd_midi_channel), GFP_KERNEL); snd_midi_channel_init_set()
659 if (chan) { snd_midi_channel_init_set()
661 snd_midi_channel_init(chan+i, i); snd_midi_channel_init_set()
664 return chan; snd_midi_channel_init_set()
675 struct snd_midi_channel *chan = chset->channels + ch; reset_all_channels() local
676 snd_midi_reset_controllers(chan); reset_all_channels()
677 chan->gm_rpn_pitch_bend_range = 256; /* 2 semitones */ reset_all_channels()
678 chan->gm_rpn_fine_tuning = 0; reset_all_channels()
679 chan->gm_rpn_coarse_tuning = 0; reset_all_channels()
682 chan->drum_channel = 1; reset_all_channels()
684 chan->drum_channel = 0; reset_all_channels()
708 static void snd_midi_reset_controllers(struct snd_midi_channel *chan) snd_midi_reset_controllers() argument
710 memset(chan->control, 0, sizeof(chan->control)); snd_midi_reset_controllers()
711 chan->gm_volume = 127; snd_midi_reset_controllers()
712 chan->gm_expression = 127; snd_midi_reset_controllers()
713 chan->gm_pan = 64; snd_midi_reset_controllers()
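For context, a driver plugs into this emulation core by filling a snd_midi_op with voice callbacks and routing sequencer events through snd_midi_process_event(); the sketch below mirrors the ops->... calls visible above, with hypothetical callback names:

#include <sound/asequencer.h>
#include <sound/seq_midi_emul.h>

static void demo_note_on(void *drv, int note, int vel,
			 struct snd_midi_channel *chan)
{
	/* start a voice for (note, vel); chan carries controller state */
}

static struct snd_midi_op demo_ops = {
	.note_on = demo_note_on,
	/* .note_off, .key_press, .control, .nrpn, ... as needed */
};

/* In the driver's event callback, events are decoded per channel:
 *
 *	snd_midi_process_event(&demo_ops, ev, chset);
 *
 * where chset was obtained from snd_midi_channel_alloc_set(16).
 */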
/linux-4.1.27/drivers/isdn/pcbit/
capi.h 25 extern int capi_decode_conn_conf(struct pcbit_chan *chan, struct sk_buff *skb,
28 extern int capi_decode_conn_ind(struct pcbit_chan *chan, struct sk_buff *skb,
30 extern int capi_conn_resp(struct pcbit_chan *chan, struct sk_buff **skb);
32 extern int capi_conn_active_req(struct pcbit_chan *chan, struct sk_buff **skb);
33 extern int capi_decode_conn_actv_conf(struct pcbit_chan *chan,
36 extern int capi_decode_conn_actv_ind(struct pcbit_chan *chan,
38 extern int capi_conn_active_resp(struct pcbit_chan *chan,
42 extern int capi_select_proto_req(struct pcbit_chan *chan, struct sk_buff **skb,
44 extern int capi_decode_sel_proto_conf(struct pcbit_chan *chan,
47 extern int capi_activate_transp_req(struct pcbit_chan *chan,
49 extern int capi_decode_actv_trans_conf(struct pcbit_chan *chan,
52 extern int capi_tdata_req(struct pcbit_chan *chan, struct sk_buff *skb);
53 extern int capi_tdata_resp(struct pcbit_chan *chan, struct sk_buff **skb);
58 extern int capi_decode_disc_ind(struct pcbit_chan *chan, struct sk_buff *skb);
59 extern int capi_disc_resp(struct pcbit_chan *chan, struct sk_buff **skb);
callbacks.h 16 extern void cb_out_1(struct pcbit_dev *dev, struct pcbit_chan *chan,
19 extern void cb_out_2(struct pcbit_dev *dev, struct pcbit_chan *chan,
22 extern void cb_in_1(struct pcbit_dev *dev, struct pcbit_chan *chan,
24 extern void cb_in_2(struct pcbit_dev *dev, struct pcbit_chan *chan,
26 extern void cb_in_3(struct pcbit_dev *dev, struct pcbit_chan *chan,
29 extern void cb_disc_1(struct pcbit_dev *dev, struct pcbit_chan *chan,
31 extern void cb_disc_2(struct pcbit_dev *dev, struct pcbit_chan *chan,
33 extern void cb_disc_3(struct pcbit_dev *dev, struct pcbit_chan *chan,
36 extern void cb_notdone(struct pcbit_dev *dev, struct pcbit_chan *chan,
39 extern void cb_selp_1(struct pcbit_dev *dev, struct pcbit_chan *chan,
41 extern void cb_open(struct pcbit_dev *dev, struct pcbit_chan *chan,
callbacks.c 42 void cb_out_1(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_out_1() argument
60 chan->proto)) < 0) cb_out_1()
69 chan->callref = 0; cb_out_1()
70 chan->layer2link = 0; cb_out_1()
71 chan->snum = 0; cb_out_1()
72 chan->s_refnum = refnum; cb_out_1()
84 void cb_out_2(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_out_2() argument
92 if ((len = capi_conn_active_resp(chan, &skb)) < 0) cb_out_2()
99 chan->s_refnum = refnum; cb_out_2()
106 ictl.arg = chan->id; cb_out_2()
113 if ((len = capi_select_proto_req(chan, &skb, 1 /*outgoing*/)) < 0) { cb_out_2()
119 chan->s_refnum = refnum; cb_out_2()
130 void cb_in_1(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_in_1() argument
141 ictl.arg = chan->id; cb_in_1()
173 if ((len = capi_conn_resp(chan, &skb)) < 0) { cb_in_1()
179 chan->s_refnum = refnum; cb_in_1()
190 void cb_in_2(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_in_2() argument
197 if ((len = capi_conn_active_req(chan, &skb)) < 0) { cb_in_2()
204 chan->s_refnum = refnum; cb_in_2()
216 void cb_in_3(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_in_3() argument
223 if ((len = capi_select_proto_req(chan, &skb, 0 /*incoming*/)) < 0) cb_in_3()
230 chan->s_refnum = refnum; cb_in_3()
242 void cb_disc_1(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_disc_1() argument
250 if ((len = capi_disc_resp(chan, &skb)) < 0) { cb_disc_1()
256 chan->s_refnum = refnum; cb_disc_1()
262 ictl.arg = chan->id; cb_disc_1()
271 void cb_disc_2(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_disc_2() argument
278 if ((len = capi_disc_req(chan->callref, &skb, CAUSE_NORMAL)) < 0) cb_disc_2()
285 chan->s_refnum = refnum; cb_disc_2()
295 void cb_disc_3(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_disc_3() argument
302 ictl.arg = chan->id; cb_disc_3()
306 void cb_notdone(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_notdone() argument
312 * send activate b-chan protocol
314 void cb_selp_1(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_selp_1() argument
321 if ((len = capi_activate_transp_req(chan, &skb)) < 0) cb_selp_1()
328 chan->s_refnum = refnum; cb_selp_1()
336 void cb_open(struct pcbit_dev *dev, struct pcbit_chan *chan, cb_open() argument
343 ictl.arg = chan->id; cb_open()
capi.c 134 int capi_conn_resp(struct pcbit_chan *chan, struct sk_buff **skb) capi_conn_resp() argument
143 *((ushort *)skb_put(*skb, 2)) = chan->callref; capi_conn_resp()
151 int capi_conn_active_req(struct pcbit_chan *chan, struct sk_buff **skb) capi_conn_active_req() argument
163 *((ushort *)skb_put(*skb, 2)) = chan->callref; capi_conn_active_req()
166 printk(KERN_DEBUG "Call Reference: %04x\n", chan->callref); capi_conn_active_req()
179 int capi_conn_active_resp(struct pcbit_chan *chan, struct sk_buff **skb) capi_conn_active_resp() argument
191 *((ushort *)skb_put(*skb, 2)) = chan->callref; capi_conn_active_resp()
197 int capi_select_proto_req(struct pcbit_chan *chan, struct sk_buff **skb, capi_select_proto_req() argument
211 *((ushort *)skb_put(*skb, 2)) = chan->callref; capi_select_proto_req()
215 switch (chan->proto) { capi_select_proto_req()
263 int capi_activate_transp_req(struct pcbit_chan *chan, struct sk_buff **skb) capi_activate_transp_req() argument
272 *((ushort *)skb_put(*skb, 2)) = chan->callref; capi_activate_transp_req()
275 *(skb_put(*skb, 1)) = chan->layer2link; /* Layer2 id */ capi_activate_transp_req()
285 int capi_tdata_req(struct pcbit_chan *chan, struct sk_buff *skb) capi_tdata_req() argument
309 *((u16 *) (skb->data)) = chan->callref; capi_tdata_req()
310 skb->data[2] = chan->layer2link; capi_tdata_req()
313 chan->s_refnum = (chan->s_refnum + 1) % 8; capi_tdata_req()
314 *((u32 *) (skb->data + 5)) = chan->s_refnum; capi_tdata_req()
321 int capi_tdata_resp(struct pcbit_chan *chan, struct sk_buff **skb) capi_tdata_resp() argument
330 *((ushort *)skb_put(*skb, 2)) = chan->callref; capi_tdata_resp()
332 *(skb_put(*skb, 1)) = chan->layer2link; capi_tdata_resp()
333 *(skb_put(*skb, 1)) = chan->r_refnum; capi_tdata_resp()
362 int capi_disc_resp(struct pcbit_chan *chan, struct sk_buff **skb) capi_disc_resp() argument
370 *((ushort *)skb_put(*skb, 2)) = chan->callref; capi_disc_resp()
381 int capi_decode_conn_ind(struct pcbit_chan *chan, capi_decode_conn_ind() argument
388 chan->callref = *((ushort *)skb->data); capi_decode_conn_ind()
392 printk(KERN_DEBUG "Call Reference: %04x\n", chan->callref); capi_decode_conn_ind()
399 Octet 3 = 0100 10CC - [ 7 Basic, 4 , 2-1 chan ] capi_decode_conn_ind()
407 printk(KERN_DEBUG "decode_conn_ind: chan ok\n"); capi_decode_conn_ind()
493 int capi_decode_conn_conf(struct pcbit_chan *chan, struct sk_buff *skb, capi_decode_conn_conf() argument
498 chan->callref = *((ushort *)skb->data); /* Update CallReference */ capi_decode_conn_conf()
528 int capi_decode_conn_actv_ind(struct pcbit_chan *chan, struct sk_buff *skb) capi_decode_conn_actv_ind() argument
566 int capi_decode_conn_actv_conf(struct pcbit_chan *chan, struct sk_buff *skb) capi_decode_conn_actv_conf() argument
580 int capi_decode_sel_proto_conf(struct pcbit_chan *chan, struct sk_buff *skb) capi_decode_sel_proto_conf() argument
584 chan->layer2link = *(skb->data); capi_decode_sel_proto_conf()
593 int capi_decode_actv_trans_conf(struct pcbit_chan *chan, struct sk_buff *skb) capi_decode_actv_trans_conf() argument
597 if (chan->layer2link != *(skb->data)) capi_decode_actv_trans_conf()
608 int capi_decode_disc_ind(struct pcbit_chan *chan, struct sk_buff *skb) capi_decode_disc_ind() argument
drv.c 61 static int pcbit_xmit(int driver, int chan, int ack, struct sk_buff *skb);
239 struct pcbit_chan *chan; pcbit_command() local
250 chan = (ctl->arg & 0x0F) ? dev->b2 : dev->b1; pcbit_command()
260 pcbit_fsm_event(dev, chan, EV_USR_SETUP_REQ, &info); pcbit_command()
263 pcbit_fsm_event(dev, chan, EV_USR_SETUP_RESP, NULL); pcbit_command()
269 pcbit_fsm_event(dev, chan, EV_USR_RELEASE_REQ, NULL); pcbit_command()
272 chan->proto = (ctl->arg >> 8); pcbit_command()
301 struct pcbit_chan *chan; pcbit_block_timer() local
305 chan = (struct pcbit_chan *)data; pcbit_block_timer()
307 dev = chan2dev(chan); pcbit_block_timer()
314 del_timer(&chan->block_timer); pcbit_block_timer()
315 chan->block_timer.function = NULL; pcbit_block_timer()
320 chan->queued = 0; pcbit_block_timer()
323 ictl.arg = chan->id; pcbit_block_timer()
332 struct pcbit_chan *chan; pcbit_xmit() local
342 chan = chnum ? dev->b2 : dev->b1; pcbit_xmit()
345 if (chan->fsm_state != ST_ACTIVE) pcbit_xmit()
348 if (chan->queued >= MAX_QUEUED) pcbit_xmit()
353 chan->queued); pcbit_xmit()
361 if (chan->block_timer.function == NULL) { pcbit_xmit()
362 init_timer(&chan->block_timer); pcbit_xmit()
363 chan->block_timer.function = &pcbit_block_timer; pcbit_xmit()
364 chan->block_timer.data = (long) chan; pcbit_xmit()
365 chan->block_timer.expires = jiffies + 1 * HZ; pcbit_xmit()
366 add_timer(&chan->block_timer); pcbit_xmit()
373 chan->queued++; pcbit_xmit()
377 hdrlen = capi_tdata_req(chan, skb); pcbit_xmit()
380 chan->s_refnum = refnum; pcbit_xmit()
471 struct pcbit_chan *chan; pcbit_l3_receive() local
481 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
486 chan->r_refnum = skb->data[7]; pcbit_l3_receive()
489 dev->dev_if->rcvcallb_skb(dev->id, chan->id, skb); pcbit_l3_receive()
491 if (capi_tdata_resp(chan, &skb2) > 0) pcbit_l3_receive()
497 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
509 if (chan->queued == MAX_QUEUED) { pcbit_l3_receive()
510 del_timer(&chan->block_timer); pcbit_l3_receive()
511 chan->block_timer.function = NULL; pcbit_l3_receive()
515 chan->queued--; pcbit_l3_receive()
519 ictl.arg = chan->id; pcbit_l3_receive()
530 chan = dev->b1; pcbit_l3_receive()
532 chan = dev->b2; pcbit_l3_receive()
545 capi_decode_conn_ind(chan, skb, &cbdata); pcbit_l3_receive()
548 pcbit_fsm_event(dev, chan, EV_NET_SETUP, NULL); pcbit_l3_receive()
551 pcbit_fsm_event(dev, chan, EV_USR_PROCED_REQ, &cbdata); pcbit_l3_receive()
553 pcbit_fsm_event(dev, chan, EV_USR_RELEASE_REQ, NULL); pcbit_l3_receive()
573 chan = dev->b1; pcbit_l3_receive()
576 chan = dev->b2; pcbit_l3_receive()
578 chan = NULL; pcbit_l3_receive()
583 if (capi_decode_conn_conf(chan, skb, &complete)) { pcbit_l3_receive()
585 pcbit_fsm_event(dev, chan, EV_ERROR, NULL); pcbit_l3_receive()
589 pcbit_fsm_event(dev, chan, EV_NET_CALL_PROC, NULL); pcbit_l3_receive()
591 pcbit_fsm_event(dev, chan, EV_NET_SETUP_ACK, NULL); pcbit_l3_receive()
595 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
601 if (capi_decode_conn_actv_ind(chan, skb)) { pcbit_l3_receive()
603 /* pcbit_fsm_event(dev, chan, EV_ERROR, NULL); */ pcbit_l3_receive()
606 chan->r_refnum = refnum; pcbit_l3_receive()
607 pcbit_fsm_event(dev, chan, EV_NET_CONN, NULL); pcbit_l3_receive()
611 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
617 if (capi_decode_conn_actv_conf(chan, skb) == 0) pcbit_l3_receive()
618 pcbit_fsm_event(dev, chan, EV_NET_CONN_ACK, NULL); pcbit_l3_receive()
626 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
632 if (!(err = capi_decode_sel_proto_conf(chan, skb))) pcbit_l3_receive()
633 pcbit_fsm_event(dev, chan, EV_NET_SELP_RESP, NULL); pcbit_l3_receive()
640 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
646 if (!capi_decode_actv_trans_conf(chan, skb)) pcbit_l3_receive()
647 pcbit_fsm_event(dev, chan, EV_NET_ACTV_RESP, NULL); pcbit_l3_receive()
652 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
658 if (!capi_decode_disc_ind(chan, skb)) pcbit_l3_receive()
659 pcbit_fsm_event(dev, chan, EV_NET_DISC, NULL); pcbit_l3_receive()
664 if (!(chan = capi_channel(dev, skb))) { pcbit_l3_receive()
670 if (!capi_decode_disc_ind(chan, skb)) pcbit_l3_receive()
671 pcbit_fsm_event(dev, chan, EV_NET_RELEASE, NULL); pcbit_l3_receive()
773 void pcbit_state_change(struct pcbit_dev *dev, struct pcbit_chan *chan, pcbit_state_change() argument
779 dev->id, chan->id, pcbit_state_change()
edss1.c 248 struct pcbit_chan *chan; pcbit_fsm_timer() local
250 chan = (struct pcbit_chan *) data; pcbit_fsm_timer()
252 del_timer(&chan->fsm_timer); pcbit_fsm_timer()
253 chan->fsm_timer.function = NULL; pcbit_fsm_timer()
255 dev = chan2dev(chan); pcbit_fsm_timer()
262 pcbit_fsm_event(dev, chan, EV_TIMER, NULL); pcbit_fsm_timer()
266 void pcbit_fsm_event(struct pcbit_dev *dev, struct pcbit_chan *chan, pcbit_fsm_event() argument
276 if (action->init == chan->fsm_state && action->event == event) pcbit_fsm_event()
283 event, chan->fsm_state); pcbit_fsm_event()
287 if (chan->fsm_timer.function) { pcbit_fsm_event()
288 del_timer(&chan->fsm_timer); pcbit_fsm_event()
289 chan->fsm_timer.function = NULL; pcbit_fsm_event()
292 chan->fsm_state = action->final; pcbit_fsm_event()
294 pcbit_state_change(dev, chan, action->init, event, action->final); pcbit_fsm_event()
297 if (tentry->init == chan->fsm_state) pcbit_fsm_event()
301 init_timer(&chan->fsm_timer); pcbit_fsm_event()
302 chan->fsm_timer.function = &pcbit_fsm_timer; pcbit_fsm_event()
303 chan->fsm_timer.data = (ulong) chan; pcbit_fsm_event()
304 chan->fsm_timer.expires = jiffies + tentry->timeout * HZ; pcbit_fsm_event()
305 add_timer(&chan->fsm_timer); pcbit_fsm_event()
311 action->callb(dev, chan, data); pcbit_fsm_event()
/linux-4.1.27/sound/soc/qcom/
lpass-lpaif-ipq806x.h 103 #define LPAIF_IRQ_PER(chan) (1 << (LPAIF_IRQ_BITSTRIDE * (chan)))
104 #define LPAIF_IRQ_XRUN(chan) (2 << (LPAIF_IRQ_BITSTRIDE * (chan)))
105 #define LPAIF_IRQ_ERR(chan) (4 << (LPAIF_IRQ_BITSTRIDE * (chan)))
106 #define LPAIF_IRQ_ALL(chan) (7 << (LPAIF_IRQ_BITSTRIDE * (chan)))
112 #define LPAIF_RDMA_REG_ADDR(addr, chan) \
113 (LPAIF_RDMA_REG_BASE + (addr) + (LPAIF_RDMA_REG_STRIDE * (chan)))
126 #define LPAIF_RDMACTL_REG(chan) LPAIF_RDMA_REG_ADDR(0x00, (chan))
127 #define LPAIF_RDMABASE_REG(chan) LPAIF_RDMA_REG_ADDR(0x04, (chan))
128 #define LPAIF_RDMABUFF_REG(chan) LPAIF_RDMA_REG_ADDR(0x08, (chan))
129 #define LPAIF_RDMACURR_REG(chan) LPAIF_RDMA_REG_ADDR(0x0C, (chan))
130 #define LPAIF_RDMAPER_REG(chan) LPAIF_RDMA_REG_ADDR(0x10, (chan))
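The IRQ macros above allocate three status bits per channel (period, xrun, error), strided by LPAIF_IRQ_BITSTRIDE, so LPAIF_IRQ_ALL(chan) selects the whole group. A small decoding sketch under that assumption:

#include <linux/kernel.h>

/* Hypothetical helper: report which events channel @chan raised. */
static void demo_decode_lpaif_irq(u32 status, int chan)
{
	if (status & LPAIF_IRQ_PER(chan))
		pr_info("chan %d: period done\n", chan);
	if (status & LPAIF_IRQ_XRUN(chan))
		pr_info("chan %d: xrun\n", chan);
	if (status & LPAIF_IRQ_ERR(chan))
		pr_info("chan %d: bus error\n", chan);
}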
/linux-4.1.27/drivers/media/pci/cx25821/
cx25821-video.c 148 struct cx25821_channel *chan = q->drv_priv; cx25821_queue_setup() local
149 unsigned size = (chan->fmt->depth * chan->width * chan->height) >> 3; cx25821_queue_setup()
156 alloc_ctxs[0] = chan->dev->alloc_ctx; cx25821_queue_setup()
162 struct cx25821_channel *chan = vb->vb2_queue->drv_priv; cx25821_buffer_prepare() local
163 struct cx25821_dev *dev = chan->dev; cx25821_buffer_prepare()
171 if (chan->pixel_formats == PIXEL_FRMT_411) cx25821_buffer_prepare()
172 buf->bpl = (chan->fmt->depth * chan->width) >> 3; cx25821_buffer_prepare()
174 buf->bpl = (chan->fmt->depth >> 3) * chan->width; cx25821_buffer_prepare()
176 if (vb2_plane_size(vb, 0) < chan->height * buf->bpl) cx25821_buffer_prepare()
178 vb2_set_plane_payload(vb, 0, chan->height * buf->bpl); cx25821_buffer_prepare()
179 buf->vb.v4l2_buf.field = chan->field; cx25821_buffer_prepare()
181 if (chan->pixel_formats == PIXEL_FRMT_411) { cx25821_buffer_prepare()
186 if (chan->use_cif_resolution) { cx25821_buffer_prepare()
190 bpl_local = chan->cif_width << 1; cx25821_buffer_prepare()
194 switch (chan->field) { cx25821_buffer_prepare()
198 buf->bpl, 0, chan->height); cx25821_buffer_prepare()
203 buf->bpl, 0, chan->height); cx25821_buffer_prepare()
213 chan->height >> 1); cx25821_buffer_prepare()
218 0, buf->bpl * (chan->height >> 1), cx25821_buffer_prepare()
219 buf->bpl, 0, chan->height >> 1); cx25821_buffer_prepare()
224 buf->bpl * (chan->height >> 1), 0, cx25821_buffer_prepare()
225 buf->bpl, 0, chan->height >> 1); cx25821_buffer_prepare()
234 buf, buf->vb.v4l2_buf.index, chan->width, chan->height, cx25821_buffer_prepare()
235 chan->fmt->depth, chan->fmt->name, cx25821_buffer_prepare()
245 struct cx25821_channel *chan = vb->vb2_queue->drv_priv; cx25821_buffer_finish() local
246 struct cx25821_dev *dev = chan->dev; cx25821_buffer_finish()
255 struct cx25821_channel *chan = vb->vb2_queue->drv_priv; cx25821_buffer_queue() local
256 struct cx25821_dev *dev = chan->dev; cx25821_buffer_queue()
258 struct cx25821_dmaqueue *q = &dev->channels[chan->id].dma_vidq; cx25821_buffer_queue()
278 struct cx25821_channel *chan = q->drv_priv; cx25821_start_streaming() local
279 struct cx25821_dev *dev = chan->dev; cx25821_start_streaming()
280 struct cx25821_dmaqueue *dmaq = &dev->channels[chan->id].dma_vidq; cx25821_start_streaming()
285 cx25821_start_video_dma(dev, dmaq, buf, chan->sram_channels); cx25821_start_streaming()
291 struct cx25821_channel *chan = q->drv_priv; cx25821_stop_streaming() local
292 struct cx25821_dev *dev = chan->dev; cx25821_stop_streaming()
293 struct cx25821_dmaqueue *dmaq = &dev->channels[chan->id].dma_vidq; cx25821_stop_streaming()
296 cx_write(chan->sram_channels->dma_ctl, 0); /* FIFO and RISC disable */ cx25821_stop_streaming()
336 struct cx25821_channel *chan = video_drvdata(file); cx25821_vidioc_g_fmt_vid_cap() local
338 f->fmt.pix.width = chan->width; cx25821_vidioc_g_fmt_vid_cap()
339 f->fmt.pix.height = chan->height; cx25821_vidioc_g_fmt_vid_cap()
340 f->fmt.pix.field = chan->field; cx25821_vidioc_g_fmt_vid_cap()
341 f->fmt.pix.pixelformat = chan->fmt->fourcc; cx25821_vidioc_g_fmt_vid_cap()
342 f->fmt.pix.bytesperline = (chan->width * chan->fmt->depth) >> 3; cx25821_vidioc_g_fmt_vid_cap()
343 f->fmt.pix.sizeimage = chan->height * f->fmt.pix.bytesperline; cx25821_vidioc_g_fmt_vid_cap()
352 struct cx25821_channel *chan = video_drvdata(file); cx25821_vidioc_try_fmt_vid_cap() local
353 struct cx25821_dev *dev = chan->dev; cx25821_vidioc_try_fmt_vid_cap()
390 struct cx25821_channel *chan = video_drvdata(file); vidioc_s_fmt_vid_cap() local
391 struct cx25821_dev *dev = chan->dev; vidioc_s_fmt_vid_cap()
400 chan->fmt = cx25821_format_by_fourcc(f->fmt.pix.pixelformat); vidioc_s_fmt_vid_cap()
401 chan->field = f->fmt.pix.field; vidioc_s_fmt_vid_cap()
402 chan->width = f->fmt.pix.width; vidioc_s_fmt_vid_cap()
403 chan->height = f->fmt.pix.height; vidioc_s_fmt_vid_cap()
413 if (chan->width == 320 || chan->width == 352) vidioc_s_fmt_vid_cap()
414 chan->use_cif_resolution = 1; vidioc_s_fmt_vid_cap()
416 chan->use_cif_resolution = 0; vidioc_s_fmt_vid_cap()
418 chan->cif_width = chan->width; vidioc_s_fmt_vid_cap()
419 medusa_set_resolution(dev, chan->width, SRAM_CH00); vidioc_s_fmt_vid_cap()
425 struct cx25821_channel *chan = video_drvdata(file); vidioc_log_status() local
426 struct cx25821_dev *dev = chan->dev; vidioc_log_status()
427 const struct sram_channel *sram_ch = chan->sram_channels; vidioc_log_status()
440 struct cx25821_channel *chan = video_drvdata(file); cx25821_vidioc_querycap() local
441 struct cx25821_dev *dev = chan->dev; cx25821_vidioc_querycap()
449 if (chan->id >= VID_CHANNEL_NUM) cx25821_vidioc_querycap()
459 struct cx25821_channel *chan = video_drvdata(file); cx25821_vidioc_g_std() local
461 *tvnorms = chan->dev->tvnorm; cx25821_vidioc_g_std()
468 struct cx25821_channel *chan = video_drvdata(file); cx25821_vidioc_s_std() local
469 struct cx25821_dev *dev = chan->dev; cx25821_vidioc_s_std()
475 chan->width = 720; cx25821_vidioc_s_std()
476 chan->height = (dev->tvnorm & V4L2_STD_625_50) ? 576 : 480; cx25821_vidioc_s_std()
508 struct cx25821_channel *chan = cx25821_s_ctrl() local
510 struct cx25821_dev *dev = chan->dev; cx25821_s_ctrl()
514 medusa_set_brightness(dev, ctrl->val, chan->id); cx25821_s_ctrl()
517 medusa_set_hue(dev, ctrl->val, chan->id); cx25821_s_ctrl()
520 medusa_set_contrast(dev, ctrl->val, chan->id); cx25821_s_ctrl()
523 medusa_set_saturation(dev, ctrl->val, chan->id); cx25821_s_ctrl()
557 struct cx25821_channel *chan = video_drvdata(file); cx25821_vidioc_try_fmt_vid_out() local
558 struct cx25821_dev *dev = chan->dev; cx25821_vidioc_try_fmt_vid_out()
576 struct cx25821_channel *chan = video_drvdata(file); vidioc_s_fmt_vid_out() local
584 chan->fmt = cx25821_format_by_fourcc(f->fmt.pix.pixelformat); vidioc_s_fmt_vid_out()
585 chan->field = f->fmt.pix.field; vidioc_s_fmt_vid_out()
586 chan->width = f->fmt.pix.width; vidioc_s_fmt_vid_out()
587 chan->height = f->fmt.pix.height; vidioc_s_fmt_vid_out()
589 chan->pixel_formats = PIXEL_FRMT_411; vidioc_s_fmt_vid_out()
591 chan->pixel_formats = PIXEL_FRMT_422; vidioc_s_fmt_vid_out()
696 struct cx25821_channel *chan = &dev->channels[i]; cx25821_video_register() local
697 struct video_device *vdev = &chan->vdev; cx25821_video_register()
698 struct v4l2_ctrl_handler *hdl = &chan->hdl; cx25821_video_register()
723 chan->out = &dev->vid_out_data[i - SRAM_CH09]; cx25821_video_register()
724 chan->out->chan = chan; cx25821_video_register()
727 chan->sram_channels = &cx25821_sram_channels[i]; cx25821_video_register()
728 chan->width = 720; cx25821_video_register()
729 chan->field = V4L2_FIELD_INTERLACED; cx25821_video_register()
731 chan->height = 576; cx25821_video_register()
733 chan->height = 480; cx25821_video_register()
735 if (chan->pixel_formats == PIXEL_FRMT_411) cx25821_video_register()
736 chan->fmt = cx25821_format_by_fourcc(V4L2_PIX_FMT_Y41P); cx25821_video_register()
738 chan->fmt = cx25821_format_by_fourcc(V4L2_PIX_FMT_YUYV); cx25821_video_register()
740 cx_write(chan->sram_channels->int_stat, 0xffffffff); cx25821_video_register()
742 INIT_LIST_HEAD(&chan->dma_vidq.active); cx25821_video_register()
744 q = &chan->vidq; cx25821_video_register()
752 q->drv_priv = chan; cx25821_video_register()
775 video_set_drvdata(vdev, chan); cx25821_video_register()
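
cx25821_buffer_prepare() above uses two bytes-per-line formulas: the bit-exact (depth * width) >> 3 for the packed 4:1:1 format, and (depth >> 3) * width for byte-aligned formats. For 12 bpp Y41P the order matters, since depth >> 3 would truncate to one byte per pixel. A standalone comparison at 720x576 with the standard Y41P/YUYV depths:

#include <stdio.h>

static unsigned bpl_bit_exact(unsigned depth, unsigned width)
{
	return (depth * width) >> 3;   /* used for PIXEL_FRMT_411 */
}

static unsigned bpl_byte_aligned(unsigned depth, unsigned width)
{
	return (depth >> 3) * width;   /* used for byte-sized depths */
}

int main(void)
{
	unsigned width = 720, height = 576;

	printf("Y41P (12bpp): %u vs %u bytes/line\n",
	       bpl_bit_exact(12, width), bpl_byte_aligned(12, width));
	printf("YUYV (16bpp): %u bytes/line, sizeimage=%u\n",
	       bpl_byte_aligned(16, width),
	       height * bpl_byte_aligned(16, width));
	return 0;
}
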
H A Dcx25821-video-upstream.c95 static __le32 *cx25821_update_riscprogram(struct cx25821_channel *chan, cx25821_update_riscprogram() argument
101 struct cx25821_video_out_data *out = chan->out; cx25821_update_riscprogram()
127 static __le32 *cx25821_risc_field_upstream(struct cx25821_channel *chan, __le32 *rp, cx25821_risc_field_upstream() argument
133 struct cx25821_video_out_data *out = chan->out; cx25821_risc_field_upstream()
135 const struct sram_channel *sram_ch = chan->sram_channels; cx25821_risc_field_upstream()
172 static int cx25821_risc_buffer_upstream(struct cx25821_channel *chan, cx25821_risc_buffer_upstream() argument
177 struct cx25821_video_out_data *out = chan->out; cx25821_risc_buffer_upstream()
210 rp = cx25821_risc_field_upstream(chan, rp, cx25821_risc_buffer_upstream()
219 rp = cx25821_risc_field_upstream(chan, rp, cx25821_risc_buffer_upstream()
245 void cx25821_stop_upstream_video(struct cx25821_channel *chan) cx25821_stop_upstream_video() argument
247 struct cx25821_video_out_data *out = chan->out; cx25821_stop_upstream_video()
248 struct cx25821_dev *dev = chan->dev; cx25821_stop_upstream_video()
249 const struct sram_channel *sram_ch = chan->sram_channels; cx25821_stop_upstream_video()
268 free_irq(dev->pci->irq, chan); cx25821_stop_upstream_video()
283 void cx25821_free_mem_upstream(struct cx25821_channel *chan) cx25821_free_mem_upstream() argument
285 struct cx25821_video_out_data *out = chan->out; cx25821_free_mem_upstream()
286 struct cx25821_dev *dev = chan->dev; cx25821_free_mem_upstream()
289 cx25821_stop_upstream_video(chan); cx25821_free_mem_upstream()
305 int cx25821_write_frame(struct cx25821_channel *chan, cx25821_write_frame() argument
308 struct cx25821_video_out_data *out = chan->out; cx25821_write_frame()
346 static int cx25821_upstream_buffer_prepare(struct cx25821_channel *chan, cx25821_upstream_buffer_prepare() argument
350 struct cx25821_video_out_data *out = chan->out; cx25821_upstream_buffer_prepare()
351 struct cx25821_dev *dev = chan->dev; cx25821_upstream_buffer_prepare()
394 ret = cx25821_risc_buffer_upstream(chan, dev->pci, 0, bpl, cx25821_upstream_buffer_prepare()
407 static int cx25821_video_upstream_irq(struct cx25821_channel *chan, u32 status) cx25821_video_upstream_irq() argument
409 struct cx25821_video_out_data *out = chan->out; cx25821_video_upstream_irq()
410 struct cx25821_dev *dev = chan->dev; cx25821_video_upstream_irq()
412 const struct sram_channel *channel = chan->sram_channels; cx25821_video_upstream_irq()
455 rp = cx25821_update_riscprogram(chan, cx25821_video_upstream_irq()
496 struct cx25821_channel *chan = dev_id; cx25821_upstream_irq() local
497 struct cx25821_dev *dev = chan->dev; cx25821_upstream_irq()
505 sram_ch = chan->sram_channels; cx25821_upstream_irq()
511 handled = cx25821_video_upstream_irq(chan, vid_status); cx25821_upstream_irq()
516 static void cx25821_set_pixelengine(struct cx25821_channel *chan, cx25821_set_pixelengine() argument
520 struct cx25821_video_out_data *out = chan->out; cx25821_set_pixelengine()
521 struct cx25821_dev *dev = chan->dev; cx25821_set_pixelengine()
551 static int cx25821_start_video_dma_upstream(struct cx25821_channel *chan, cx25821_start_video_dma_upstream() argument
554 struct cx25821_video_out_data *out = chan->out; cx25821_start_video_dma_upstream()
555 struct cx25821_dev *dev = chan->dev; cx25821_start_video_dma_upstream()
584 IRQF_SHARED, dev->name, chan); cx25821_start_video_dma_upstream()
605 int cx25821_vidupstream_init(struct cx25821_channel *chan, cx25821_vidupstream_init() argument
608 struct cx25821_video_out_data *out = chan->out; cx25821_vidupstream_init()
609 struct cx25821_dev *dev = chan->dev; cx25821_vidupstream_init()
621 sram_ch = chan->sram_channels; cx25821_vidupstream_init()
656 cx25821_set_pixelengine(chan, sram_ch, out->_pixel_format); cx25821_vidupstream_init()
662 err = cx25821_upstream_buffer_prepare(chan, sram_ch, out->_line_size); cx25821_vidupstream_init()
669 cx25821_start_video_dma_upstream(chan, sram_ch); cx25821_vidupstream_init()
/linux-4.1.27/drivers/dma/ioat/
H A Ddma_v2.c54 struct ioat_chan_common *chan = &ioat->base; __ioat2_issue_pending() local
58 writew(ioat->dmacount, chan->reg_base + IOAT_CHAN_DMACOUNT_OFFSET); __ioat2_issue_pending()
59 dev_dbg(to_dev(chan), __ioat2_issue_pending()
129 struct ioat_chan_common *chan = &ioat->base; __cleanup() local
136 dev_dbg(to_dev(chan), "%s: head: %#x tail: %#x issued: %#x\n", __cleanup()
162 chan->last_completion = phys_complete; __cleanup()
164 dev_dbg(to_dev(chan), "%s: cancel completion timeout\n", __cleanup()
166 clear_bit(IOAT_COMPLETION_PENDING, &chan->state); __cleanup()
167 mod_timer(&chan->timer, jiffies + IDLE_TIMEOUT); __cleanup()
173 * @chan: ioat channel to be cleaned up
177 struct ioat_chan_common *chan = &ioat->base; ioat2_cleanup() local
180 spin_lock_bh(&chan->cleanup_lock); ioat2_cleanup()
181 if (ioat_cleanup_preamble(chan, &phys_complete)) ioat2_cleanup()
183 spin_unlock_bh(&chan->cleanup_lock); ioat2_cleanup()
189 struct ioat_chan_common *chan = &ioat->base; ioat2_cleanup_event() local
192 if (!test_bit(IOAT_RUN, &chan->state)) ioat2_cleanup_event()
199 struct ioat_chan_common *chan = &ioat->base; __ioat2_restart_chan() local
204 set_bit(IOAT_COMPLETION_PENDING, &chan->state); __ioat2_restart_chan()
205 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); __ioat2_restart_chan()
207 dev_dbg(to_dev(chan), __ioat2_restart_chan()
221 int ioat2_quiesce(struct ioat_chan_common *chan, unsigned long tmo) ioat2_quiesce() argument
227 status = ioat_chansts(chan); ioat2_quiesce()
229 ioat_suspend(chan); ioat2_quiesce()
235 status = ioat_chansts(chan); ioat2_quiesce()
242 int ioat2_reset_sync(struct ioat_chan_common *chan, unsigned long tmo) ioat2_reset_sync() argument
247 ioat_reset(chan); ioat2_reset_sync()
248 while (ioat_reset_pending(chan)) { ioat2_reset_sync()
261 struct ioat_chan_common *chan = &ioat->base; ioat2_restart_channel() local
264 ioat2_quiesce(chan, 0); ioat2_restart_channel()
265 if (ioat_cleanup_preamble(chan, &phys_complete)) ioat2_restart_channel()
273 struct ioat_chan_common *chan = &ioat->base; check_active() local
276 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); check_active()
280 if (test_and_clear_bit(IOAT_CHAN_ACTIVE, &chan->state)) check_active()
281 mod_timer(&chan->timer, jiffies + IDLE_TIMEOUT); check_active()
292 mod_timer(&chan->timer, jiffies + IDLE_TIMEOUT); check_active()
300 struct ioat_chan_common *chan = &ioat->base; ioat2_timer_event() local
304 status = ioat_chansts(chan); ioat2_timer_event()
312 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat2_timer_event()
313 dev_err(to_dev(chan), "%s: Channel halted (%x)\n", ioat2_timer_event()
315 if (test_bit(IOAT_RUN, &chan->state)) ioat2_timer_event()
325 spin_lock_bh(&chan->cleanup_lock); ioat2_timer_event()
326 if (ioat_cleanup_preamble(chan, &phys_complete)) ioat2_timer_event()
328 else if (test_bit(IOAT_COMPLETION_ACK, &chan->state)) { ioat2_timer_event()
332 spin_unlock_bh(&chan->cleanup_lock); ioat2_timer_event()
335 set_bit(IOAT_COMPLETION_ACK, &chan->state); ioat2_timer_event()
336 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat2_timer_event()
341 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat2_timer_event()
347 spin_unlock_bh(&chan->cleanup_lock); ioat2_timer_event()
350 static int ioat2_reset_hw(struct ioat_chan_common *chan) ioat2_reset_hw() argument
355 ioat2_quiesce(chan, msecs_to_jiffies(100)); ioat2_reset_hw()
357 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat2_reset_hw()
358 writel(chanerr, chan->reg_base + IOAT_CHANERR_OFFSET); ioat2_reset_hw()
360 return ioat2_reset_sync(chan, msecs_to_jiffies(200)); ioat2_reset_hw()
413 struct dma_chan *c = tx->chan; ioat2_tx_submit_unlock()
415 struct ioat_chan_common *chan = &ioat->base; ioat2_tx_submit_unlock() local
421 if (!test_and_set_bit(IOAT_CHAN_ACTIVE, &chan->state)) ioat2_tx_submit_unlock()
422 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat2_tx_submit_unlock()
438 static struct ioat_ring_ent *ioat2_alloc_ring_ent(struct dma_chan *chan, gfp_t flags) ioat2_alloc_ring_ent() argument
445 dma = to_ioatdma_device(chan->device); ioat2_alloc_ring_ent()
457 dma_async_tx_descriptor_init(&desc->txd, chan); ioat2_alloc_ring_ent()
464 static void ioat2_free_ring_ent(struct ioat_ring_ent *desc, struct dma_chan *chan) ioat2_free_ring_ent() argument
468 dma = to_ioatdma_device(chan->device); ioat2_free_ring_ent()
512 * @chan: channel to be initialized
517 struct ioat_chan_common *chan = &ioat->base; ioat2_alloc_chan_resources() local
528 writew(IOAT_CHANCTRL_RUN, chan->reg_base + IOAT_CHANCTRL_OFFSET); ioat2_alloc_chan_resources()
532 chan->completion = pci_pool_alloc(chan->device->completion_pool, ioat2_alloc_chan_resources()
533 GFP_KERNEL, &chan->completion_dma); ioat2_alloc_chan_resources()
534 if (!chan->completion) ioat2_alloc_chan_resources()
537 memset(chan->completion, 0, sizeof(*chan->completion)); ioat2_alloc_chan_resources()
538 writel(((u64) chan->completion_dma) & 0x00000000FFFFFFFF, ioat2_alloc_chan_resources()
539 chan->reg_base + IOAT_CHANCMP_OFFSET_LOW); ioat2_alloc_chan_resources()
540 writel(((u64) chan->completion_dma) >> 32, ioat2_alloc_chan_resources()
541 chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH); ioat2_alloc_chan_resources()
548 spin_lock_bh(&chan->cleanup_lock); ioat2_alloc_chan_resources()
555 set_bit(IOAT_RUN, &chan->state); ioat2_alloc_chan_resources()
557 spin_unlock_bh(&chan->cleanup_lock); ioat2_alloc_chan_resources()
564 status = ioat_chansts(chan); ioat2_alloc_chan_resources()
570 u32 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat2_alloc_chan_resources()
572 dev_WARN(to_dev(chan), ioat2_alloc_chan_resources()
585 struct ioat_chan_common *chan = &ioat->base; reshape_ring() local
586 struct dma_chan *c = &chan->common; reshape_ring()
676 dev_dbg(to_dev(chan), "%s: allocated %d descriptors\n", reshape_ring()
693 struct ioat_chan_common *chan = &ioat->base; ioat2_check_space_lock() local
703 dev_dbg(to_dev(chan), "%s: num_descs: %d (%x:%x:%x)\n", ioat2_check_space_lock()
708 retry = test_and_set_bit(IOAT_RESHAPE_PENDING, &chan->state); ioat2_check_space_lock()
715 spin_lock_bh(&chan->cleanup_lock); ioat2_check_space_lock()
718 clear_bit(IOAT_RESHAPE_PENDING, &chan->state); ioat2_check_space_lock()
720 spin_unlock_bh(&chan->cleanup_lock); ioat2_check_space_lock()
727 dev_dbg(to_dev(chan), "%s: ring full! num_descs: %d (%x:%x:%x)\n", ioat2_check_space_lock()
734 if (time_is_before_jiffies(chan->timer.expires) ioat2_check_space_lock()
735 && timer_pending(&chan->timer)) { ioat2_check_space_lock()
736 struct ioatdma_device *device = chan->device; ioat2_check_space_lock()
738 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat2_check_space_lock()
739 device->timer_fn((unsigned long) &chan->common); ioat2_check_space_lock()
793 * @chan: the channel to be cleaned
798 struct ioat_chan_common *chan = &ioat->base; ioat2_free_chan_resources() local
799 struct ioatdma_device *device = chan->device; ioat2_free_chan_resources()
811 ioat_stop(chan); ioat2_free_chan_resources()
812 device->reset_hw(chan); ioat2_free_chan_resources()
814 spin_lock_bh(&chan->cleanup_lock); ioat2_free_chan_resources()
817 dev_dbg(to_dev(chan), "freeing %d idle descriptors\n", descs); ioat2_free_chan_resources()
824 dev_err(to_dev(chan), "Freeing %d in use descriptors!\n", ioat2_free_chan_resources()
836 pci_pool_free(device->completion_pool, chan->completion, ioat2_free_chan_resources()
837 chan->completion_dma); ioat2_free_chan_resources()
839 spin_unlock_bh(&chan->cleanup_lock); ioat2_free_chan_resources()
841 chan->last_completion = 0; ioat2_free_chan_resources()
842 chan->completion_dma = 0; ioat2_free_chan_resources()
881 struct ioat_chan_common *chan; ioat2_dma_probe() local
901 chan = to_chan_common(c); ioat2_dma_probe()
903 chan->reg_base + IOAT_DCACTRL_OFFSET); ioat2_dma_probe()
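
ioat2_alloc_chan_resources() above programs the 64-bit completion writeback address as two 32-bit halves, low word first. The same split in isolation, with plain variables standing in for the IOAT_CHANCMP_OFFSET_LOW/HIGH registers:

#include <stdio.h>
#include <stdint.h>

static uint32_t chancmp_lo, chancmp_hi;  /* stand-ins for the two registers */

static void program_completion(uint64_t completion_dma)
{
	chancmp_lo = (uint32_t)(completion_dma & 0x00000000FFFFFFFFull);
	chancmp_hi = (uint32_t)(completion_dma >> 32);
}

int main(void)
{
	program_completion(0x123456789abcdef0ull);  /* arbitrary example address */
	printf("low=%#x high=%#x\n", chancmp_lo, chancmp_hi);
	return 0;
}
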
H A Ddma.c58 struct ioat_chan_common *chan; ioat_dma_do_interrupt() local
75 chan = ioat_chan_by_index(instance, bit); ioat_dma_do_interrupt()
76 if (test_bit(IOAT_RUN, &chan->state)) ioat_dma_do_interrupt()
77 tasklet_schedule(&chan->cleanup_task); ioat_dma_do_interrupt()
91 struct ioat_chan_common *chan = data; ioat_dma_do_interrupt_msix() local
93 if (test_bit(IOAT_RUN, &chan->state)) ioat_dma_do_interrupt_msix()
94 tasklet_schedule(&chan->cleanup_task); ioat_dma_do_interrupt_msix()
100 void ioat_init_channel(struct ioatdma_device *device, struct ioat_chan_common *chan, int idx) ioat_init_channel() argument
103 struct dma_chan *c = &chan->common; ioat_init_channel()
106 chan->device = device; ioat_init_channel()
107 chan->reg_base = device->reg_base + (0x80 * (idx + 1)); ioat_init_channel()
108 spin_lock_init(&chan->cleanup_lock); ioat_init_channel()
109 chan->common.device = dma; ioat_init_channel()
110 dma_cookie_init(&chan->common); ioat_init_channel()
111 list_add_tail(&chan->common.device_node, &dma->channels); ioat_init_channel()
112 device->idx[idx] = chan; ioat_init_channel()
113 init_timer(&chan->timer); ioat_init_channel()
114 chan->timer.function = device->timer_fn; ioat_init_channel()
115 chan->timer.data = data; ioat_init_channel()
116 tasklet_init(&chan->cleanup_task, device->cleanup_fn, data); ioat_init_channel()
167 * @chan: DMA channel handle
180 static void ioat1_dma_memcpy_issue_pending(struct dma_chan *chan) ioat1_dma_memcpy_issue_pending() argument
182 struct ioat_dma_chan *ioat = to_ioat_chan(chan); ioat1_dma_memcpy_issue_pending()
197 struct ioat_chan_common *chan = &ioat->base; ioat1_reset_channel() local
198 void __iomem *reg_base = chan->reg_base; ioat1_reset_channel()
201 dev_warn(to_dev(chan), "reset\n"); ioat1_reset_channel()
203 chansts = *chan->completion & IOAT_CHANSTS_STATUS; ioat1_reset_channel()
205 dev_err(to_dev(chan), ioat1_reset_channel()
206 "chan%d, CHANSTS = 0x%08x CHANERR = 0x%04x, clearing\n", ioat1_reset_channel()
207 chan_num(chan), chansts, chanerr); ioat1_reset_channel()
221 reg_base + IOAT_CHANCMD_OFFSET(chan->device->version)); ioat1_reset_channel()
222 set_bit(IOAT_RESET_PENDING, &chan->state); ioat1_reset_channel()
223 mod_timer(&chan->timer, jiffies + RESET_DELAY); ioat1_reset_channel()
228 struct dma_chan *c = tx->chan; ioat1_tx_submit()
231 struct ioat_chan_common *chan = &ioat->base; ioat1_tx_submit() local
251 if (!test_and_set_bit(IOAT_COMPLETION_PENDING, &chan->state)) ioat1_tx_submit()
252 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat1_tx_submit()
305 * @chan: the channel to be filled out
310 struct ioat_chan_common *chan = &ioat->base; ioat1_dma_alloc_chan_resources() local
321 writew(IOAT_CHANCTRL_RUN, chan->reg_base + IOAT_CHANCTRL_OFFSET); ioat1_dma_alloc_chan_resources()
323 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat1_dma_alloc_chan_resources()
325 dev_err(to_dev(chan), "CHANERR = %x, clearing\n", chanerr); ioat1_dma_alloc_chan_resources()
326 writel(chanerr, chan->reg_base + IOAT_CHANERR_OFFSET); ioat1_dma_alloc_chan_resources()
333 dev_err(to_dev(chan), "Only %d initial descriptors\n", i); ioat1_dma_alloc_chan_resources()
346 chan->completion = pci_pool_alloc(chan->device->completion_pool, ioat1_dma_alloc_chan_resources()
347 GFP_KERNEL, &chan->completion_dma); ioat1_dma_alloc_chan_resources()
348 memset(chan->completion, 0, sizeof(*chan->completion)); ioat1_dma_alloc_chan_resources()
349 writel(((u64) chan->completion_dma) & 0x00000000FFFFFFFF, ioat1_dma_alloc_chan_resources()
350 chan->reg_base + IOAT_CHANCMP_OFFSET_LOW); ioat1_dma_alloc_chan_resources()
351 writel(((u64) chan->completion_dma) >> 32, ioat1_dma_alloc_chan_resources()
352 chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH); ioat1_dma_alloc_chan_resources()
354 set_bit(IOAT_RUN, &chan->state); ioat1_dma_alloc_chan_resources()
356 dev_dbg(to_dev(chan), "%s: allocated %d descriptors\n", ioat1_dma_alloc_chan_resources()
361 void ioat_stop(struct ioat_chan_common *chan) ioat_stop() argument
363 struct ioatdma_device *device = chan->device; ioat_stop()
365 int chan_id = chan_num(chan); ioat_stop()
371 clear_bit(IOAT_RUN, &chan->state); ioat_stop()
388 del_timer_sync(&chan->timer); ioat_stop()
391 tasklet_kill(&chan->cleanup_task); ioat_stop()
394 device->cleanup_fn((unsigned long) &chan->common); ioat_stop()
399 * @chan: the channel to be cleaned
404 struct ioat_chan_common *chan = &ioat->base; ioat1_dma_free_chan_resources() local
405 struct ioatdma_device *ioatdma_device = chan->device; ioat1_dma_free_chan_resources()
415 ioat_stop(chan); ioat1_dma_free_chan_resources()
421 chan->reg_base + IOAT_CHANCMD_OFFSET(chan->device->version)); ioat1_dma_free_chan_resources()
426 dev_dbg(to_dev(chan), "%s: freeing %d from used list\n", ioat1_dma_free_chan_resources()
445 chan->completion, ioat1_dma_free_chan_resources()
446 chan->completion_dma); ioat1_dma_free_chan_resources()
450 dev_err(to_dev(chan), "Freeing %d in use descriptors!\n", ioat1_dma_free_chan_resources()
453 chan->last_completion = 0; ioat1_dma_free_chan_resources()
454 chan->completion_dma = 0; ioat1_dma_free_chan_resources()
536 struct ioat_chan_common *chan = &ioat->base; ioat1_dma_prep_memcpy() local
538 dev_err(to_dev(chan), ioat1_dma_prep_memcpy()
539 "chan%d - get_next_desc failed\n", chan_num(chan)); ioat1_dma_prep_memcpy()
560 struct ioat_chan_common *chan = &ioat->base; ioat1_cleanup_event() local
563 if (!test_bit(IOAT_RUN, &chan->state)) ioat1_cleanup_event()
568 dma_addr_t ioat_get_current_completion(struct ioat_chan_common *chan) ioat_get_current_completion() argument
573 completion = *chan->completion; ioat_get_current_completion()
576 dev_dbg(to_dev(chan), "%s: phys_complete: %#llx\n", __func__, ioat_get_current_completion()
580 u32 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat_get_current_completion()
581 dev_err(to_dev(chan), "Channel halted, chanerr = %x\n", ioat_get_current_completion()
590 bool ioat_cleanup_preamble(struct ioat_chan_common *chan, ioat_cleanup_preamble() argument
593 *phys_complete = ioat_get_current_completion(chan); ioat_cleanup_preamble()
594 if (*phys_complete == chan->last_completion) ioat_cleanup_preamble()
596 clear_bit(IOAT_COMPLETION_ACK, &chan->state); ioat_cleanup_preamble()
597 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat_cleanup_preamble()
604 struct ioat_chan_common *chan = &ioat->base; __cleanup() local
608 dev_dbg(to_dev(chan), "%s: phys_complete: %llx\n", __cleanup()
649 dev_dbg(to_dev(chan), __cleanup()
652 clear_bit(IOAT_COMPLETION_PENDING, &chan->state); __cleanup()
660 chan->last_completion = phys_complete; __cleanup()
665 * @chan: ioat channel to be cleaned up
673 struct ioat_chan_common *chan = &ioat->base; ioat1_cleanup() local
676 prefetch(chan->completion); ioat1_cleanup()
678 if (!spin_trylock_bh(&chan->cleanup_lock)) ioat1_cleanup()
681 if (!ioat_cleanup_preamble(chan, &phys_complete)) { ioat1_cleanup()
682 spin_unlock_bh(&chan->cleanup_lock); ioat1_cleanup()
687 spin_unlock_bh(&chan->cleanup_lock); ioat1_cleanup()
694 spin_unlock_bh(&chan->cleanup_lock); ioat1_cleanup()
700 struct ioat_chan_common *chan = &ioat->base; ioat1_timer_event() local
702 dev_dbg(to_dev(chan), "%s: state: %lx\n", __func__, chan->state); ioat1_timer_event()
704 spin_lock_bh(&chan->cleanup_lock); ioat1_timer_event()
705 if (test_and_clear_bit(IOAT_RESET_PENDING, &chan->state)) { ioat1_timer_event()
713 ioat_start(chan); ioat1_timer_event()
716 set_bit(IOAT_COMPLETION_PENDING, &chan->state); ioat1_timer_event()
717 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat1_timer_event()
719 } else if (test_bit(IOAT_COMPLETION_PENDING, &chan->state)) { ioat1_timer_event()
727 if (ioat_cleanup_preamble(chan, &phys_complete)) ioat1_timer_event()
729 else if (test_bit(IOAT_COMPLETION_ACK, &chan->state)) ioat1_timer_event()
732 u64 status = ioat_chansts(chan); ioat1_timer_event()
736 *chan->completion = status; ioat1_timer_event()
738 set_bit(IOAT_COMPLETION_ACK, &chan->state); ioat1_timer_event()
739 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat1_timer_event()
743 spin_unlock_bh(&chan->cleanup_lock); ioat1_timer_event()
750 struct ioat_chan_common *chan = to_chan_common(c); ioat_dma_tx_status() local
751 struct ioatdma_device *device = chan->device; ioat_dma_tx_status()
765 struct ioat_chan_common *chan = &ioat->base; ioat1_dma_start_null_desc() local
774 dev_err(to_dev(chan), ioat1_dma_start_null_desc()
795 ioat_start(chan); ioat1_dma_start_null_desc()
848 dev_err(dev, "selftest cannot allocate chan resource\n"); ioat_dma_self_test()
923 struct ioat_chan_common *chan; ioat_dma_setup_interrupts() local
952 chan = ioat_chan_by_index(device, i); ioat_dma_setup_interrupts()
955 "ioat-msix", chan); ioat_dma_setup_interrupts()
959 chan = ioat_chan_by_index(device, j); ioat_dma_setup_interrupts()
960 devm_free_irq(dev, msix->vector, chan); ioat_dma_setup_interrupts()
1146 struct ioat_chan_common *chan; ioat_attr_show() local
1149 chan = container_of(kobj, struct ioat_chan_common, kobj); ioat_attr_show()
1153 return entry->show(&chan->common, page); ioat_attr_show()
1171 struct ioat_chan_common *chan = to_chan_common(c); ioat_kobject_add() local
1175 err = kobject_init_and_add(&chan->kobj, type, parent, "quickdata"); ioat_kobject_add()
1177 dev_warn(to_dev(chan), ioat_kobject_add()
1179 kobject_put(&chan->kobj); ioat_kobject_add()
1180 set_bit(IOAT_KOBJ_INIT_FAIL, &chan->state); ioat_kobject_add()
1191 struct ioat_chan_common *chan = to_chan_common(c); ioat_kobject_del() local
1193 if (!test_bit(IOAT_KOBJ_INIT_FAIL, &chan->state)) { ioat_kobject_del()
1194 kobject_del(&chan->kobj); ioat_kobject_del()
1195 kobject_put(&chan->kobj); ioat_kobject_del()
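
ioat_init_channel() above places each channel's register bank at a fixed 0x80-byte stride past the device-global bank: reg_base + 0x80 * (idx + 1). A standalone check of that layout; the MMIO base value passed in is an arbitrary example:

#include <stdio.h>
#include <stdint.h>

static uint64_t chan_reg_base(uint64_t dev_reg_base, int idx)
{
	return dev_reg_base + 0x80 * (idx + 1);  /* bank 0 is device-global */
}

int main(void)
{
	int idx;

	for (idx = 0; idx < 4; idx++)
		printf("chan %d regs at %#llx\n", idx,
		       (unsigned long long)chan_reg_base(0xfed00000, idx));
	return 0;
}
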
H A Ddma.h90 int (*reset_hw)(struct ioat_chan_common *chan);
163 struct ioat_chan_common *chan = to_chan_common(c); to_ioat_chan() local
165 return container_of(chan, struct ioat_dma_chan, base); to_ioat_chan()
198 __dump_desc_dbg(struct ioat_chan_common *chan, struct ioat_dma_descriptor *hw, __dump_desc_dbg() argument
201 struct device *dev = to_dev(chan); __dump_desc_dbg()
219 static inline u64 ioat_chansts_32(struct ioat_chan_common *chan) ioat_chansts_32() argument
221 u8 ver = chan->device->version; ioat_chansts_32()
228 status_lo = readl(chan->reg_base + IOAT_CHANSTS_OFFSET_LOW(ver)); ioat_chansts_32()
229 status = readl(chan->reg_base + IOAT_CHANSTS_OFFSET_HIGH(ver)); ioat_chansts_32()
238 static inline u64 ioat_chansts(struct ioat_chan_common *chan) ioat_chansts() argument
240 u8 ver = chan->device->version; ioat_chansts()
245 status = readq(chan->reg_base + IOAT_CHANSTS_OFFSET(ver)); ioat_chansts()
247 status = ioat_chansts_32(chan); ioat_chansts()
256 static inline void ioat_start(struct ioat_chan_common *chan) ioat_start() argument
258 u8 ver = chan->device->version; ioat_start()
260 writeb(IOAT_CHANCMD_START, chan->reg_base + IOAT_CHANCMD_OFFSET(ver)); ioat_start()
268 static inline u32 ioat_chanerr(struct ioat_chan_common *chan) ioat_chanerr() argument
270 return readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat_chanerr()
273 static inline void ioat_suspend(struct ioat_chan_common *chan) ioat_suspend() argument
275 u8 ver = chan->device->version; ioat_suspend()
277 writeb(IOAT_CHANCMD_SUSPEND, chan->reg_base + IOAT_CHANCMD_OFFSET(ver)); ioat_suspend()
280 static inline void ioat_reset(struct ioat_chan_common *chan) ioat_reset() argument
282 u8 ver = chan->device->version; ioat_reset()
284 writeb(IOAT_CHANCMD_RESET, chan->reg_base + IOAT_CHANCMD_OFFSET(ver)); ioat_reset()
287 static inline bool ioat_reset_pending(struct ioat_chan_common *chan) ioat_reset_pending() argument
289 u8 ver = chan->device->version; ioat_reset_pending()
292 cmd = readb(chan->reg_base + IOAT_CHANCMD_OFFSET(ver)); ioat_reset_pending()
298 struct ioat_chan_common *chan = &ioat->base; ioat_set_chainaddr() local
301 chan->reg_base + IOAT1_CHAINADDR_OFFSET_LOW); ioat_set_chainaddr()
303 chan->reg_base + IOAT1_CHAINADDR_OFFSET_HIGH); ioat_set_chainaddr()
338 dma_addr_t ioat_get_current_completion(struct ioat_chan_common *chan);
340 struct ioat_chan_common *chan, int idx);
343 bool ioat_cleanup_preamble(struct ioat_chan_common *chan,
348 void ioat_stop(struct ioat_chan_common *chan);
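
ioat_chansts_32() above is the fallback for hardware without a usable readq: the 64-bit channel status is read as two 32-bit halves. The combine step falls outside the hits shown, but the conventional form is (high << 32) | low, sketched here with plain functions standing in for the MMIO reads:

#include <stdio.h>
#include <stdint.h>

static uint32_t demo_readl_low(void)  { return 0x9abcdef0; }  /* CHANSTS_OFFSET_LOW */
static uint32_t demo_readl_high(void) { return 0x12345678; }  /* CHANSTS_OFFSET_HIGH */

static uint64_t demo_chansts_32(void)
{
	uint64_t status_lo = demo_readl_low();
	uint64_t status    = demo_readl_high();

	return (status << 32) | status_lo;    /* assumed combine, elided above */
}

int main(void)
{
	printf("chansts = %#llx\n", (unsigned long long)demo_chansts_32());
	return 0;
}
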
H A Ddma_v3.c307 static u64 ioat3_get_current_completion(struct ioat_chan_common *chan) ioat3_get_current_completion() argument
312 completion = *chan->completion; ioat3_get_current_completion()
315 dev_dbg(to_dev(chan), "%s: phys_complete: %#llx\n", __func__, ioat3_get_current_completion()
321 static bool ioat3_cleanup_preamble(struct ioat_chan_common *chan, ioat3_cleanup_preamble() argument
324 *phys_complete = ioat3_get_current_completion(chan); ioat3_cleanup_preamble()
325 if (*phys_complete == chan->last_completion) ioat3_cleanup_preamble()
328 clear_bit(IOAT_COMPLETION_ACK, &chan->state); ioat3_cleanup_preamble()
329 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat3_cleanup_preamble()
373 struct ioat_chan_common *chan = &ioat->base; __cleanup() local
374 struct ioatdma_device *device = chan->device; __cleanup()
380 dev_dbg(to_dev(chan), "%s: head: %#x tail: %#x issued: %#x\n", __cleanup()
434 chan->last_completion = phys_complete; __cleanup()
437 dev_dbg(to_dev(chan), "%s: cancel completion timeout\n", __cleanup()
439 clear_bit(IOAT_COMPLETION_PENDING, &chan->state); __cleanup()
440 mod_timer(&chan->timer, jiffies + IDLE_TIMEOUT); __cleanup()
444 chan->device->reg_base + IOAT_INTRDELAY_OFFSET); __cleanup()
449 struct ioat_chan_common *chan = &ioat->base; ioat3_cleanup() local
452 spin_lock_bh(&chan->cleanup_lock); ioat3_cleanup()
454 if (ioat3_cleanup_preamble(chan, &phys_complete)) ioat3_cleanup()
457 if (is_ioat_halted(*chan->completion)) { ioat3_cleanup()
458 u32 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat3_cleanup()
461 mod_timer(&chan->timer, jiffies + IDLE_TIMEOUT); ioat3_cleanup()
466 spin_unlock_bh(&chan->cleanup_lock); ioat3_cleanup()
472 struct ioat_chan_common *chan = &ioat->base; ioat3_cleanup_event() local
475 if (!test_bit(IOAT_RUN, &chan->state)) ioat3_cleanup_event()
482 struct ioat_chan_common *chan = &ioat->base; ioat3_restart_channel() local
485 ioat2_quiesce(chan, 0); ioat3_restart_channel()
486 if (ioat3_cleanup_preamble(chan, &phys_complete)) ioat3_restart_channel()
494 struct ioat_chan_common *chan = &ioat->base; ioat3_eh() local
495 struct pci_dev *pdev = to_pdev(chan); ioat3_eh()
505 if (ioat3_cleanup_preamble(chan, &phys_complete)) ioat3_eh()
508 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat3_eh()
511 dev_dbg(to_dev(chan), "%s: error = %x:%x\n", ioat3_eh()
540 dev_err(to_dev(chan), "%s: fatal error (%x:%x)\n", ioat3_eh()
555 writel(chanerr, chan->reg_base + IOAT_CHANERR_OFFSET); ioat3_eh()
559 *chan->completion = desc->txd.phys; ioat3_eh()
568 struct ioat_chan_common *chan = &ioat->base; check_active() local
571 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); check_active()
575 if (test_and_clear_bit(IOAT_CHAN_ACTIVE, &chan->state)) check_active()
576 mod_timer(&chan->timer, jiffies + IDLE_TIMEOUT); check_active()
587 mod_timer(&chan->timer, jiffies + IDLE_TIMEOUT); check_active()
595 struct ioat_chan_common *chan = &ioat->base; ioat3_timer_event() local
599 status = ioat_chansts(chan); ioat3_timer_event()
607 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat3_timer_event()
608 dev_err(to_dev(chan), "%s: Channel halted (%x)\n", ioat3_timer_event()
610 if (test_bit(IOAT_RUN, &chan->state)) ioat3_timer_event()
620 spin_lock_bh(&chan->cleanup_lock); ioat3_timer_event()
621 if (ioat_cleanup_preamble(chan, &phys_complete)) ioat3_timer_event()
623 else if (test_bit(IOAT_COMPLETION_ACK, &chan->state)) { ioat3_timer_event()
627 spin_unlock_bh(&chan->cleanup_lock); ioat3_timer_event()
630 set_bit(IOAT_COMPLETION_ACK, &chan->state); ioat3_timer_event()
631 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat3_timer_event()
636 mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT); ioat3_timer_event()
642 spin_unlock_bh(&chan->cleanup_lock); ioat3_timer_event()
753 ioat3_prep_xor(struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, ioat3_prep_xor() argument
756 return __ioat3_prep_xor_lock(chan, NULL, dest, src, src_cnt, len, flags); ioat3_prep_xor()
760 ioat3_prep_xor_val(struct dma_chan *chan, dma_addr_t *src, ioat3_prep_xor_val() argument
769 return __ioat3_prep_xor_lock(chan, result, src[0], &src[1], ioat3_prep_xor_val()
842 struct ioat_chan_common *chan = &ioat->base; __ioat3_prep_pq_lock() local
843 struct ioatdma_device *device = chan->device; __ioat3_prep_pq_lock()
856 dev_dbg(to_dev(chan), "%s\n", __func__); __ioat3_prep_pq_lock()
966 struct ioat_chan_common *chan = &ioat->base; __ioat3_prep_pq16_lock() local
967 struct ioatdma_device *device = chan->device; __ioat3_prep_pq16_lock()
978 dev_dbg(to_dev(chan), "%s\n", __func__); __ioat3_prep_pq16_lock()
1004 dev_err(to_dev(chan), __ioat3_prep_pq16_lock()
1071 ioat3_prep_pq(struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, ioat3_prep_pq() argument
1095 __ioat3_prep_pq16_lock(chan, NULL, dst, single_source, ioat3_prep_pq()
1098 __ioat3_prep_pq_lock(chan, NULL, dst, single_source, 2, ioat3_prep_pq()
1103 __ioat3_prep_pq16_lock(chan, NULL, dst, src, src_cnt, ioat3_prep_pq()
1105 __ioat3_prep_pq_lock(chan, NULL, dst, src, src_cnt, ioat3_prep_pq()
1111 ioat3_prep_pq_val(struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, ioat3_prep_pq_val() argument
1127 __ioat3_prep_pq16_lock(chan, pqres, pq, src, src_cnt, scf, len, ioat3_prep_pq_val()
1129 __ioat3_prep_pq_lock(chan, pqres, pq, src, src_cnt, scf, len, ioat3_prep_pq_val()
1134 ioat3_prep_pqxor(struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src, ioat3_prep_pqxor() argument
1146 __ioat3_prep_pq16_lock(chan, NULL, pq, src, src_cnt, scf, len, ioat3_prep_pqxor()
1148 __ioat3_prep_pq_lock(chan, NULL, pq, src, src_cnt, scf, len, ioat3_prep_pqxor()
1153 ioat3_prep_pqxor_val(struct dma_chan *chan, dma_addr_t *src, ioat3_prep_pqxor_val() argument
1171 __ioat3_prep_pq16_lock(chan, result, pq, &src[1], src_cnt - 1, ioat3_prep_pqxor_val()
1173 __ioat3_prep_pq_lock(chan, result, pq, &src[1], src_cnt - 1, ioat3_prep_pqxor_val()
1507 struct ioat_chan_common *chan; ioat3_irq_reinit() local
1509 chan = ioat_chan_by_index(device, i); ioat3_irq_reinit()
1510 devm_free_irq(&pdev->dev, msix->vector, chan); ioat3_irq_reinit()
1529 static int ioat3_reset_hw(struct ioat_chan_common *chan) ioat3_reset_hw() argument
1534 struct ioatdma_device *device = chan->device; ioat3_reset_hw()
1540 ioat2_quiesce(chan, msecs_to_jiffies(100)); ioat3_reset_hw()
1542 chanerr = readl(chan->reg_base + IOAT_CHANERR_OFFSET); ioat3_reset_hw()
1543 writel(chanerr, chan->reg_base + IOAT_CHANERR_OFFSET); ioat3_reset_hw()
1568 err = ioat2_reset_sync(chan, msecs_to_jiffies(200)); ioat3_reset_hw()
1582 struct ioat_chan_common *chan; ioat3_intr_quirk() local
1593 chan = to_chan_common(c); ioat3_intr_quirk()
1594 errmask = readl(chan->reg_base + ioat3_intr_quirk()
1598 writel(errmask, chan->reg_base + ioat3_intr_quirk()
1610 struct ioat_chan_common *chan; ioat3_dma_probe() local
1702 chan = to_chan_common(c); ioat3_dma_probe()
1704 chan->reg_base + IOAT_DCACTRL_OFFSET); ioat3_dma_probe()
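
Both reset paths above (ioat2_reset_hw() and ioat3_reset_hw()) read CHANERR and immediately write the same value back, the usual pattern for a write-one-to-clear status register. Treating that semantic as an assumption, a small userspace model:

#include <stdio.h>
#include <stdint.h>

static uint32_t chanerr_reg = 0x12;  /* pretend two error bits are pending */

static uint32_t demo_readl(void)      { return chanerr_reg; }
static void demo_writel(uint32_t val) { chanerr_reg &= ~val; }  /* W1C model */

int main(void)
{
	uint32_t chanerr = demo_readl();

	printf("CHANERR before: %#x\n", chanerr);
	demo_writel(chanerr);            /* writing the read-back value acks it */
	printf("CHANERR after:  %#x\n", demo_readl());
	return 0;
}
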
/linux-4.1.27/drivers/net/ethernet/ti/
H A Ddavinci_cpdma.c134 #define chan_read(chan, fld) __raw_readl((chan)->fld)
137 #define chan_write(chan, fld, v) __raw_writel(v, (chan)->fld)
140 #define cpdma_desc_to_port(chan, mode, directed) \
142 if (!is_rx_chan(chan) && ((directed == 1) || \
497 struct cpdma_chan *chan; cpdma_chan_create() local
504 chan = devm_kzalloc(ctlr->dev, sizeof(*chan), GFP_KERNEL); cpdma_chan_create()
505 if (!chan) cpdma_chan_create()
511 devm_kfree(ctlr->dev, chan); cpdma_chan_create()
515 chan->ctlr = ctlr; cpdma_chan_create()
516 chan->state = CPDMA_STATE_IDLE; cpdma_chan_create()
517 chan->chan_num = chan_num; cpdma_chan_create()
518 chan->handler = handler; cpdma_chan_create()
520 if (is_rx_chan(chan)) { cpdma_chan_create()
521 chan->hdp = ctlr->params.rxhdp + offset; cpdma_chan_create()
522 chan->cp = ctlr->params.rxcp + offset; cpdma_chan_create()
523 chan->rxfree = ctlr->params.rxfree + offset; cpdma_chan_create()
524 chan->int_set = CPDMA_RXINTMASKSET; cpdma_chan_create()
525 chan->int_clear = CPDMA_RXINTMASKCLEAR; cpdma_chan_create()
526 chan->td = CPDMA_RXTEARDOWN; cpdma_chan_create()
527 chan->dir = DMA_FROM_DEVICE; cpdma_chan_create()
529 chan->hdp = ctlr->params.txhdp + offset; cpdma_chan_create()
530 chan->cp = ctlr->params.txcp + offset; cpdma_chan_create()
531 chan->int_set = CPDMA_TXINTMASKSET; cpdma_chan_create()
532 chan->int_clear = CPDMA_TXINTMASKCLEAR; cpdma_chan_create()
533 chan->td = CPDMA_TXTEARDOWN; cpdma_chan_create()
534 chan->dir = DMA_TO_DEVICE; cpdma_chan_create()
536 chan->mask = BIT(chan_linear(chan)); cpdma_chan_create()
538 spin_lock_init(&chan->lock); cpdma_chan_create()
540 ctlr->channels[chan_num] = chan; cpdma_chan_create()
542 return chan; cpdma_chan_create()
546 int cpdma_chan_destroy(struct cpdma_chan *chan) cpdma_chan_destroy() argument
551 if (!chan) cpdma_chan_destroy()
553 ctlr = chan->ctlr; cpdma_chan_destroy()
556 if (chan->state != CPDMA_STATE_IDLE) cpdma_chan_destroy()
557 cpdma_chan_stop(chan); cpdma_chan_destroy()
558 ctlr->channels[chan->chan_num] = NULL; cpdma_chan_destroy()
564 int cpdma_chan_get_stats(struct cpdma_chan *chan, cpdma_chan_get_stats() argument
568 if (!chan) cpdma_chan_get_stats()
570 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_get_stats()
571 memcpy(stats, &chan->stats, sizeof(*stats)); cpdma_chan_get_stats()
572 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_get_stats()
577 int cpdma_chan_dump(struct cpdma_chan *chan) cpdma_chan_dump() argument
580 struct device *dev = chan->ctlr->dev; cpdma_chan_dump()
582 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_dump()
585 chan->chan_num, is_rx_chan(chan) ? "rx" : "tx", cpdma_chan_dump()
586 chan_linear(chan), cpdma_state_str[chan->state]); cpdma_chan_dump()
587 dev_info(dev, "\thdp: %x\n", chan_read(chan, hdp)); cpdma_chan_dump()
588 dev_info(dev, "\tcp: %x\n", chan_read(chan, cp)); cpdma_chan_dump()
589 if (chan->rxfree) { cpdma_chan_dump()
591 chan_read(chan, rxfree)); cpdma_chan_dump()
595 chan->stats.head_enqueue); cpdma_chan_dump()
597 chan->stats.tail_enqueue); cpdma_chan_dump()
599 chan->stats.pad_enqueue); cpdma_chan_dump()
601 chan->stats.misqueued); cpdma_chan_dump()
603 chan->stats.desc_alloc_fail); cpdma_chan_dump()
605 chan->stats.pad_alloc_fail); cpdma_chan_dump()
607 chan->stats.runt_receive_buff); cpdma_chan_dump()
609 chan->stats.runt_transmit_buff); cpdma_chan_dump()
611 chan->stats.empty_dequeue); cpdma_chan_dump()
613 chan->stats.busy_dequeue); cpdma_chan_dump()
615 chan->stats.good_dequeue); cpdma_chan_dump()
617 chan->stats.requeue); cpdma_chan_dump()
619 chan->stats.teardown_dequeue); cpdma_chan_dump()
621 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_dump()
625 static void __cpdma_chan_submit(struct cpdma_chan *chan, __cpdma_chan_submit() argument
628 struct cpdma_ctlr *ctlr = chan->ctlr; __cpdma_chan_submit()
629 struct cpdma_desc __iomem *prev = chan->tail; __cpdma_chan_submit()
637 if (!chan->head) { __cpdma_chan_submit()
638 chan->stats.head_enqueue++; __cpdma_chan_submit()
639 chan->head = desc; __cpdma_chan_submit()
640 chan->tail = desc; __cpdma_chan_submit()
641 if (chan->state == CPDMA_STATE_ACTIVE) __cpdma_chan_submit()
642 chan_write(chan, hdp, desc_dma); __cpdma_chan_submit()
648 chan->tail = desc; __cpdma_chan_submit()
649 chan->stats.tail_enqueue++; __cpdma_chan_submit()
654 (chan->state == CPDMA_STATE_ACTIVE)) { __cpdma_chan_submit()
656 chan_write(chan, hdp, desc_dma); __cpdma_chan_submit()
657 chan->stats.misqueued++; __cpdma_chan_submit()
661 int cpdma_chan_submit(struct cpdma_chan *chan, void *token, void *data, cpdma_chan_submit() argument
664 struct cpdma_ctlr *ctlr = chan->ctlr; cpdma_chan_submit()
671 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_submit()
673 if (chan->state == CPDMA_STATE_TEARDOWN) { cpdma_chan_submit()
678 desc = cpdma_desc_alloc(ctlr->pool, 1, is_rx_chan(chan)); cpdma_chan_submit()
680 chan->stats.desc_alloc_fail++; cpdma_chan_submit()
687 chan->stats.runt_transmit_buff++; cpdma_chan_submit()
690 buffer = dma_map_single(ctlr->dev, data, len, chan->dir); cpdma_chan_submit()
699 cpdma_desc_to_port(chan, mode, directed); cpdma_chan_submit()
709 __cpdma_chan_submit(chan, desc); cpdma_chan_submit()
711 if (chan->state == CPDMA_STATE_ACTIVE && chan->rxfree) cpdma_chan_submit()
712 chan_write(chan, rxfree, 1); cpdma_chan_submit()
714 chan->count++; cpdma_chan_submit()
717 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_submit()
722 bool cpdma_check_free_tx_desc(struct cpdma_chan *chan) cpdma_check_free_tx_desc() argument
727 struct cpdma_ctlr *ctlr = chan->ctlr; cpdma_check_free_tx_desc()
745 static void __cpdma_chan_free(struct cpdma_chan *chan, __cpdma_chan_free() argument
749 struct cpdma_ctlr *ctlr = chan->ctlr; __cpdma_chan_free()
759 dma_unmap_single(ctlr->dev, buff_dma, origlen, chan->dir); __cpdma_chan_free()
761 (*chan->handler)(token, outlen, status); __cpdma_chan_free()
764 static int __cpdma_chan_process(struct cpdma_chan *chan) __cpdma_chan_process() argument
766 struct cpdma_ctlr *ctlr = chan->ctlr; __cpdma_chan_process()
774 spin_lock_irqsave(&chan->lock, flags); __cpdma_chan_process()
776 desc = chan->head; __cpdma_chan_process()
778 chan->stats.empty_dequeue++; __cpdma_chan_process()
787 chan->stats.busy_dequeue++; __cpdma_chan_process()
798 chan->head = desc_from_phys(pool, desc_read(desc, hw_next)); __cpdma_chan_process()
799 chan_write(chan, cp, desc_dma); __cpdma_chan_process()
800 chan->count--; __cpdma_chan_process()
801 chan->stats.good_dequeue++; __cpdma_chan_process()
804 chan->stats.requeue++; __cpdma_chan_process()
805 chan_write(chan, hdp, desc_phys(pool, chan->head)); __cpdma_chan_process()
808 spin_unlock_irqrestore(&chan->lock, flags); __cpdma_chan_process()
814 __cpdma_chan_free(chan, desc, outlen, cb_status); __cpdma_chan_process()
818 spin_unlock_irqrestore(&chan->lock, flags); __cpdma_chan_process()
822 int cpdma_chan_process(struct cpdma_chan *chan, int quota) cpdma_chan_process() argument
826 if (chan->state != CPDMA_STATE_ACTIVE) cpdma_chan_process()
830 ret = __cpdma_chan_process(chan); cpdma_chan_process()
839 int cpdma_chan_start(struct cpdma_chan *chan) cpdma_chan_start() argument
841 struct cpdma_ctlr *ctlr = chan->ctlr; cpdma_chan_start()
845 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_start()
846 if (chan->state != CPDMA_STATE_IDLE) { cpdma_chan_start()
847 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_start()
851 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_start()
854 dma_reg_write(ctlr, chan->int_set, chan->mask); cpdma_chan_start()
855 chan->state = CPDMA_STATE_ACTIVE; cpdma_chan_start()
856 if (chan->head) { cpdma_chan_start()
857 chan_write(chan, hdp, desc_phys(pool, chan->head)); cpdma_chan_start()
858 if (chan->rxfree) cpdma_chan_start()
859 chan_write(chan, rxfree, chan->count); cpdma_chan_start()
862 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_start()
867 int cpdma_chan_stop(struct cpdma_chan *chan) cpdma_chan_stop() argument
869 struct cpdma_ctlr *ctlr = chan->ctlr; cpdma_chan_stop()
875 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_stop()
876 if (chan->state == CPDMA_STATE_TEARDOWN) { cpdma_chan_stop()
877 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_stop()
881 chan->state = CPDMA_STATE_TEARDOWN; cpdma_chan_stop()
882 dma_reg_write(ctlr, chan->int_clear, chan->mask); cpdma_chan_stop()
885 dma_reg_write(ctlr, chan->td, chan_linear(chan)); cpdma_chan_stop()
890 u32 cp = chan_read(chan, cp); cpdma_chan_stop()
897 chan_write(chan, cp, CPDMA_TEARDOWN_VALUE); cpdma_chan_stop()
900 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_stop()
902 ret = __cpdma_chan_process(chan); cpdma_chan_stop()
906 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_stop()
909 while (chan->head) { cpdma_chan_stop()
910 struct cpdma_desc __iomem *desc = chan->head; cpdma_chan_stop()
914 chan->head = desc_from_phys(pool, next_dma); cpdma_chan_stop()
915 chan->count--; cpdma_chan_stop()
916 chan->stats.teardown_dequeue++; cpdma_chan_stop()
919 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_stop()
920 __cpdma_chan_free(chan, desc, 0, -ENOSYS); cpdma_chan_stop()
921 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_stop()
924 chan->state = CPDMA_STATE_IDLE; cpdma_chan_stop()
925 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_stop()
930 int cpdma_chan_int_ctrl(struct cpdma_chan *chan, bool enable) cpdma_chan_int_ctrl() argument
934 spin_lock_irqsave(&chan->lock, flags); cpdma_chan_int_ctrl()
935 if (chan->state != CPDMA_STATE_ACTIVE) { cpdma_chan_int_ctrl()
936 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_int_ctrl()
940 dma_reg_write(chan->ctlr, enable ? chan->int_set : chan->int_clear, cpdma_chan_int_ctrl()
941 chan->mask); cpdma_chan_int_ctrl()
942 spin_unlock_irqrestore(&chan->lock, flags); cpdma_chan_int_ctrl()
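
__cpdma_chan_submit() above has three enqueue outcomes: an empty queue primes head, tail, and the hdp register; a busy queue chains the descriptor after tail; and if the engine had already hit end-of-queue on the old tail, hdp is rewritten to restart it (the "misqueued" counter). The end-of-queue test itself falls outside the hits shown, so it is modeled below with a plain flag:

#include <stdio.h>
#include <stddef.h>

struct desc { struct desc *hw_next; int eoq; };

static struct desc *head, *tail, *hdp_reg;   /* hdp_reg models the register */

static void chan_submit(struct desc *d, int active)
{
	d->hw_next = NULL;
	if (!head) {
		head = tail = d;              /* head_enqueue path */
		if (active)
			hdp_reg = d;
	} else {
		struct desc *prev = tail;

		prev->hw_next = d;            /* chain onto the tail */
		tail = d;
		if (prev->eoq && active)      /* engine stopped: misqueued fix */
			hdp_reg = d;
	}
}

int main(void)
{
	struct desc a = {0}, b = {0}, c = {0};

	chan_submit(&a, 1);
	printf("hdp -> a? %d\n", hdp_reg == &a);
	a.eoq = 1;                            /* pretend the engine finished a */
	chan_submit(&b, 1);
	printf("hdp -> b (misqueue fix)? %d\n", hdp_reg == &b);
	chan_submit(&c, 1);
	printf("tail -> c? %d\n", tail == &c);
	return 0;
}
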
H A Ddavinci_cpdma.h20 #define tx_chan_num(chan) (chan)
21 #define rx_chan_num(chan) ((chan) + CPDMA_MAX_CHANNELS)
22 #define is_rx_chan(chan) ((chan)->chan_num >= CPDMA_MAX_CHANNELS)
23 #define is_tx_chan(chan) (!is_rx_chan(chan))
25 #define chan_linear(chan) __chan_linear((chan)->chan_num)
84 int cpdma_chan_destroy(struct cpdma_chan *chan);
85 int cpdma_chan_start(struct cpdma_chan *chan);
86 int cpdma_chan_stop(struct cpdma_chan *chan);
87 int cpdma_chan_dump(struct cpdma_chan *chan);
89 int cpdma_chan_get_stats(struct cpdma_chan *chan,
91 int cpdma_chan_submit(struct cpdma_chan *chan, void *token, void *data,
93 int cpdma_chan_process(struct cpdma_chan *chan, int quota);
97 int cpdma_chan_int_ctrl(struct cpdma_chan *chan, bool enable);
98 bool cpdma_check_free_tx_desc(struct cpdma_chan *chan);
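
The numbering macros above fold tx and rx channels into one index space: tx channels keep their number, rx channels are offset by CPDMA_MAX_CHANNELS, and anything at or past the offset is rx. CPDMA_MAX_CHANNELS is assumed to be 8 here for illustration, and is_rx_num() restates is_rx_chan() on bare numbers rather than channel structs:

#include <stdio.h>

#define CPDMA_MAX_CHANNELS 8  /* assumed for illustration */
#define tx_chan_num(chan)  (chan)
#define rx_chan_num(chan)  ((chan) + CPDMA_MAX_CHANNELS)
#define is_rx_num(num)     ((num) >= CPDMA_MAX_CHANNELS)

int main(void)
{
	int chan = 3;

	printf("tx3 -> %d, rx? %d\n",
	       tx_chan_num(chan), is_rx_num(tx_chan_num(chan)));
	printf("rx3 -> %d, rx? %d\n",
	       rx_chan_num(chan), is_rx_num(rx_chan_num(chan)));
	return 0;
}
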
/linux-4.1.27/drivers/video/fbdev/intelfb/
H A Dintelfb_i2c.c56 struct intelfb_i2c_chan *chan = data; intelfb_gpio_setscl() local
57 struct intelfb_info *dinfo = chan->dinfo; intelfb_gpio_setscl()
60 OUTREG(chan->reg, (state ? SCL_VAL_OUT : 0) | intelfb_gpio_setscl()
62 val = INREG(chan->reg); intelfb_gpio_setscl()
67 struct intelfb_i2c_chan *chan = data; intelfb_gpio_setsda() local
68 struct intelfb_info *dinfo = chan->dinfo; intelfb_gpio_setsda()
71 OUTREG(chan->reg, (state ? SDA_VAL_OUT : 0) | intelfb_gpio_setsda()
73 val = INREG(chan->reg); intelfb_gpio_setsda()
78 struct intelfb_i2c_chan *chan = data; intelfb_gpio_getscl() local
79 struct intelfb_info *dinfo = chan->dinfo; intelfb_gpio_getscl()
82 OUTREG(chan->reg, SCL_DIR_MASK); intelfb_gpio_getscl()
83 OUTREG(chan->reg, 0); intelfb_gpio_getscl()
84 val = INREG(chan->reg); intelfb_gpio_getscl()
90 struct intelfb_i2c_chan *chan = data; intelfb_gpio_getsda() local
91 struct intelfb_info *dinfo = chan->dinfo; intelfb_gpio_getsda()
94 OUTREG(chan->reg, SDA_DIR_MASK); intelfb_gpio_getsda()
95 OUTREG(chan->reg, 0); intelfb_gpio_getsda()
96 val = INREG(chan->reg); intelfb_gpio_getsda()
101 struct intelfb_i2c_chan *chan, intelfb_setup_i2c_bus()
107 chan->dinfo = dinfo; intelfb_setup_i2c_bus()
108 chan->reg = reg; intelfb_setup_i2c_bus()
109 snprintf(chan->adapter.name, sizeof(chan->adapter.name), intelfb_setup_i2c_bus()
111 chan->adapter.class = class; intelfb_setup_i2c_bus()
112 chan->adapter.owner = THIS_MODULE; intelfb_setup_i2c_bus()
113 chan->adapter.algo_data = &chan->algo; intelfb_setup_i2c_bus()
114 chan->adapter.dev.parent = &chan->dinfo->pdev->dev; intelfb_setup_i2c_bus()
115 chan->algo.setsda = intelfb_gpio_setsda; intelfb_setup_i2c_bus()
116 chan->algo.setscl = intelfb_gpio_setscl; intelfb_setup_i2c_bus()
117 chan->algo.getsda = intelfb_gpio_getsda; intelfb_setup_i2c_bus()
118 chan->algo.getscl = intelfb_gpio_getscl; intelfb_setup_i2c_bus()
119 chan->algo.udelay = 40; intelfb_setup_i2c_bus()
120 chan->algo.timeout = 20; intelfb_setup_i2c_bus()
121 chan->algo.data = chan; intelfb_setup_i2c_bus()
123 i2c_set_adapdata(&chan->adapter, chan); intelfb_setup_i2c_bus()
126 intelfb_gpio_setsda(chan, 1); intelfb_setup_i2c_bus()
127 intelfb_gpio_setscl(chan, 1); intelfb_setup_i2c_bus()
130 rc = i2c_bit_add_bus(&chan->adapter); intelfb_setup_i2c_bus()
100 intelfb_setup_i2c_bus(struct intelfb_info *dinfo, struct intelfb_i2c_chan *chan, const u32 reg, const char *name, int class) intelfb_setup_i2c_bus() argument
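
intelfb_setup_i2c_bus() above is the standard i2c-algo-bit registration recipe: four GPIO callbacks plus timing go into struct i2c_algo_bit_data, the adapter points at it, and i2c_bit_add_bus() does the rest. A trimmed kernel-side sketch of just that wiring; my_chan and the callbacks are hypothetical stand-ins, and adapter naming/ownership setup is omitted:

#include <linux/i2c.h>
#include <linux/i2c-algo-bit.h>

struct my_chan {                     /* hypothetical stand-in for the chan struct */
	struct i2c_adapter adapter;
	struct i2c_algo_bit_data algo;
};

static void my_setsda(void *data, int state) { /* drive SDA via GPIO */ }
static void my_setscl(void *data, int state) { /* drive SCL via GPIO */ }
static int  my_getsda(void *data) { return 1; /* read SDA back */ }
static int  my_getscl(void *data) { return 1; /* read SCL back */ }

static int demo_register_bitbang(struct my_chan *chan)
{
	chan->algo.setsda  = my_setsda;
	chan->algo.setscl  = my_setscl;
	chan->algo.getsda  = my_getsda;
	chan->algo.getscl  = my_getscl;
	chan->algo.udelay  = 40;         /* timing values as in the listing */
	chan->algo.timeout = 20;
	chan->algo.data    = chan;
	chan->adapter.algo_data = &chan->algo;
	i2c_set_adapdata(&chan->adapter, chan);
	return i2c_bit_add_bus(&chan->adapter);
}
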
/linux-4.1.27/drivers/soc/ti/
H A Dknav_dma.c137 static bool check_config(struct knav_dma_chan *chan, struct knav_dma_cfg *cfg) check_config() argument
139 if (!memcmp(&chan->cfg, cfg, sizeof(*cfg))) check_config()
145 static int chan_start(struct knav_dma_chan *chan, chan_start() argument
150 spin_lock(&chan->lock); chan_start()
151 if ((chan->direction == DMA_MEM_TO_DEV) && chan->reg_chan) { chan_start()
156 writel_relaxed(v, &chan->reg_chan->mode); chan_start()
157 writel_relaxed(DMA_ENABLE, &chan->reg_chan->control); chan_start()
160 if (chan->reg_tx_sched) chan_start()
161 writel_relaxed(cfg->u.tx.priority, &chan->reg_tx_sched->prio); chan_start()
163 if (chan->reg_rx_flow) { chan_start()
179 writel_relaxed(v, &chan->reg_rx_flow->control); chan_start()
180 writel_relaxed(0, &chan->reg_rx_flow->tags); chan_start()
181 writel_relaxed(0, &chan->reg_rx_flow->tag_sel); chan_start()
185 writel_relaxed(v, &chan->reg_rx_flow->fdq_sel[0]); chan_start()
189 writel_relaxed(v, &chan->reg_rx_flow->fdq_sel[1]); chan_start()
191 writel_relaxed(0, &chan->reg_rx_flow->thresh[0]); chan_start()
192 writel_relaxed(0, &chan->reg_rx_flow->thresh[1]); chan_start()
193 writel_relaxed(0, &chan->reg_rx_flow->thresh[2]); chan_start()
197 memcpy(&chan->cfg, cfg, sizeof(*cfg)); chan_start()
198 spin_unlock(&chan->lock); chan_start()
203 static int chan_teardown(struct knav_dma_chan *chan) chan_teardown() argument
207 if (!chan->reg_chan) chan_teardown()
211 writel_relaxed(DMA_TEARDOWN, &chan->reg_chan->control); chan_teardown()
216 value = readl_relaxed(&chan->reg_chan->control); chan_teardown()
221 if (readl_relaxed(&chan->reg_chan->control) & DMA_ENABLE) { chan_teardown()
229 static void chan_stop(struct knav_dma_chan *chan) chan_stop() argument
231 spin_lock(&chan->lock); chan_stop()
232 if (chan->reg_rx_flow) { chan_stop()
234 writel_relaxed(0, &chan->reg_rx_flow->fdq_sel[0]); chan_stop()
235 writel_relaxed(0, &chan->reg_rx_flow->fdq_sel[1]); chan_stop()
236 writel_relaxed(0, &chan->reg_rx_flow->thresh[0]); chan_stop()
237 writel_relaxed(0, &chan->reg_rx_flow->thresh[1]); chan_stop()
238 writel_relaxed(0, &chan->reg_rx_flow->thresh[2]); chan_stop()
242 chan_teardown(chan); chan_stop()
245 if (chan->reg_rx_flow) { chan_stop()
246 writel_relaxed(0, &chan->reg_rx_flow->control); chan_stop()
247 writel_relaxed(0, &chan->reg_rx_flow->tags); chan_stop()
248 writel_relaxed(0, &chan->reg_rx_flow->tag_sel); chan_stop()
251 memset(&chan->cfg, 0, sizeof(struct knav_dma_cfg)); chan_stop()
252 spin_unlock(&chan->lock); chan_stop()
313 struct knav_dma_chan *chan) dma_debug_show_channels()
318 ((chan->direction == DMA_MEM_TO_DEV) ? "tx chan" : "rx flow"), dma_debug_show_channels()
319 chan_number(chan)); dma_debug_show_channels()
321 if (chan->direction == DMA_MEM_TO_DEV) { dma_debug_show_channels()
323 chan->cfg.u.tx.filt_einfo, dma_debug_show_channels()
324 chan->cfg.u.tx.filt_pswords, dma_debug_show_channels()
325 chan->cfg.u.tx.priority); dma_debug_show_channels()
328 chan->cfg.u.rx.einfo_present, dma_debug_show_channels()
329 chan->cfg.u.rx.psinfo_present, dma_debug_show_channels()
330 chan->cfg.u.rx.desc_type); dma_debug_show_channels()
332 chan->cfg.u.rx.dst_q, dma_debug_show_channels()
333 chan->cfg.u.rx.thresh); dma_debug_show_channels()
335 seq_printf(s, "[%d]", chan->cfg.u.rx.fdq[i]); dma_debug_show_channels()
343 struct knav_dma_chan *chan; dma_debug_show_devices() local
345 list_for_each_entry(chan, &dma->chan_list, list) { dma_debug_show_devices()
346 if (atomic_read(&chan->ref_count)) dma_debug_show_devices()
347 dma_debug_show_channels(s, chan); dma_debug_show_devices()
421 struct knav_dma_chan *chan; knav_dma_open_channel() local
463 list_for_each_entry(chan, &dma->chan_list, list) { knav_dma_open_channel()
465 if (chan->channel == chan_num) { knav_dma_open_channel()
470 if (chan->flow == chan_num) { knav_dma_open_channel()
482 if (atomic_read(&chan->ref_count) >= 1) { knav_dma_open_channel()
483 if (!check_config(chan, config)) { knav_dma_open_channel()
490 if (atomic_inc_return(&chan->dma->ref_count) <= 1) knav_dma_open_channel()
491 knav_dma_hw_init(chan->dma); knav_dma_open_channel()
493 if (atomic_inc_return(&chan->ref_count) <= 1) knav_dma_open_channel()
494 chan_start(chan, config); knav_dma_open_channel()
499 return chan; knav_dma_open_channel()
511 struct knav_dma_chan *chan = channel; knav_dma_close_channel() local
518 if (atomic_dec_return(&chan->ref_count) <= 0) knav_dma_close_channel()
519 chan_stop(chan); knav_dma_close_channel()
521 if (atomic_dec_return(&chan->dma->ref_count) <= 0) knav_dma_close_channel()
522 knav_dma_hw_destroy(chan->dma); knav_dma_close_channel()
525 chan->channel, chan->flow, chan->dma->name); knav_dma_close_channel()
555 static int pktdma_init_rx_chan(struct knav_dma_chan *chan, u32 flow) pktdma_init_rx_chan() argument
557 struct knav_dma_device *dma = chan->dma; pktdma_init_rx_chan()
559 chan->flow = flow; pktdma_init_rx_chan()
560 chan->reg_rx_flow = dma->reg_rx_flow + flow; pktdma_init_rx_chan()
561 chan->channel = DMA_INVALID_ID; pktdma_init_rx_chan()
562 dev_dbg(kdev->dev, "rx flow(%d) (%p)\n", chan->flow, chan->reg_rx_flow); pktdma_init_rx_chan()
567 static int pktdma_init_tx_chan(struct knav_dma_chan *chan, u32 channel) pktdma_init_tx_chan() argument
569 struct knav_dma_device *dma = chan->dma; pktdma_init_tx_chan()
571 chan->channel = channel; pktdma_init_tx_chan()
572 chan->reg_chan = dma->reg_tx_chan + channel; pktdma_init_tx_chan()
573 chan->reg_tx_sched = dma->reg_tx_sched + channel; pktdma_init_tx_chan()
574 chan->flow = DMA_INVALID_ID; pktdma_init_tx_chan()
575 dev_dbg(kdev->dev, "tx channel(%d) (%p)\n", chan->channel, chan->reg_chan); pktdma_init_tx_chan()
585 struct knav_dma_chan *chan; pktdma_init_chan() local
588 chan = devm_kzalloc(dev, sizeof(*chan), GFP_KERNEL); pktdma_init_chan()
589 if (!chan) pktdma_init_chan()
592 INIT_LIST_HEAD(&chan->list); pktdma_init_chan()
593 chan->dma = dma; pktdma_init_chan()
594 chan->direction = DMA_NONE; pktdma_init_chan()
595 atomic_set(&chan->ref_count, 0); pktdma_init_chan()
596 spin_lock_init(&chan->lock); pktdma_init_chan()
599 chan->direction = dir; pktdma_init_chan()
600 ret = pktdma_init_tx_chan(chan, chan_num); pktdma_init_chan()
602 chan->direction = dir; pktdma_init_chan()
603 ret = pktdma_init_rx_chan(chan, chan_num); pktdma_init_chan()
608 list_add_tail(&chan->list, &dma->chan_list); pktdma_init_chan()
312 dma_debug_show_channels(struct seq_file *s, struct knav_dma_chan *chan) dma_debug_show_channels() argument
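
knav_dma_open_channel() and knav_dma_close_channel() above gate the hardware on a reference count: the first opener (count going 0 to 1) runs chan_start(), the last closer (count back to 0) runs chan_stop(), and everyone in between only touches the counter. The same logic with the atomics replaced by a plain integer:

#include <stdio.h>

static int ref_count;

static void chan_start(void) { printf("chan_start\n"); }
static void chan_stop(void)  { printf("chan_stop\n"); }

static void open_channel(void)
{
	if (++ref_count <= 1)   /* mirrors atomic_inc_return(...) <= 1 */
		chan_start();
}

static void close_channel(void)
{
	if (--ref_count <= 0)   /* mirrors atomic_dec_return(...) <= 0 */
		chan_stop();
}

int main(void)
{
	open_channel();    /* first user: starts the hardware */
	open_channel();    /* already running: only bumps the count */
	close_channel();
	close_channel();   /* last user: stops the hardware */
	return 0;
}
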
/linux-4.1.27/drivers/firewire/
H A Dnosy.h45 #define PCI_INT_DMA_HLT(chan) (1 << (chan * 2))
46 #define PCI_INT_DMA_PCL(chan) (1 << (chan * 2 + 1))
58 #define DMA_BREG(base, chan) (base + chan * 0x20)
59 #define DMA_SREG(base, chan) (base + chan * 0x10)
99 #define DMA_PREV_PCL(chan) (DMA_BREG(DMA0_PREV_PCL, chan))
106 #define DMA_CURRENT_PCL(chan) (DMA_BREG(DMA0_CURRENT_PCL, chan))
113 #define DMA_CHAN_STAT(chan) (DMA_BREG(DMA0_CHAN_STAT, chan))
127 #define DMA_CHAN_CTRL(chan) (DMA_BREG(DMA0_CHAN_CTRL, chan))
138 #define DMA_READY(chan) (DMA_BREG(DMA0_READY, chan))
156 #define DMA_WORD0_CMP_VALUE(chan) (DMA_SREG(DMA0_WORD0_CMP_VALUE, chan))
163 #define DMA_WORD0_CMP_ENABLE(chan) (DMA_SREG(DMA0_WORD0_CMP_ENABLE, chan))
170 #define DMA_WORD1_CMP_VALUE(chan) (DMA_SREG(DMA0_WORD1_CMP_VALUE, chan))
177 #define DMA_WORD1_CMP_ENABLE(chan) (DMA_SREG(DMA0_WORD1_CMP_ENABLE, chan))
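
All of nosy.h above is address and bit arithmetic: each DMA channel owns an adjacent halt/PCL interrupt-bit pair, base registers sit at a 0x20-byte per-channel stride, and the compare registers at a 0x10-byte stride. A standalone check; the 0x100 base passed in is an arbitrary example value:

#include <stdio.h>

#define PCI_INT_DMA_HLT(chan) (1 << (chan * 2))
#define PCI_INT_DMA_PCL(chan) (1 << (chan * 2 + 1))
#define DMA_BREG(base, chan)  (base + chan * 0x20)
#define DMA_SREG(base, chan)  (base + chan * 0x10)

int main(void)
{
	int chan;

	for (chan = 0; chan < 3; chan++)
		printf("chan %d: HLT=%#x PCL=%#x breg=%#x sreg=%#x\n", chan,
		       PCI_INT_DMA_HLT(chan), PCI_INT_DMA_PCL(chan),
		       DMA_BREG(0x100, chan), DMA_SREG(0x100, chan));
	return 0;
}
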
/linux-4.1.27/drivers/dma/sh/
H A Dusb-dmac.c90 #define to_usb_dmac_chan(c) container_of(c, struct usb_dmac_chan, vc.chan)
164 static u32 usb_dmac_chan_read(struct usb_dmac_chan *chan, u32 reg) usb_dmac_chan_read() argument
166 return readl(chan->iomem + reg); usb_dmac_chan_read()
169 static void usb_dmac_chan_write(struct usb_dmac_chan *chan, u32 reg, u32 data) usb_dmac_chan_write() argument
171 writel(data, chan->iomem + reg); usb_dmac_chan_write()
178 static bool usb_dmac_chan_is_busy(struct usb_dmac_chan *chan) usb_dmac_chan_is_busy() argument
180 u32 chcr = usb_dmac_chan_read(chan, USB_DMACHCR); usb_dmac_chan_is_busy()
196 static void usb_dmac_chan_start_sg(struct usb_dmac_chan *chan, usb_dmac_chan_start_sg() argument
199 struct usb_dmac_desc *desc = chan->desc; usb_dmac_chan_start_sg()
203 WARN_ON_ONCE(usb_dmac_chan_is_busy(chan)); usb_dmac_chan_start_sg()
210 dev_dbg(chan->vc.chan.device->dev, usb_dmac_chan_start_sg()
211 "chan%u: queue sg %p: %u@%pad -> %pad\n", usb_dmac_chan_start_sg()
212 chan->index, sg, sg->size, &src_addr, &dst_addr); usb_dmac_chan_start_sg()
214 usb_dmac_chan_write(chan, USB_DMASAR, src_addr & 0xffffffff); usb_dmac_chan_start_sg()
215 usb_dmac_chan_write(chan, USB_DMADAR, dst_addr & 0xffffffff); usb_dmac_chan_start_sg()
216 usb_dmac_chan_write(chan, USB_DMATCR, usb_dmac_chan_start_sg()
218 usb_dmac_chan_write(chan, USB_DMATEND, usb_dmac_calc_tend(sg->size)); usb_dmac_chan_start_sg()
220 usb_dmac_chan_write(chan, USB_DMACHCR, USB_DMAC_CHCR_TS | usb_dmac_chan_start_sg()
225 static void usb_dmac_chan_start_desc(struct usb_dmac_chan *chan) usb_dmac_chan_start_desc() argument
229 vd = vchan_next_desc(&chan->vc); usb_dmac_chan_start_desc()
231 chan->desc = NULL; usb_dmac_chan_start_desc()
242 chan->desc = to_usb_dmac_desc(vd); usb_dmac_chan_start_desc()
243 chan->desc->sg_index = 0; usb_dmac_chan_start_desc()
244 usb_dmac_chan_start_sg(chan, 0); usb_dmac_chan_start_desc()
266 static int usb_dmac_desc_alloc(struct usb_dmac_chan *chan, unsigned int sg_len, usb_dmac_desc_alloc() argument
279 spin_lock_irqsave(&chan->vc.lock, flags); usb_dmac_desc_alloc()
280 list_add_tail(&desc->node, &chan->desc_freed); usb_dmac_desc_alloc()
281 spin_unlock_irqrestore(&chan->vc.lock, flags); usb_dmac_desc_alloc()
286 static void usb_dmac_desc_free(struct usb_dmac_chan *chan) usb_dmac_desc_free() argument
291 list_splice_init(&chan->desc_freed, &list); usb_dmac_desc_free()
292 list_splice_init(&chan->desc_got, &list); usb_dmac_desc_free()
298 chan->descs_allocated = 0; usb_dmac_desc_free()
301 static struct usb_dmac_desc *usb_dmac_desc_get(struct usb_dmac_chan *chan, usb_dmac_desc_get() argument
308 spin_lock_irqsave(&chan->vc.lock, flags); usb_dmac_desc_get()
309 list_for_each_entry(desc, &chan->desc_freed, node) { usb_dmac_desc_get()
311 list_move_tail(&desc->node, &chan->desc_got); usb_dmac_desc_get()
312 spin_unlock_irqrestore(&chan->vc.lock, flags); usb_dmac_desc_get()
316 spin_unlock_irqrestore(&chan->vc.lock, flags); usb_dmac_desc_get()
319 if (!usb_dmac_desc_alloc(chan, sg_len, gfp)) { usb_dmac_desc_get()
321 spin_lock_irqsave(&chan->vc.lock, flags); usb_dmac_desc_get()
322 desc = list_last_entry(&chan->desc_freed, struct usb_dmac_desc, usb_dmac_desc_get()
324 list_move_tail(&desc->node, &chan->desc_got); usb_dmac_desc_get()
325 spin_unlock_irqrestore(&chan->vc.lock, flags); usb_dmac_desc_get()
332 static void usb_dmac_desc_put(struct usb_dmac_chan *chan, usb_dmac_desc_put() argument
337 spin_lock_irqsave(&chan->vc.lock, flags); usb_dmac_desc_put()
338 list_move_tail(&desc->node, &chan->desc_freed); usb_dmac_desc_put()
339 spin_unlock_irqrestore(&chan->vc.lock, flags); usb_dmac_desc_put()
348 struct dma_chan *chan = &uchan->vc.chan; usb_dmac_soft_reset() local
349 struct usb_dmac *dmac = to_usb_dmac(chan->device); usb_dmac_soft_reset()
365 static void usb_dmac_chan_halt(struct usb_dmac_chan *chan) usb_dmac_chan_halt() argument
367 u32 chcr = usb_dmac_chan_read(chan, USB_DMACHCR); usb_dmac_chan_halt()
370 usb_dmac_chan_write(chan, USB_DMACHCR, chcr); usb_dmac_chan_halt()
372 usb_dmac_soft_reset(chan); usb_dmac_chan_halt()
384 static int usb_dmac_alloc_chan_resources(struct dma_chan *chan) usb_dmac_alloc_chan_resources() argument
386 struct usb_dmac_chan *uchan = to_usb_dmac_chan(chan); usb_dmac_alloc_chan_resources()
399 return pm_runtime_get_sync(chan->device->dev); usb_dmac_alloc_chan_resources()
402 static void usb_dmac_free_chan_resources(struct dma_chan *chan) usb_dmac_free_chan_resources() argument
404 struct usb_dmac_chan *uchan = to_usb_dmac_chan(chan); usb_dmac_free_chan_resources()
415 pm_runtime_put(chan->device->dev); usb_dmac_free_chan_resources()
419 usb_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, usb_dmac_prep_slave_sg() argument
423 struct usb_dmac_chan *uchan = to_usb_dmac_chan(chan); usb_dmac_prep_slave_sg()
429 dev_warn(chan->device->dev, usb_dmac_prep_slave_sg()
448 static int usb_dmac_chan_terminate_all(struct dma_chan *chan) usb_dmac_chan_terminate_all() argument
450 struct usb_dmac_chan *uchan = to_usb_dmac_chan(chan); usb_dmac_chan_terminate_all()
470 static unsigned int usb_dmac_get_current_residue(struct usb_dmac_chan *chan, usb_dmac_get_current_residue() argument
483 residue -= usb_dmac_chan_read(chan, USB_DMADAR) - mem_addr; usb_dmac_get_current_residue()
485 residue -= usb_dmac_chan_read(chan, USB_DMASAR) - mem_addr; usb_dmac_get_current_residue()
490 static u32 usb_dmac_chan_get_residue_if_complete(struct usb_dmac_chan *chan, usb_dmac_chan_get_residue_if_complete() argument
496 list_for_each_entry_reverse(desc, &chan->desc_freed, node) { usb_dmac_chan_get_residue_if_complete()
506 static u32 usb_dmac_chan_get_residue(struct usb_dmac_chan *chan, usb_dmac_chan_get_residue() argument
511 struct usb_dmac_desc *desc = chan->desc; usb_dmac_chan_get_residue()
515 vd = vchan_find_desc(&chan->vc, cookie); usb_dmac_chan_get_residue()
526 residue += usb_dmac_get_current_residue(chan, desc, desc->sg_index); usb_dmac_chan_get_residue()
531 static enum dma_status usb_dmac_tx_status(struct dma_chan *chan, usb_dmac_tx_status() argument
535 struct usb_dmac_chan *uchan = to_usb_dmac_chan(chan); usb_dmac_tx_status()
540 status = dma_cookie_status(chan, cookie, txstate); usb_dmac_tx_status()
557 static void usb_dmac_issue_pending(struct dma_chan *chan) usb_dmac_issue_pending() argument
559 struct usb_dmac_chan *uchan = to_usb_dmac_chan(chan); usb_dmac_issue_pending()
571 struct usb_dmac_chan *chan = to_usb_dmac_chan(vd->tx.chan); usb_dmac_virt_desc_free() local
573 usb_dmac_desc_put(chan, desc); usb_dmac_virt_desc_free()
580 static void usb_dmac_isr_transfer_end(struct usb_dmac_chan *chan) usb_dmac_isr_transfer_end() argument
582 struct usb_dmac_desc *desc = chan->desc; usb_dmac_isr_transfer_end()
587 usb_dmac_chan_start_sg(chan, desc->sg_index); usb_dmac_isr_transfer_end()
589 desc->residue = usb_dmac_get_current_residue(chan, desc, usb_dmac_isr_transfer_end()
595 usb_dmac_chan_start_desc(chan); usb_dmac_isr_transfer_end()
601 struct usb_dmac_chan *chan = dev; usb_dmac_isr_channel() local
607 spin_lock(&chan->vc.lock); usb_dmac_isr_channel()
609 chcr = usb_dmac_chan_read(chan, USB_DMACHCR); usb_dmac_isr_channel()
618 usb_dmac_chan_write(chan, USB_DMACHCR, chcr & ~mask); usb_dmac_isr_channel()
621 usb_dmac_isr_transfer_end(chan); usb_dmac_isr_channel()
625 spin_unlock(&chan->vc.lock); usb_dmac_isr_channel()
634 static bool usb_dmac_chan_filter(struct dma_chan *chan, void *arg) usb_dmac_chan_filter() argument
636 struct usb_dmac_chan *uchan = to_usb_dmac_chan(chan); usb_dmac_chan_filter()
639 if (dma_spec->np != chan->device->dev->of_node) usb_dmac_chan_filter()
653 struct dma_chan *chan; usb_dmac_of_xlate() local
663 chan = dma_request_channel(mask, usb_dmac_chan_filter, dma_spec); usb_dmac_of_xlate()
664 if (!chan) usb_dmac_of_xlate()
667 uchan = to_usb_dmac_chan(chan); usb_dmac_of_xlate()
669 return chan; usb_dmac_of_xlate()
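
usb_dmac_desc_get() and usb_dmac_desc_put() above implement a lock-protected descriptor recycler: take a descriptor off desc_freed under the virt-channel lock, allocate fresh ones only when the list runs dry, and move finished descriptors back onto the free list instead of freeing them. A userspace sketch of the same pattern, assuming a plain pthread mutex and a singly linked list in place of the kernel's spinlock and list.h:

#include <pthread.h>
#include <stdlib.h>

struct desc {
	struct desc *next;
};

static struct desc *free_list;
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

static struct desc *desc_get(void)
{
	struct desc *d;

	pthread_mutex_lock(&lock);
	d = free_list;			/* try recycled descriptors first */
	if (d)
		free_list = d->next;
	pthread_mutex_unlock(&lock);

	if (!d)				/* list was dry: allocate outside the lock */
		d = calloc(1, sizeof(*d));
	return d;
}

static void desc_put(struct desc *d)
{
	pthread_mutex_lock(&lock);
	d->next = free_list;		/* recycle rather than free */
	free_list = d;
	pthread_mutex_unlock(&lock);
}

int main(void)
{
	struct desc *d = desc_get();

	if (d)
		desc_put(d);
	return 0;
}
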
H A Drcar-dmac.c122 * @chan: base DMA channel object
141 struct dma_chan chan; member in struct:rcar_dmac_chan
167 #define to_rcar_dmac_chan(c) container_of(c, struct rcar_dmac_chan, chan)
290 static u32 rcar_dmac_chan_read(struct rcar_dmac_chan *chan, u32 reg) rcar_dmac_chan_read() argument
293 return readw(chan->iomem + reg); rcar_dmac_chan_read()
295 return readl(chan->iomem + reg); rcar_dmac_chan_read()
298 static void rcar_dmac_chan_write(struct rcar_dmac_chan *chan, u32 reg, u32 data) rcar_dmac_chan_write() argument
301 writew(data, chan->iomem + reg); rcar_dmac_chan_write()
303 writel(data, chan->iomem + reg); rcar_dmac_chan_write()
310 static bool rcar_dmac_chan_is_busy(struct rcar_dmac_chan *chan) rcar_dmac_chan_is_busy() argument
312 u32 chcr = rcar_dmac_chan_read(chan, RCAR_DMACHCR); rcar_dmac_chan_is_busy()
317 static void rcar_dmac_chan_start_xfer(struct rcar_dmac_chan *chan) rcar_dmac_chan_start_xfer() argument
319 struct rcar_dmac_desc *desc = chan->desc.running; rcar_dmac_chan_start_xfer()
322 WARN_ON_ONCE(rcar_dmac_chan_is_busy(chan)); rcar_dmac_chan_start_xfer()
324 if (chan->mid_rid >= 0) rcar_dmac_chan_start_xfer()
325 rcar_dmac_chan_write(chan, RCAR_DMARS, chan->mid_rid); rcar_dmac_chan_start_xfer()
330 dev_dbg(chan->chan.device->dev, rcar_dmac_chan_start_xfer()
331 "chan%u: queue desc %p: %u@%pad\n", rcar_dmac_chan_start_xfer()
332 chan->index, desc, desc->nchunks, &desc->hwdescs.dma); rcar_dmac_chan_start_xfer()
335 rcar_dmac_chan_write(chan, RCAR_DMAFIXDPBASE, rcar_dmac_chan_start_xfer()
338 rcar_dmac_chan_write(chan, RCAR_DMADPBASE, rcar_dmac_chan_start_xfer()
341 rcar_dmac_chan_write(chan, RCAR_DMACHCRB, rcar_dmac_chan_start_xfer()
354 rcar_dmac_chan_write(chan, RCAR_DMADAR, rcar_dmac_chan_start_xfer()
361 rcar_dmac_chan_write(chan, RCAR_DMADPCR, RCAR_DMADPCR_DIPT(1)); rcar_dmac_chan_start_xfer()
387 dev_dbg(chan->chan.device->dev, rcar_dmac_chan_start_xfer()
388 "chan%u: queue chunk %p: %u@%pad -> %pad\n", rcar_dmac_chan_start_xfer()
389 chan->index, chunk, chunk->size, &chunk->src_addr, rcar_dmac_chan_start_xfer()
393 rcar_dmac_chan_write(chan, RCAR_DMAFIXSAR, rcar_dmac_chan_start_xfer()
395 rcar_dmac_chan_write(chan, RCAR_DMAFIXDAR, rcar_dmac_chan_start_xfer()
398 rcar_dmac_chan_write(chan, RCAR_DMASAR, rcar_dmac_chan_start_xfer()
400 rcar_dmac_chan_write(chan, RCAR_DMADAR, rcar_dmac_chan_start_xfer()
402 rcar_dmac_chan_write(chan, RCAR_DMATCR, rcar_dmac_chan_start_xfer()
408 rcar_dmac_chan_write(chan, RCAR_DMACHCR, chcr | RCAR_DMACHCR_DE); rcar_dmac_chan_start_xfer()
435 struct rcar_dmac_chan *chan = to_rcar_dmac_chan(tx->chan); rcar_dmac_tx_submit() local
440 spin_lock_irqsave(&chan->lock, flags); rcar_dmac_tx_submit()
444 dev_dbg(chan->chan.device->dev, "chan%u: submit #%d@%p\n", rcar_dmac_tx_submit()
445 chan->index, tx->cookie, desc); rcar_dmac_tx_submit()
447 list_add_tail(&desc->node, &chan->desc.pending); rcar_dmac_tx_submit()
451 spin_unlock_irqrestore(&chan->lock, flags); rcar_dmac_tx_submit()
462 * @chan: the DMA channel
465 static int rcar_dmac_desc_alloc(struct rcar_dmac_chan *chan, gfp_t gfp) rcar_dmac_desc_alloc() argument
478 dma_async_tx_descriptor_init(&desc->async_tx, &chan->chan); rcar_dmac_desc_alloc()
485 spin_lock_irq(&chan->lock); rcar_dmac_desc_alloc()
486 list_splice_tail(&list, &chan->desc.free); rcar_dmac_desc_alloc()
487 list_add_tail(&page->node, &chan->desc.pages); rcar_dmac_desc_alloc()
488 spin_unlock_irq(&chan->lock); rcar_dmac_desc_alloc()
495 * @chan: the DMA channel
505 static void rcar_dmac_desc_put(struct rcar_dmac_chan *chan, rcar_dmac_desc_put() argument
510 spin_lock_irqsave(&chan->lock, flags); rcar_dmac_desc_put()
511 list_splice_tail_init(&desc->chunks, &chan->desc.chunks_free); rcar_dmac_desc_put()
512 list_add_tail(&desc->node, &chan->desc.free); rcar_dmac_desc_put()
513 spin_unlock_irqrestore(&chan->lock, flags); rcar_dmac_desc_put()
516 static void rcar_dmac_desc_recycle_acked(struct rcar_dmac_chan *chan) rcar_dmac_desc_recycle_acked() argument
527 spin_lock_irq(&chan->lock); rcar_dmac_desc_recycle_acked()
528 list_splice_init(&chan->desc.wait, &list); rcar_dmac_desc_recycle_acked()
529 spin_unlock_irq(&chan->lock); rcar_dmac_desc_recycle_acked()
534 rcar_dmac_desc_put(chan, desc); rcar_dmac_desc_recycle_acked()
542 spin_lock_irq(&chan->lock); rcar_dmac_desc_recycle_acked()
543 list_splice(&list, &chan->desc.wait); rcar_dmac_desc_recycle_acked()
544 spin_unlock_irq(&chan->lock); rcar_dmac_desc_recycle_acked()
549 * @chan: the DMA channel
556 static struct rcar_dmac_desc *rcar_dmac_desc_get(struct rcar_dmac_chan *chan) rcar_dmac_desc_get() argument
562 rcar_dmac_desc_recycle_acked(chan); rcar_dmac_desc_get()
564 spin_lock_irq(&chan->lock); rcar_dmac_desc_get()
566 while (list_empty(&chan->desc.free)) { rcar_dmac_desc_get()
573 spin_unlock_irq(&chan->lock); rcar_dmac_desc_get()
574 ret = rcar_dmac_desc_alloc(chan, GFP_NOWAIT); rcar_dmac_desc_get()
577 spin_lock_irq(&chan->lock); rcar_dmac_desc_get()
580 desc = list_first_entry(&chan->desc.free, struct rcar_dmac_desc, node); rcar_dmac_desc_get()
583 spin_unlock_irq(&chan->lock); rcar_dmac_desc_get()
590 * @chan: the DMA channel
593 static int rcar_dmac_xfer_chunk_alloc(struct rcar_dmac_chan *chan, gfp_t gfp) rcar_dmac_xfer_chunk_alloc() argument
609 spin_lock_irq(&chan->lock); rcar_dmac_xfer_chunk_alloc()
610 list_splice_tail(&list, &chan->desc.chunks_free); rcar_dmac_xfer_chunk_alloc()
611 list_add_tail(&page->node, &chan->desc.pages); rcar_dmac_xfer_chunk_alloc()
612 spin_unlock_irq(&chan->lock); rcar_dmac_xfer_chunk_alloc()
619 * @chan: the DMA channel
627 rcar_dmac_xfer_chunk_get(struct rcar_dmac_chan *chan) rcar_dmac_xfer_chunk_get() argument
632 spin_lock_irq(&chan->lock); rcar_dmac_xfer_chunk_get()
634 while (list_empty(&chan->desc.chunks_free)) { rcar_dmac_xfer_chunk_get()
641 spin_unlock_irq(&chan->lock); rcar_dmac_xfer_chunk_get()
642 ret = rcar_dmac_xfer_chunk_alloc(chan, GFP_NOWAIT); rcar_dmac_xfer_chunk_get()
645 spin_lock_irq(&chan->lock); rcar_dmac_xfer_chunk_get()
648 chunk = list_first_entry(&chan->desc.chunks_free, rcar_dmac_xfer_chunk_get()
652 spin_unlock_irq(&chan->lock); rcar_dmac_xfer_chunk_get()
657 static void rcar_dmac_realloc_hwdesc(struct rcar_dmac_chan *chan, rcar_dmac_realloc_hwdesc() argument
672 dma_free_coherent(chan->chan.device->dev, desc->hwdescs.size, rcar_dmac_realloc_hwdesc()
681 desc->hwdescs.mem = dma_alloc_coherent(chan->chan.device->dev, size, rcar_dmac_realloc_hwdesc()
689 static int rcar_dmac_fill_hwdesc(struct rcar_dmac_chan *chan, rcar_dmac_fill_hwdesc() argument
695 rcar_dmac_realloc_hwdesc(chan, desc, desc->nchunks * sizeof(*hwdesc)); rcar_dmac_fill_hwdesc()
715 static void rcar_dmac_chan_halt(struct rcar_dmac_chan *chan) rcar_dmac_chan_halt() argument
717 u32 chcr = rcar_dmac_chan_read(chan, RCAR_DMACHCR); rcar_dmac_chan_halt()
721 rcar_dmac_chan_write(chan, RCAR_DMACHCR, chcr); rcar_dmac_chan_halt()
724 static void rcar_dmac_chan_reinit(struct rcar_dmac_chan *chan) rcar_dmac_chan_reinit() argument
730 spin_lock_irqsave(&chan->lock, flags); rcar_dmac_chan_reinit()
733 list_splice_init(&chan->desc.pending, &descs); rcar_dmac_chan_reinit()
734 list_splice_init(&chan->desc.active, &descs); rcar_dmac_chan_reinit()
735 list_splice_init(&chan->desc.done, &descs); rcar_dmac_chan_reinit()
736 list_splice_init(&chan->desc.wait, &descs); rcar_dmac_chan_reinit()
738 chan->desc.running = NULL; rcar_dmac_chan_reinit()
740 spin_unlock_irqrestore(&chan->lock, flags); rcar_dmac_chan_reinit()
744 rcar_dmac_desc_put(chan, desc); rcar_dmac_chan_reinit()
759 struct rcar_dmac_chan *chan = &dmac->channels[i]; rcar_dmac_abort() local
762 spin_lock(&chan->lock); rcar_dmac_abort()
763 rcar_dmac_chan_halt(chan); rcar_dmac_abort()
764 spin_unlock(&chan->lock); rcar_dmac_abort()
766 rcar_dmac_chan_reinit(chan); rcar_dmac_abort()
774 static void rcar_dmac_chan_configure_desc(struct rcar_dmac_chan *chan, rcar_dmac_chan_configure_desc() argument
791 xfer_size = chan->src_xfer_size; rcar_dmac_chan_configure_desc()
797 xfer_size = chan->dst_xfer_size; rcar_dmac_chan_configure_desc()
823 rcar_dmac_chan_prep_sg(struct rcar_dmac_chan *chan, struct scatterlist *sgl, rcar_dmac_chan_prep_sg() argument
837 desc = rcar_dmac_desc_get(chan); rcar_dmac_chan_prep_sg()
847 rcar_dmac_chan_configure_desc(chan, desc); rcar_dmac_chan_prep_sg()
883 chunk = rcar_dmac_xfer_chunk_get(chan); for_each_sg()
885 rcar_dmac_desc_put(chan, desc); for_each_sg()
899 dev_dbg(chan->chan.device->dev, for_each_sg()
900 "chan%u: chunk %p/%p sgl %u@%p, %u/%u %pad -> %pad\n", for_each_sg()
901 chan->index, chunk, desc, i, sg, size, len, for_each_sg()
930 if (rcar_dmac_fill_hwdesc(chan, desc) < 0)
941 static int rcar_dmac_alloc_chan_resources(struct dma_chan *chan) rcar_dmac_alloc_chan_resources() argument
943 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_alloc_chan_resources()
958 return pm_runtime_get_sync(chan->device->dev); rcar_dmac_alloc_chan_resources()
961 static void rcar_dmac_free_chan_resources(struct dma_chan *chan) rcar_dmac_free_chan_resources() argument
963 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_free_chan_resources()
964 struct rcar_dmac *dmac = to_rcar_dmac(chan->device); rcar_dmac_free_chan_resources()
996 pm_runtime_put(chan->device->dev); rcar_dmac_free_chan_resources()
1000 rcar_dmac_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dma_dest, rcar_dmac_prep_dma_memcpy() argument
1003 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_prep_dma_memcpy()
1020 rcar_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, rcar_dmac_prep_slave_sg() argument
1024 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_prep_slave_sg()
1029 dev_warn(chan->device->dev, rcar_dmac_prep_slave_sg()
1044 rcar_dmac_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf_addr, rcar_dmac_prep_dma_cyclic() argument
1048 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_prep_dma_cyclic()
1057 dev_warn(chan->device->dev, rcar_dmac_prep_dma_cyclic()
1065 dev_err(chan->device->dev, rcar_dmac_prep_dma_cyclic()
1066 "chan%u: sg length %d exceds limit %d", rcar_dmac_prep_dma_cyclic()
1099 static int rcar_dmac_device_config(struct dma_chan *chan, rcar_dmac_device_config() argument
1102 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_device_config()
1116 static int rcar_dmac_chan_terminate_all(struct dma_chan *chan) rcar_dmac_chan_terminate_all() argument
1118 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_chan_terminate_all()
1135 static unsigned int rcar_dmac_chan_get_residue(struct rcar_dmac_chan *chan, rcar_dmac_chan_get_residue() argument
1138 struct rcar_dmac_desc *desc = chan->desc.running; rcar_dmac_chan_get_residue()
1162 dptr = (rcar_dmac_chan_read(chan, RCAR_DMACHCRB) & rcar_dmac_chan_get_residue()
1178 residue += rcar_dmac_chan_read(chan, RCAR_DMATCR) << desc->xfer_shift; rcar_dmac_chan_get_residue()
1183 static enum dma_status rcar_dmac_tx_status(struct dma_chan *chan, rcar_dmac_tx_status() argument
1187 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_tx_status()
1192 status = dma_cookie_status(chan, cookie, txstate); rcar_dmac_tx_status()
1205 static void rcar_dmac_issue_pending(struct dma_chan *chan) rcar_dmac_issue_pending() argument
1207 struct rcar_dmac_chan *rchan = to_rcar_dmac_chan(chan); rcar_dmac_issue_pending()
1240 static irqreturn_t rcar_dmac_isr_desc_stage_end(struct rcar_dmac_chan *chan) rcar_dmac_isr_desc_stage_end() argument
1242 struct rcar_dmac_desc *desc = chan->desc.running; rcar_dmac_isr_desc_stage_end()
1255 stage = (rcar_dmac_chan_read(chan, RCAR_DMACHCRB) & rcar_dmac_isr_desc_stage_end()
1257 rcar_dmac_chan_write(chan, RCAR_DMADPCR, RCAR_DMADPCR_DIPT(stage)); rcar_dmac_isr_desc_stage_end()
1262 static irqreturn_t rcar_dmac_isr_transfer_end(struct rcar_dmac_chan *chan) rcar_dmac_isr_transfer_end() argument
1264 struct rcar_dmac_desc *desc = chan->desc.running; rcar_dmac_isr_transfer_end()
1308 list_move_tail(&desc->node, &chan->desc.done); rcar_dmac_isr_transfer_end()
1311 if (!list_empty(&chan->desc.active)) rcar_dmac_isr_transfer_end()
1312 chan->desc.running = list_first_entry(&chan->desc.active, rcar_dmac_isr_transfer_end()
1316 chan->desc.running = NULL; rcar_dmac_isr_transfer_end()
1319 if (chan->desc.running) rcar_dmac_isr_transfer_end()
1320 rcar_dmac_chan_start_xfer(chan); rcar_dmac_isr_transfer_end()
1328 struct rcar_dmac_chan *chan = dev; rcar_dmac_isr_channel() local
1332 spin_lock(&chan->lock); rcar_dmac_isr_channel()
1334 chcr = rcar_dmac_chan_read(chan, RCAR_DMACHCR); rcar_dmac_isr_channel()
1337 rcar_dmac_chan_write(chan, RCAR_DMACHCR, chcr & ~mask); rcar_dmac_isr_channel()
1340 ret |= rcar_dmac_isr_desc_stage_end(chan); rcar_dmac_isr_channel()
1343 ret |= rcar_dmac_isr_transfer_end(chan); rcar_dmac_isr_channel()
1345 spin_unlock(&chan->lock); rcar_dmac_isr_channel()
1352 struct rcar_dmac_chan *chan = dev; rcar_dmac_isr_channel_thread() local
1355 spin_lock_irq(&chan->lock); rcar_dmac_isr_channel_thread()
1358 if (chan->desc.running && chan->desc.running->cyclic) { rcar_dmac_isr_channel_thread()
1362 desc = chan->desc.running; rcar_dmac_isr_channel_thread()
1367 spin_unlock_irq(&chan->lock); rcar_dmac_isr_channel_thread()
1369 spin_lock_irq(&chan->lock); rcar_dmac_isr_channel_thread()
1377 while (!list_empty(&chan->desc.done)) { rcar_dmac_isr_channel_thread()
1378 desc = list_first_entry(&chan->desc.done, struct rcar_dmac_desc, rcar_dmac_isr_channel_thread()
1384 spin_unlock_irq(&chan->lock); rcar_dmac_isr_channel_thread()
1391 spin_lock_irq(&chan->lock); rcar_dmac_isr_channel_thread()
1394 list_add_tail(&desc->node, &chan->desc.wait); rcar_dmac_isr_channel_thread()
1397 spin_unlock_irq(&chan->lock); rcar_dmac_isr_channel_thread()
1400 rcar_dmac_desc_recycle_acked(chan); rcar_dmac_isr_channel_thread()
1427 static bool rcar_dmac_chan_filter(struct dma_chan *chan, void *arg) rcar_dmac_chan_filter() argument
1429 struct rcar_dmac *dmac = to_rcar_dmac(chan->device); rcar_dmac_chan_filter()
1439 if (chan->device->device_config != rcar_dmac_device_config || rcar_dmac_chan_filter()
1440 dma_spec->np != chan->device->dev->of_node) rcar_dmac_chan_filter()
1450 struct dma_chan *chan; rcar_dmac_of_xlate() local
1460 chan = dma_request_channel(mask, rcar_dmac_chan_filter, dma_spec); rcar_dmac_of_xlate()
1461 if (!chan) rcar_dmac_of_xlate()
1464 rchan = to_rcar_dmac_chan(chan); rcar_dmac_of_xlate()
1467 return chan; rcar_dmac_of_xlate()
1519 struct dma_chan *chan = &rchan->chan; rcar_dmac_chan_probe() local
1562 chan->device = &dmac->engine; rcar_dmac_chan_probe()
1563 dma_cookie_init(chan); rcar_dmac_chan_probe()
1565 list_add_tail(&chan->device_node, &dmac->engine.channels); rcar_dmac_chan_probe()
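
rcar_dmac_chan_read()/rcar_dmac_chan_write() at the top of the rcar-dmac.c results dispatch on register width: one channel register is 16-bit while the rest are 32-bit, so the accessors pick readw()/writew() or readl()/writel() by offset. A portable sketch of that dispatch, with memcpy() standing in for the MMIO accessors and an assumed offset for the 16-bit RCAR_DMARS register:

#include <stdint.h>
#include <string.h>

#define RCAR_DMARS 0x40			/* assumed offset of the 16-bit register */

static int reg_is_16bit(uint32_t reg)
{
	return reg == RCAR_DMARS;
}

static uint32_t chan_read(void *iomem, uint32_t reg)
{
	uint16_t v16;
	uint32_t v32;

	if (reg_is_16bit(reg)) {	/* readw() in the driver */
		memcpy(&v16, (char *)iomem + reg, sizeof(v16));
		return v16;
	}
	memcpy(&v32, (char *)iomem + reg, sizeof(v32));	/* readl() */
	return v32;
}

static void chan_write(void *iomem, uint32_t reg, uint32_t data)
{
	if (reg_is_16bit(reg)) {	/* writew() in the driver */
		uint16_t v16 = (uint16_t)data;
		memcpy((char *)iomem + reg, &v16, sizeof(v16));
		return;
	}
	memcpy((char *)iomem + reg, &data, sizeof(data));	/* writel() */
}

int main(void)
{
	unsigned char regs[0x100] = { 0 };

	chan_write(regs, RCAR_DMARS, 0x1234);
	return chan_read(regs, RCAR_DMARS) == 0x1234 ? 0 : 1;
}
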
H A Drcar-hpbdma.c287 struct hpb_dmae_chan *chan = to_chan(schan); hpb_dmae_halt() local
289 ch_reg_write(chan, HPB_DMAE_DCMDR_DQEND, HPB_DMAE_DCMDR); hpb_dmae_halt()
290 ch_reg_write(chan, HPB_DMAE_DSTPR_DMSTP, HPB_DMAE_DSTPR); hpb_dmae_halt()
292 chan->plane_idx = 0; hpb_dmae_halt()
293 chan->first_desc = true; hpb_dmae_halt()
316 struct hpb_dmae_chan *chan = to_chan(schan); hpb_dmae_start_xfer() local
317 struct hpb_dmae_device *hpbdev = to_dev(chan); hpb_dmae_start_xfer()
320 if (chan->cfg->flags & HPB_DMAE_SET_ASYNC_RESET) hpb_dmae_start_xfer()
321 hpb_dmae_async_reset(hpbdev, chan->cfg->rstr); hpb_dmae_start_xfer()
323 desc->plane_idx = chan->plane_idx; hpb_dmae_start_xfer()
324 hpb_dmae_set_reg(chan, &desc->hw, chan->plane_idx); hpb_dmae_start_xfer()
325 hpb_dmae_start(chan, !chan->first_desc); hpb_dmae_start_xfer()
327 if (chan->xfer_mode == XFER_DOUBLE) { hpb_dmae_start_xfer()
328 chan->plane_idx ^= 1; hpb_dmae_start_xfer()
329 chan->first_desc = false; hpb_dmae_start_xfer()
348 struct hpb_dmae_chan *chan = to_chan(schan); hpb_dmae_chan_irq() local
349 struct hpb_dmae_device *hpbdev = to_dev(chan); hpb_dmae_chan_irq()
350 int ch = chan->cfg->dma_ch; hpb_dmae_chan_irq()
381 struct hpb_dmae_chan *chan = to_chan(schan); hpb_dmae_get_partial() local
382 u32 tcr = ch_reg_read(chan, desc->plane_idx ? hpb_dmae_get_partial()
385 return (desc->hw.tcr - tcr) << chan->xmit_shift; hpb_dmae_get_partial()
390 struct hpb_dmae_chan *chan = to_chan(schan); hpb_dmae_channel_busy() local
391 u32 dstsr = ch_reg_read(chan, HPB_DMAE_DSTSR); hpb_dmae_channel_busy()
393 if (chan->xfer_mode == XFER_DOUBLE) hpb_dmae_channel_busy()
460 struct hpb_dmae_chan *chan = to_chan(schan); hpb_dmae_set_slave() local
462 hpb_dmae_find_slave(chan, slave_id); hpb_dmae_set_slave()
468 chan->cfg = sc; hpb_dmae_set_slave()
469 chan->slave_addr = slave_addr ? : sc->addr; hpb_dmae_set_slave()
470 return hpb_dmae_alloc_chan_resources(chan, sc); hpb_dmae_set_slave()
479 struct hpb_dmae_chan *chan = to_chan(schan); hpb_dmae_slave_addr() local
481 return chan->slave_addr; hpb_dmae_slave_addr()
542 struct resource *chan, *comm, *rest, *mode, *irq_res; hpb_dmae_probe() local
549 chan = platform_get_resource(pdev, IORESOURCE_MEM, 0); hpb_dmae_probe()
565 hpbdev->chan_reg = devm_ioremap_resource(&pdev->dev, chan); hpb_dmae_probe()
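
hpb_dmae_start_xfer() shows the driver's double-plane mode: in XFER_DOUBLE the channel ping-pongs between two register planes, flipping plane_idx after each descriptor and remembering whether this is the first descriptor since a halt (hpb_dmae_halt() resets both fields). A compact sketch of that state machine, with invented types:

#include <stdbool.h>
#include <stdio.h>

enum xfer_mode { XFER_SINGLE, XFER_DOUBLE };

struct hpb_chan {
	enum xfer_mode xfer_mode;
	int plane_idx;
	bool first_desc;
};

static void halt(struct hpb_chan *c)
{
	c->plane_idx = 0;		/* as in hpb_dmae_halt() */
	c->first_desc = true;
}

static void start_xfer(struct hpb_chan *c, int desc_id)
{
	printf("desc %d -> plane %d (%s)\n", desc_id, c->plane_idx,
	       c->first_desc ? "first" : "chained");
	if (c->xfer_mode == XFER_DOUBLE) {
		c->plane_idx ^= 1;	/* ping-pong between the two planes */
		c->first_desc = false;
	}
}

int main(void)
{
	struct hpb_chan c = { .xfer_mode = XFER_DOUBLE };
	int i;

	halt(&c);
	for (i = 0; i < 3; i++)
		start_xfer(&c, i);
	return 0;
}
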
H A Dshdma-of.c27 struct dma_chan *chan; shdma_of_xlate() local
36 chan = dma_request_channel(mask, shdma_chan_filter, shdma_of_xlate()
38 if (chan) shdma_of_xlate()
39 to_shdma_chan(chan)->hw_req = id; shdma_of_xlate()
41 return chan; shdma_of_xlate()
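
shdma_of_xlate() follows the same xlate recipe visible in usb_dmac_of_xlate() and rcar_dmac_of_xlate() above: call dma_request_channel() with a filter callback and let the filter accept or reject each candidate channel. The userspace mock below reproduces the shape of that handshake; request_channel(), the pool, and match_req() are simplified stand-ins for the kernel machinery, not its API.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct chan {
	int hw_req;
	bool busy;
};

typedef bool (*filter_fn)(struct chan *c, void *arg);

/* Walks the pool the way dma_request_channel() walks registered devices. */
static struct chan *request_channel(struct chan *pool, size_t n,
				    filter_fn filter, void *arg)
{
	size_t i;

	for (i = 0; i < n; i++) {
		if (!pool[i].busy && filter(&pool[i], arg)) {
			pool[i].busy = true;	/* claim for the requester */
			return &pool[i];
		}
	}
	return NULL;
}

static bool match_req(struct chan *c, void *arg)
{
	return c->hw_req == *(int *)arg;	/* like matching the DT id */
}

int main(void)
{
	struct chan pool[3] = { { 10, false }, { 11, false }, { 12, false } };
	int want = 11;
	struct chan *c = request_channel(pool, 3, match_req, &want);

	printf("%s\n", c ? "got channel" : "no match");
	return 0;
}
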
H A Dshdma.h39 struct sh_dmae_chan *chan[SH_DMAE_MAX_CHANNELS]; member in struct:sh_dmae_device
59 #define to_sh_chan(chan) container_of(chan, struct sh_dmae_chan, shdma_chan)
62 #define to_sh_dev(chan) container_of(chan->shdma_chan.dma_chan.device,\
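
to_sh_chan(), to_usb_dmac_chan(), and to_rcar_dmac_chan() are all instances of the container_of() idiom: recover the driver-private wrapper from a pointer to the generic structure embedded inside it. A self-contained version, using the minimal form of the macro without the kernel's type-checking extras:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct dma_chan {			/* the generic, embedded object */
	int id;
};

struct my_chan {			/* the driver-private wrapper */
	int iomem_off;
	struct dma_chan chan;
};

#define to_my_chan(c)	container_of(c, struct my_chan, chan)

int main(void)
{
	struct my_chan mc = { .iomem_off = 0x80, .chan = { .id = 2 } };
	struct dma_chan *c = &mc.chan;	/* what the DMA core hands back */

	printf("iomem_off=0x%x\n", to_my_chan(c)->iomem_off);
	return 0;
}
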
/linux-4.1.27/arch/arm/mach-iop13xx/include/mach/
H A Dadma.h25 #define ADMA_ACCR(chan) (chan->mmr_base + 0x0)
26 #define ADMA_ACSR(chan) (chan->mmr_base + 0x4)
27 #define ADMA_ADAR(chan) (chan->mmr_base + 0x8)
28 #define ADMA_IIPCR(chan) (chan->mmr_base + 0x18)
29 #define ADMA_IIPAR(chan) (chan->mmr_base + 0x1c)
30 #define ADMA_IIPUAR(chan) (chan->mmr_base + 0x20)
31 #define ADMA_ANDAR(chan) (chan->mmr_base + 0x24)
32 #define ADMA_ADCR(chan) (chan->mmr_base + 0x28)
33 #define ADMA_CARMD(chan) (chan->mmr_base + 0x2c)
34 #define ADMA_ABCR(chan) (chan->mmr_base + 0x30)
35 #define ADMA_DLADR(chan) (chan->mmr_base + 0x34)
36 #define ADMA_DUADR(chan) (chan->mmr_base + 0x38)
37 #define ADMA_SLAR(src, chan) (chan->mmr_base + (0x3c + (src << 3)))
38 #define ADMA_SUAR(src, chan) (chan->mmr_base + (0x40 + (src << 3)))
155 static inline u32 iop_chan_get_current_descriptor(struct iop_adma_chan *chan) iop_chan_get_current_descriptor() argument
157 return __raw_readl(ADMA_ADAR(chan)); iop_chan_get_current_descriptor()
160 static inline void iop_chan_set_next_descriptor(struct iop_adma_chan *chan, iop_chan_set_next_descriptor() argument
163 __raw_writel(next_desc_addr, ADMA_ANDAR(chan)); iop_chan_set_next_descriptor()
168 static inline char iop_chan_is_busy(struct iop_adma_chan *chan) iop_chan_is_busy() argument
170 if (__raw_readl(ADMA_ACSR(chan)) & iop_chan_is_busy()
178 iop_chan_get_desc_align(struct iop_adma_chan *chan, int num_slots) iop_chan_get_desc_align() argument
222 struct iop_adma_chan *chan) iop_desc_get_byte_count()
229 struct iop_adma_chan *chan, iop_desc_get_src_addr()
237 struct iop_adma_chan *chan) iop_desc_get_src_count()
361 struct iop_adma_chan *chan, iop_desc_set_byte_count()
395 struct iop_adma_chan *chan, iop_desc_set_dest_addr()
465 struct iop_adma_chan *chan) iop_desc_init_interrupt()
468 iop_desc_set_byte_count(desc, chan, 0); iop_desc_init_interrupt()
469 iop_desc_set_dest_addr(desc, chan, 0); iop_desc_init_interrupt()
528 static inline void iop_chan_append(struct iop_adma_chan *chan) iop_chan_append() argument
532 adma_accr = __raw_readl(ADMA_ACCR(chan)); iop_chan_append()
534 __raw_writel(adma_accr, ADMA_ACCR(chan)); iop_chan_append()
537 static inline u32 iop_chan_get_status(struct iop_adma_chan *chan) iop_chan_get_status() argument
539 return __raw_readl(ADMA_ACSR(chan)); iop_chan_get_status()
542 static inline void iop_chan_disable(struct iop_adma_chan *chan) iop_chan_disable() argument
544 u32 adma_chan_ctrl = __raw_readl(ADMA_ACCR(chan)); iop_chan_disable()
546 __raw_writel(adma_chan_ctrl, ADMA_ACCR(chan)); iop_chan_disable()
549 static inline void iop_chan_enable(struct iop_adma_chan *chan) iop_chan_enable() argument
553 adma_chan_ctrl = __raw_readl(ADMA_ACCR(chan)); iop_chan_enable()
555 __raw_writel(adma_chan_ctrl, ADMA_ACCR(chan)); iop_chan_enable()
558 static inline void iop_adma_device_clear_eot_status(struct iop_adma_chan *chan) iop_adma_device_clear_eot_status() argument
560 u32 status = __raw_readl(ADMA_ACSR(chan)); iop_adma_device_clear_eot_status()
562 __raw_writel(status, ADMA_ACSR(chan)); iop_adma_device_clear_eot_status()
565 static inline void iop_adma_device_clear_eoc_status(struct iop_adma_chan *chan) iop_adma_device_clear_eoc_status() argument
567 u32 status = __raw_readl(ADMA_ACSR(chan)); iop_adma_device_clear_eoc_status()
569 __raw_writel(status, ADMA_ACSR(chan)); iop_adma_device_clear_eoc_status()
572 static inline void iop_adma_device_clear_err_status(struct iop_adma_chan *chan) iop_adma_device_clear_err_status() argument
574 u32 status = __raw_readl(ADMA_ACSR(chan)); iop_adma_device_clear_err_status()
576 __raw_writel(status, ADMA_ACSR(chan)); iop_adma_device_clear_err_status()
580 iop_is_err_int_parity(unsigned long status, struct iop_adma_chan *chan) iop_is_err_int_parity() argument
586 iop_is_err_mcu_abort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_mcu_abort() argument
592 iop_is_err_int_tabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_int_tabort() argument
598 iop_is_err_int_mabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_int_mabort() argument
604 iop_is_err_pci_tabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_pci_tabort() argument
610 iop_is_err_pci_mabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_pci_mabort() argument
616 iop_is_err_split_tx(unsigned long status, struct iop_adma_chan *chan) iop_is_err_split_tx() argument
221 iop_desc_get_byte_count(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan) iop_desc_get_byte_count() argument
228 iop_desc_get_src_addr(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan, int src_idx) iop_desc_get_src_addr() argument
236 iop_desc_get_src_count(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan) iop_desc_get_src_count() argument
360 iop_desc_set_byte_count(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan, u32 byte_count) iop_desc_set_byte_count() argument
394 iop_desc_set_dest_addr(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan, dma_addr_t addr) iop_desc_set_dest_addr() argument
464 iop_desc_init_interrupt(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan) iop_desc_init_interrupt() argument
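
iop_adma_device_clear_eot/eoc/err_status() read ADMA_ACSR and write the value straight back, relying on write-1-to-clear semantics so every pending bit gets acknowledged. The sketch below simulates that with a plain variable standing in for the register; the mask argument is an added refinement for clearing only selected bits, which the originals do not need since they write the whole status back.

#include <stdint.h>
#include <stdio.h>

static uint32_t acsr = 0x00000a00;	/* pretend two events are pending */

static uint32_t reg_read(void)
{
	return acsr;
}

static void reg_write(uint32_t v)
{
	acsr &= ~v;			/* hardware W1C: written 1s clear bits */
}

static void clear_status(uint32_t mask)
{
	uint32_t status = reg_read() & mask;	/* only the bits we own */

	reg_write(status);		/* writing 1s acknowledges them */
}

int main(void)
{
	clear_status(0x00000200);
	printf("acsr=0x%08x\n", (unsigned)acsr);	/* 0x00000800 left */
	return 0;
}
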
/linux-4.1.27/drivers/usb/dwc2/
H A Dhcd_intr.c87 struct dwc2_host_chan *chan, dwc2_hc_handle_tt_clear()
92 if (!chan->qh) dwc2_hc_handle_tt_clear()
95 if (chan->qh->dev_speed == USB_SPEED_HIGH) dwc2_hc_handle_tt_clear()
106 chan->qh->tt_buffer_dirty = 1; dwc2_hc_handle_tt_clear()
109 chan->qh->tt_buffer_dirty = 0; dwc2_hc_handle_tt_clear()
162 struct dwc2_host_chan *chan; dwc2_rx_fifo_level_intr() local
169 chan = hsotg->hc_ptr_array[chnum]; dwc2_rx_fifo_level_intr()
170 if (!chan) { dwc2_rx_fifo_level_intr()
183 dev_vdbg(hsotg->dev, " DPID = %d, chan.dpid = %d\n", dpid, dwc2_rx_fifo_level_intr()
184 chan->data_pid_start); dwc2_rx_fifo_level_intr()
192 dwc2_read_packet(hsotg, chan->xfer_buf, bcnt); dwc2_rx_fifo_level_intr()
195 chan->xfer_count += bcnt; dwc2_rx_fifo_level_intr()
196 chan->xfer_buf += bcnt; dwc2_rx_fifo_level_intr()
401 struct dwc2_host_chan *chan, int chnum, dwc2_get_actual_xfer_length()
411 if (chan->ep_is_in) { dwc2_get_actual_xfer_length()
414 length = chan->xfer_len - count; dwc2_get_actual_xfer_length()
417 } else if (chan->qh->do_split) { dwc2_get_actual_xfer_length()
420 length = chan->xfer_len; dwc2_get_actual_xfer_length()
433 length = (chan->start_pkt_count - count) * chan->max_packet; dwc2_get_actual_xfer_length()
449 struct dwc2_host_chan *chan, int chnum, dwc2_update_urb_state()
456 int xfer_length = dwc2_get_actual_xfer_length(hsotg, chan, chnum, qtd, dwc2_update_urb_state()
466 if (chan->align_buf && xfer_length && chan->ep_is_in) { dwc2_update_urb_state()
468 memcpy(urb->buf + urb->actual_length, chan->qh->dw_align_buf, dwc2_update_urb_state()
476 if (xfer_length && chan->ep_type == USB_ENDPOINT_XFER_BULK && dwc2_update_urb_state()
479 !(urb->length % chan->max_packet)) { dwc2_update_urb_state()
488 __func__, (chan->ep_is_in ? "IN" : "OUT"), chnum); dwc2_update_urb_state()
489 dev_vdbg(hsotg->dev, " chan->xfer_len %d\n", chan->xfer_len); dwc2_update_urb_state()
506 struct dwc2_host_chan *chan, int chnum, dwc2_hcd_save_data_toggle()
512 if (chan->ep_type != USB_ENDPOINT_XFER_CONTROL) { dwc2_hcd_save_data_toggle()
514 chan->qh->data_toggle = DWC2_HC_PID_DATA0; dwc2_hcd_save_data_toggle()
516 chan->qh->data_toggle = DWC2_HC_PID_DATA1; dwc2_hcd_save_data_toggle()
536 struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, dwc2_update_isoc_urb_state()
552 chan, chnum, qtd, halt_status, NULL); dwc2_update_isoc_urb_state()
555 if (chan->align_buf && frame_desc->actual_length && dwc2_update_isoc_urb_state()
556 chan->ep_is_in) { dwc2_update_isoc_urb_state()
560 qtd->isoc_split_offset, chan->qh->dw_align_buf, dwc2_update_isoc_urb_state()
566 if (chan->ep_is_in) dwc2_update_isoc_urb_state()
581 chan, chnum, qtd, halt_status, NULL); dwc2_update_isoc_urb_state()
584 if (chan->align_buf && frame_desc->actual_length && dwc2_update_isoc_urb_state()
585 chan->ep_is_in) { dwc2_update_isoc_urb_state()
589 qtd->isoc_split_offset, chan->qh->dw_align_buf, dwc2_update_isoc_urb_state()
594 if (chan->qh->do_split && dwc2_update_isoc_urb_state()
595 chan->ep_type == USB_ENDPOINT_XFER_ISOC && chan->ep_is_in && dwc2_update_isoc_urb_state()
668 * @chan: The host channel to release
678 struct dwc2_host_chan *chan, dwc2_release_channel()
686 if (dbg_hc(chan)) dwc2_release_channel()
688 __func__, chan->hc_num, halt_status); dwc2_release_channel()
724 dwc2_deactivate_qh(hsotg, chan->qh, free_qtd); dwc2_release_channel()
732 if (!list_empty(&chan->hc_list_entry)) dwc2_release_channel()
733 list_del(&chan->hc_list_entry); dwc2_release_channel()
734 dwc2_hc_cleanup(hsotg, chan); dwc2_release_channel()
735 list_add_tail(&chan->hc_list_entry, &hsotg->free_hc_list); dwc2_release_channel()
740 switch (chan->ep_type) { dwc2_release_channel()
757 haintmsk &= ~(1 << chan->hc_num); dwc2_release_channel()
777 struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, dwc2_halt_channel()
780 if (dbg_hc(chan)) dwc2_halt_channel()
784 if (dbg_hc(chan)) dwc2_halt_channel()
786 dwc2_release_channel(hsotg, chan, qtd, halt_status); dwc2_halt_channel()
791 dwc2_hc_halt(hsotg, chan, halt_status); dwc2_halt_channel()
793 if (chan->halt_on_queue) { dwc2_halt_channel()
797 if (chan->ep_type == USB_ENDPOINT_XFER_CONTROL || dwc2_halt_channel()
798 chan->ep_type == USB_ENDPOINT_XFER_BULK) { dwc2_halt_channel()
816 list_move(&chan->qh->qh_list_entry, dwc2_halt_channel()
837 struct dwc2_host_chan *chan, dwc2_complete_non_periodic_xfer()
845 if (chan->hcint & HCINTMSK_NYET) { dwc2_complete_non_periodic_xfer()
852 chan->qh->ping_state = 1; dwc2_complete_non_periodic_xfer()
865 if (chan->ep_is_in) { dwc2_complete_non_periodic_xfer()
871 dwc2_halt_channel(hsotg, chan, qtd, halt_status); dwc2_complete_non_periodic_xfer()
877 dwc2_release_channel(hsotg, chan, qtd, halt_status); dwc2_complete_non_periodic_xfer()
887 struct dwc2_host_chan *chan, int chnum, dwc2_complete_periodic_xfer()
895 if (!chan->ep_is_in || (hctsiz & TSIZ_PKTCNT_MASK) == 0) dwc2_complete_periodic_xfer()
897 dwc2_release_channel(hsotg, chan, qtd, halt_status); dwc2_complete_periodic_xfer()
900 dwc2_halt_channel(hsotg, chan, qtd, halt_status); dwc2_complete_periodic_xfer()
904 struct dwc2_host_chan *chan, int chnum, dwc2_xfercomp_isoc_split_in()
914 len = dwc2_get_actual_xfer_length(hsotg, chan, chnum, qtd, dwc2_xfercomp_isoc_split_in()
924 if (chan->align_buf) { dwc2_xfercomp_isoc_split_in()
927 qtd->isoc_split_offset, chan->qh->dw_align_buf, len); dwc2_xfercomp_isoc_split_in()
941 dwc2_release_channel(hsotg, chan, qtd, dwc2_xfercomp_isoc_split_in()
944 dwc2_release_channel(hsotg, chan, qtd, dwc2_xfercomp_isoc_split_in()
956 struct dwc2_host_chan *chan, int chnum, dwc2_hc_xfercomp_intr()
964 if (dbg_hc(chan)) dwc2_hc_xfercomp_intr()
975 dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, halt_status); dwc2_hc_xfercomp_intr()
983 if (chan->qh->do_split) { dwc2_hc_xfercomp_intr()
984 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC && chan->ep_is_in && dwc2_hc_xfercomp_intr()
987 dwc2_xfercomp_isoc_split_in(hsotg, chan, chnum, dwc2_hc_xfercomp_intr()
1009 urb_xfer_done = dwc2_update_urb_state(hsotg, chan, dwc2_hc_xfercomp_intr()
1016 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, dwc2_hc_xfercomp_intr()
1030 dwc2_complete_non_periodic_xfer(hsotg, chan, chnum, qtd, dwc2_hc_xfercomp_intr()
1035 urb_xfer_done = dwc2_update_urb_state(hsotg, chan, chnum, urb, dwc2_hc_xfercomp_intr()
1044 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, qtd); dwc2_hc_xfercomp_intr()
1045 dwc2_complete_non_periodic_xfer(hsotg, chan, chnum, qtd, dwc2_hc_xfercomp_intr()
1050 urb_xfer_done = dwc2_update_urb_state(hsotg, chan, chnum, urb, dwc2_hc_xfercomp_intr()
1064 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, qtd); dwc2_hc_xfercomp_intr()
1065 dwc2_complete_periodic_xfer(hsotg, chan, chnum, qtd, dwc2_hc_xfercomp_intr()
1072 halt_status = dwc2_update_isoc_urb_state(hsotg, chan, dwc2_hc_xfercomp_intr()
1074 dwc2_complete_periodic_xfer(hsotg, chan, chnum, qtd, dwc2_hc_xfercomp_intr()
1088 struct dwc2_host_chan *chan, int chnum, dwc2_hc_stall_intr()
1098 dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, dwc2_hc_stall_intr()
1121 chan->qh->data_toggle = 0; dwc2_hc_stall_intr()
1125 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_STALL); dwc2_hc_stall_intr()
1138 struct dwc2_host_chan *chan, int chnum, dwc2_update_urb_state_abn()
1143 u32 xfer_length = dwc2_get_actual_xfer_length(hsotg, chan, chnum, dwc2_update_urb_state_abn()
1153 if (chan->align_buf && xfer_length && chan->ep_is_in) { dwc2_update_urb_state_abn()
1155 memcpy(urb->buf + urb->actual_length, chan->qh->dw_align_buf, dwc2_update_urb_state_abn()
1163 __func__, (chan->ep_is_in ? "IN" : "OUT"), chnum); dwc2_update_urb_state_abn()
1164 dev_vdbg(hsotg->dev, " chan->start_pkt_count %d\n", dwc2_update_urb_state_abn()
1165 chan->start_pkt_count); dwc2_update_urb_state_abn()
1168 dev_vdbg(hsotg->dev, " chan->max_packet %d\n", chan->max_packet); dwc2_update_urb_state_abn()
1182 struct dwc2_host_chan *chan, int chnum, dwc2_hc_nak_intr()
1185 if (dbg_hc(chan)) dwc2_hc_nak_intr()
1193 if (chan->do_split) { dwc2_hc_nak_intr()
1194 if (chan->complete_split) dwc2_hc_nak_intr()
1197 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_NAK); dwc2_hc_nak_intr()
1204 if (hsotg->core_params->dma_enable > 0 && chan->ep_is_in) { dwc2_hc_nak_intr()
1222 if (!chan->qh->ping_state) { dwc2_hc_nak_intr()
1223 dwc2_update_urb_state_abn(hsotg, chan, chnum, qtd->urb, dwc2_hc_nak_intr()
1225 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, qtd); dwc2_hc_nak_intr()
1227 if (chan->speed == USB_SPEED_HIGH) dwc2_hc_nak_intr()
1228 chan->qh->ping_state = 1; dwc2_hc_nak_intr()
1236 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_NAK); dwc2_hc_nak_intr()
1240 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_NAK); dwc2_hc_nak_intr()
1258 struct dwc2_host_chan *chan, int chnum, dwc2_hc_ack_intr()
1263 if (dbg_hc(chan)) dwc2_hc_ack_intr()
1267 if (chan->do_split) { dwc2_hc_ack_intr()
1269 if (!chan->ep_is_in && dwc2_hc_ack_intr()
1270 chan->data_pid_start != DWC2_HC_PID_SETUP) dwc2_hc_ack_intr()
1271 qtd->ssplit_out_xfer_count = chan->xfer_len; dwc2_hc_ack_intr()
1273 if (chan->ep_type != USB_ENDPOINT_XFER_ISOC || chan->ep_is_in) { dwc2_hc_ack_intr()
1275 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_ACK); dwc2_hc_ack_intr()
1278 switch (chan->xact_pos) { dwc2_hc_ack_intr()
1309 if (chan->qh->ping_state) { dwc2_hc_ack_intr()
1310 chan->qh->ping_state = 0; dwc2_hc_ack_intr()
1318 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_ACK); dwc2_hc_ack_intr()
1337 struct dwc2_host_chan *chan, int chnum, dwc2_hc_nyet_intr()
1340 if (dbg_hc(chan)) dwc2_hc_nyet_intr()
1348 if (chan->do_split && chan->complete_split) { dwc2_hc_nyet_intr()
1349 if (chan->ep_is_in && chan->ep_type == USB_ENDPOINT_XFER_ISOC && dwc2_hc_nyet_intr()
1357 dwc2_release_channel(hsotg, chan, qtd, dwc2_hc_nyet_intr()
1360 dwc2_release_channel(hsotg, chan, qtd, dwc2_hc_nyet_intr()
1366 if (chan->ep_type == USB_ENDPOINT_XFER_INT || dwc2_hc_nyet_intr()
1367 chan->ep_type == USB_ENDPOINT_XFER_ISOC) { dwc2_hc_nyet_intr()
1371 dwc2_full_frame_num(chan->qh->sched_frame)) { dwc2_hc_nyet_intr()
1387 dwc2_halt_channel(hsotg, chan, qtd, dwc2_hc_nyet_intr()
1394 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_NYET); dwc2_hc_nyet_intr()
1398 chan->qh->ping_state = 1; dwc2_hc_nyet_intr()
1401 dwc2_update_urb_state_abn(hsotg, chan, chnum, qtd->urb, qtd, dwc2_hc_nyet_intr()
1403 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, qtd); dwc2_hc_nyet_intr()
1409 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_NYET); dwc2_hc_nyet_intr()
1420 struct dwc2_host_chan *chan, int chnum, dwc2_hc_babble_intr()
1426 dwc2_hc_handle_tt_clear(hsotg, chan, qtd); dwc2_hc_babble_intr()
1429 dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, dwc2_hc_babble_intr()
1434 if (chan->ep_type != USB_ENDPOINT_XFER_ISOC) { dwc2_hc_babble_intr()
1436 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_BABBLE_ERR); dwc2_hc_babble_intr()
1440 halt_status = dwc2_update_isoc_urb_state(hsotg, chan, chnum, dwc2_hc_babble_intr()
1442 dwc2_halt_channel(hsotg, chan, qtd, halt_status); dwc2_hc_babble_intr()
1454 struct dwc2_host_chan *chan, int chnum, dwc2_hc_ahberr_intr()
1470 dwc2_hc_handle_tt_clear(hsotg, chan, qtd); dwc2_hc_ahberr_intr()
1506 switch (chan->speed) { dwc2_hc_ahberr_intr()
1534 dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, dwc2_hc_ahberr_intr()
1546 dwc2_hc_halt(hsotg, chan, DWC2_HC_XFER_AHB_ERR); dwc2_hc_ahberr_intr()
1557 struct dwc2_host_chan *chan, int chnum, dwc2_hc_xacterr_intr()
1563 dwc2_hc_handle_tt_clear(hsotg, chan, qtd); dwc2_hc_xacterr_intr()
1566 dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, dwc2_hc_xacterr_intr()
1575 if (!chan->qh->ping_state) { dwc2_hc_xacterr_intr()
1577 dwc2_update_urb_state_abn(hsotg, chan, chnum, qtd->urb, dwc2_hc_xacterr_intr()
1579 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, qtd); dwc2_hc_xacterr_intr()
1580 if (!chan->ep_is_in && chan->speed == USB_SPEED_HIGH) dwc2_hc_xacterr_intr()
1581 chan->qh->ping_state = 1; dwc2_hc_xacterr_intr()
1588 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_XACT_ERR); dwc2_hc_xacterr_intr()
1592 if (chan->do_split && chan->complete_split) dwc2_hc_xacterr_intr()
1594 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_XACT_ERR); dwc2_hc_xacterr_intr()
1600 halt_status = dwc2_update_isoc_urb_state(hsotg, chan, dwc2_hc_xacterr_intr()
1602 dwc2_halt_channel(hsotg, chan, qtd, halt_status); dwc2_hc_xacterr_intr()
1616 struct dwc2_host_chan *chan, int chnum, dwc2_hc_frmovrun_intr()
1621 if (dbg_hc(chan)) dwc2_hc_frmovrun_intr()
1625 dwc2_hc_handle_tt_clear(hsotg, chan, qtd); dwc2_hc_frmovrun_intr()
1632 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_FRAME_OVERRUN); dwc2_hc_frmovrun_intr()
1635 halt_status = dwc2_update_isoc_urb_state(hsotg, chan, chnum, dwc2_hc_frmovrun_intr()
1637 dwc2_halt_channel(hsotg, chan, qtd, halt_status); dwc2_hc_frmovrun_intr()
1649 struct dwc2_host_chan *chan, int chnum, dwc2_hc_datatglerr_intr()
1655 if (chan->ep_is_in) dwc2_hc_datatglerr_intr()
1662 dwc2_hc_handle_tt_clear(hsotg, chan, qtd); dwc2_hc_datatglerr_intr()
1674 struct dwc2_host_chan *chan, int chnum, dwc2_halt_status_ok()
1683 if (chan->halt_status == DWC2_HC_XFER_NO_HALT_STATUS) { dwc2_halt_status_ok()
1693 "%s: chan->halt_status DWC2_HC_XFER_NO_HALT_STATUS,\n", dwc2_halt_status_ok()
1700 chan->hcint, hcintmsk, hcsplt); dwc2_halt_status_ok()
1720 chan->halt_pending = 0; dwc2_halt_status_ok()
1721 dwc2_halt_channel(hsotg, chan, qtd, chan->halt_status); dwc2_halt_status_ok()
1734 struct dwc2_host_chan *chan, int chnum, dwc2_hc_chhltd_intr_dma()
1740 if (dbg_hc(chan)) dwc2_hc_chhltd_intr_dma()
1750 if (chan->speed == USB_SPEED_HIGH && !chan->ep_is_in && dwc2_hc_chhltd_intr_dma()
1751 (chan->ep_type == USB_ENDPOINT_XFER_CONTROL || dwc2_hc_chhltd_intr_dma()
1752 chan->ep_type == USB_ENDPOINT_XFER_BULK)) { dwc2_hc_chhltd_intr_dma()
1757 if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE || dwc2_hc_chhltd_intr_dma()
1758 (chan->halt_status == DWC2_HC_XFER_AHB_ERR && dwc2_hc_chhltd_intr_dma()
1761 dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, dwc2_hc_chhltd_intr_dma()
1762 chan->halt_status); dwc2_hc_chhltd_intr_dma()
1770 dwc2_release_channel(hsotg, chan, qtd, dwc2_hc_chhltd_intr_dma()
1771 chan->halt_status); dwc2_hc_chhltd_intr_dma()
1777 if (chan->hcint & HCINTMSK_XFERCOMPL) { dwc2_hc_chhltd_intr_dma()
1785 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC && !chan->ep_is_in) dwc2_hc_chhltd_intr_dma()
1786 dwc2_hc_ack_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1787 dwc2_hc_xfercomp_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1788 } else if (chan->hcint & HCINTMSK_STALL) { dwc2_hc_chhltd_intr_dma()
1789 dwc2_hc_stall_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1790 } else if ((chan->hcint & HCINTMSK_XACTERR) && dwc2_hc_chhltd_intr_dma()
1793 if (chan->hcint & dwc2_hc_chhltd_intr_dma()
1809 dwc2_hc_xacterr_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1810 } else if ((chan->hcint & HCINTMSK_XCS_XACT) && dwc2_hc_chhltd_intr_dma()
1812 dwc2_hc_xacterr_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1813 } else if ((chan->hcint & HCINTMSK_AHBERR) && dwc2_hc_chhltd_intr_dma()
1815 dwc2_hc_ahberr_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1816 } else if (chan->hcint & HCINTMSK_BBLERR) { dwc2_hc_chhltd_intr_dma()
1817 dwc2_hc_babble_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1818 } else if (chan->hcint & HCINTMSK_FRMOVRUN) { dwc2_hc_chhltd_intr_dma()
1819 dwc2_hc_frmovrun_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1821 if (chan->hcint & HCINTMSK_NYET) { dwc2_hc_chhltd_intr_dma()
1828 dwc2_hc_nyet_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1829 } else if ((chan->hcint & HCINTMSK_NAK) && dwc2_hc_chhltd_intr_dma()
1838 dwc2_hc_nak_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1839 } else if ((chan->hcint & HCINTMSK_ACK) && dwc2_hc_chhltd_intr_dma()
1848 dwc2_hc_ack_intr(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1850 if (chan->ep_type == USB_ENDPOINT_XFER_INT || dwc2_hc_chhltd_intr_dma()
1851 chan->ep_type == USB_ENDPOINT_XFER_ISOC) { dwc2_hc_chhltd_intr_dma()
1861 dwc2_halt_channel(hsotg, chan, qtd, dwc2_hc_chhltd_intr_dma()
1869 chan->hcint, dwc2_hc_chhltd_intr_dma()
1877 chan->hcint); dwc2_hc_chhltd_intr_dma()
1881 dwc2_update_urb_state_abn(hsotg, chan, chnum, qtd->urb, dwc2_hc_chhltd_intr_dma()
1883 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr_dma()
1884 dwc2_halt_channel(hsotg, chan, qtd, DWC2_HC_XFER_XACT_ERR); dwc2_hc_chhltd_intr_dma()
1900 struct dwc2_host_chan *chan, int chnum, dwc2_hc_chhltd_intr()
1903 if (dbg_hc(chan)) dwc2_hc_chhltd_intr()
1908 dwc2_hc_chhltd_intr_dma(hsotg, chan, chnum, qtd); dwc2_hc_chhltd_intr()
1910 if (!dwc2_halt_status_ok(hsotg, chan, chnum, qtd)) dwc2_hc_chhltd_intr()
1912 dwc2_release_channel(hsotg, chan, qtd, chan->halt_status); dwc2_hc_chhltd_intr()
1920 struct dwc2_host_chan *chan; dwc2_hc_n_intr() local
1923 chan = hsotg->hc_ptr_array[chnum]; dwc2_hc_n_intr()
1927 if (!chan) { dwc2_hc_n_intr()
1933 if (dbg_hc(chan)) { dwc2_hc_n_intr()
1942 chan->hcint = hcint; dwc2_hc_n_intr()
1950 if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE) { dwc2_hc_n_intr()
1957 dwc2_hcd_complete_xfer_ddma(hsotg, chan, chnum, dwc2_hc_n_intr()
1958 chan->halt_status); dwc2_hc_n_intr()
1960 dwc2_release_channel(hsotg, chan, NULL, dwc2_hc_n_intr()
1961 chan->halt_status); dwc2_hc_n_intr()
1965 if (list_empty(&chan->qh->qtd_list)) { dwc2_hc_n_intr()
1974 chan->hcint, hcintmsk, hcint); dwc2_hc_n_intr()
1975 chan->halt_status = DWC2_HC_XFER_NO_HALT_STATUS; dwc2_hc_n_intr()
1977 chan->hcint = 0; dwc2_hc_n_intr()
1981 qtd = list_first_entry(&chan->qh->qtd_list, struct dwc2_qtd, dwc2_hc_n_intr()
1990 dwc2_hc_xfercomp_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
1999 dwc2_hc_chhltd_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2001 dwc2_hc_ahberr_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2003 dwc2_hc_stall_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2005 dwc2_hc_nak_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2007 dwc2_hc_ack_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2009 dwc2_hc_nyet_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2011 dwc2_hc_xacterr_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2013 dwc2_hc_babble_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2015 dwc2_hc_frmovrun_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2017 dwc2_hc_datatglerr_intr(hsotg, chan, chnum, qtd); dwc2_hc_n_intr()
2019 chan->hcint = 0; dwc2_hc_n_intr()
86 dwc2_hc_handle_tt_clear(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd) dwc2_hc_handle_tt_clear() argument
400 dwc2_get_actual_xfer_length(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd, enum dwc2_halt_status halt_status, int *short_read) dwc2_get_actual_xfer_length() argument
448 dwc2_update_urb_state(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_hcd_urb *urb, struct dwc2_qtd *qtd) dwc2_update_urb_state() argument
505 dwc2_hcd_save_data_toggle(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hcd_save_data_toggle() argument
535 dwc2_update_isoc_urb_state( struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd, enum dwc2_halt_status halt_status) dwc2_update_isoc_urb_state() argument
677 dwc2_release_channel(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, enum dwc2_halt_status halt_status) dwc2_release_channel() argument
776 dwc2_halt_channel(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, enum dwc2_halt_status halt_status) dwc2_halt_channel() argument
836 dwc2_complete_non_periodic_xfer(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd, enum dwc2_halt_status halt_status) dwc2_complete_non_periodic_xfer() argument
886 dwc2_complete_periodic_xfer(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd, enum dwc2_halt_status halt_status) dwc2_complete_periodic_xfer() argument
903 dwc2_xfercomp_isoc_split_in(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_xfercomp_isoc_split_in() argument
955 dwc2_hc_xfercomp_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_xfercomp_intr() argument
1087 dwc2_hc_stall_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_stall_intr() argument
1137 dwc2_update_urb_state_abn(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_hcd_urb *urb, struct dwc2_qtd *qtd, enum dwc2_halt_status halt_status) dwc2_update_urb_state_abn() argument
1181 dwc2_hc_nak_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_nak_intr() argument
1257 dwc2_hc_ack_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_ack_intr() argument
1336 dwc2_hc_nyet_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_nyet_intr() argument
1419 dwc2_hc_babble_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_babble_intr() argument
1453 dwc2_hc_ahberr_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_ahberr_intr() argument
1556 dwc2_hc_xacterr_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_xacterr_intr() argument
1615 dwc2_hc_frmovrun_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_frmovrun_intr() argument
1648 dwc2_hc_datatglerr_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_datatglerr_intr() argument
1673 dwc2_halt_status_ok(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_halt_status_ok() argument
1733 dwc2_hc_chhltd_intr_dma(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_chhltd_intr_dma() argument
1899 dwc2_hc_chhltd_intr(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd) dwc2_hc_chhltd_intr() argument
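
The tail of dwc2_hc_n_intr() is a classic masked-dispatch routine: AND the raw HCINT status with HCINTMSK, then call one handler per set bit (xfercomp, chhltd, nak, ack, and so on). A reduced model of that shape, with invented bit values and handlers:

#include <stdint.h>
#include <stdio.h>

#define INT_XFERCOMPL	(1u << 0)	/* invented bit positions */
#define INT_CHHLTD	(1u << 1)
#define INT_NAK		(1u << 4)

static void on_xfercomp(void)	{ puts("transfer complete"); }
static void on_chhltd(void)	{ puts("channel halted"); }
static void on_nak(void)	{ puts("NAK received"); }

static void hc_n_intr(uint32_t hcint, uint32_t hcintmsk)
{
	uint32_t pending = hcint & hcintmsk;	/* enabled sources only */

	if (pending & INT_XFERCOMPL)
		on_xfercomp();
	if (pending & INT_CHHLTD)
		on_chhltd();
	if (pending & INT_NAK)
		on_nak();
}

int main(void)
{
	/* NAK is raised but masked off, so only xfercomp is handled. */
	hc_n_intr(INT_XFERCOMPL | INT_NAK, INT_XFERCOMPL | INT_CHHLTD);
	return 0;
}
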
H A Dcore.c772 struct dwc2_host_chan *chan) dwc2_hc_enable_slave_ints()
776 switch (chan->ep_type) { dwc2_hc_enable_slave_ints()
784 if (chan->ep_is_in) { dwc2_hc_enable_slave_ints()
789 if (chan->do_ping) dwc2_hc_enable_slave_ints()
793 if (chan->do_split) { dwc2_hc_enable_slave_ints()
795 if (chan->complete_split) dwc2_hc_enable_slave_ints()
801 if (chan->error_state) dwc2_hc_enable_slave_ints()
815 if (chan->ep_is_in) dwc2_hc_enable_slave_ints()
817 if (chan->error_state) dwc2_hc_enable_slave_ints()
819 if (chan->do_split) { dwc2_hc_enable_slave_ints()
820 if (chan->complete_split) dwc2_hc_enable_slave_ints()
834 if (chan->ep_is_in) { dwc2_hc_enable_slave_ints()
844 writel(hcintmsk, hsotg->regs + HCINTMSK(chan->hc_num)); dwc2_hc_enable_slave_ints()
845 if (dbg_hc(chan)) dwc2_hc_enable_slave_ints()
850 struct dwc2_host_chan *chan) dwc2_hc_enable_dma_ints()
859 if (dbg_hc(chan)) dwc2_hc_enable_dma_ints()
863 if (dbg_hc(chan)) dwc2_hc_enable_dma_ints()
865 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC) dwc2_hc_enable_dma_ints()
869 if (chan->error_state && !chan->do_split && dwc2_hc_enable_dma_ints()
870 chan->ep_type != USB_ENDPOINT_XFER_ISOC) { dwc2_hc_enable_dma_ints()
871 if (dbg_hc(chan)) dwc2_hc_enable_dma_ints()
874 if (chan->ep_is_in) { dwc2_hc_enable_dma_ints()
876 if (chan->ep_type != USB_ENDPOINT_XFER_INT) dwc2_hc_enable_dma_ints()
881 writel(hcintmsk, hsotg->regs + HCINTMSK(chan->hc_num)); dwc2_hc_enable_dma_ints()
882 if (dbg_hc(chan)) dwc2_hc_enable_dma_ints()
887 struct dwc2_host_chan *chan) dwc2_hc_enable_ints()
892 if (dbg_hc(chan)) dwc2_hc_enable_ints()
894 dwc2_hc_enable_dma_ints(hsotg, chan); dwc2_hc_enable_ints()
896 if (dbg_hc(chan)) dwc2_hc_enable_ints()
898 dwc2_hc_enable_slave_ints(hsotg, chan); dwc2_hc_enable_ints()
903 intmsk |= 1 << chan->hc_num; dwc2_hc_enable_ints()
905 if (dbg_hc(chan)) dwc2_hc_enable_ints()
912 if (dbg_hc(chan)) dwc2_hc_enable_ints()
921 * @chan: Information needed to initialize the host channel
923 * The HCCHARn register is set up with the characteristics specified in chan.
927 void dwc2_hc_init(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_init() argument
929 u8 hc_num = chan->hc_num; dwc2_hc_init()
934 if (dbg_hc(chan)) dwc2_hc_init()
943 dwc2_hc_enable_ints(hsotg, chan); dwc2_hc_init()
949 hcchar = chan->dev_addr << HCCHAR_DEVADDR_SHIFT & HCCHAR_DEVADDR_MASK; dwc2_hc_init()
950 hcchar |= chan->ep_num << HCCHAR_EPNUM_SHIFT & HCCHAR_EPNUM_MASK; dwc2_hc_init()
951 if (chan->ep_is_in) dwc2_hc_init()
953 if (chan->speed == USB_SPEED_LOW) dwc2_hc_init()
955 hcchar |= chan->ep_type << HCCHAR_EPTYPE_SHIFT & HCCHAR_EPTYPE_MASK; dwc2_hc_init()
956 hcchar |= chan->max_packet << HCCHAR_MPS_SHIFT & HCCHAR_MPS_MASK; dwc2_hc_init()
958 if (dbg_hc(chan)) { dwc2_hc_init()
965 chan->dev_addr); dwc2_hc_init()
967 chan->ep_num); dwc2_hc_init()
969 chan->ep_is_in); dwc2_hc_init()
971 chan->speed == USB_SPEED_LOW); dwc2_hc_init()
973 chan->ep_type); dwc2_hc_init()
975 chan->max_packet); dwc2_hc_init()
979 if (chan->do_split) { dwc2_hc_init()
980 if (dbg_hc(chan)) dwc2_hc_init()
984 chan->complete_split ? "CSPLIT" : "SSPLIT"); dwc2_hc_init()
985 if (chan->complete_split) dwc2_hc_init()
987 hcsplt |= chan->xact_pos << HCSPLT_XACTPOS_SHIFT & dwc2_hc_init()
989 hcsplt |= chan->hub_addr << HCSPLT_HUBADDR_SHIFT & dwc2_hc_init()
991 hcsplt |= chan->hub_port << HCSPLT_PRTADDR_SHIFT & dwc2_hc_init()
993 if (dbg_hc(chan)) { dwc2_hc_init()
995 chan->complete_split); dwc2_hc_init()
997 chan->xact_pos); dwc2_hc_init()
999 chan->hub_addr); dwc2_hc_init()
1001 chan->hub_port); dwc2_hc_init()
1003 chan->ep_is_in); dwc2_hc_init()
1005 chan->max_packet); dwc2_hc_init()
1007 chan->xfer_len); dwc2_hc_init()
1018 * @chan: Host channel to halt
1042 void dwc2_hc_halt(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, dwc2_hc_halt() argument
1047 if (dbg_hc(chan)) dwc2_hc_halt()
1063 writel(hcintmsk, hsotg->regs + HCINTMSK(chan->hc_num)); dwc2_hc_halt()
1070 writel(~hcintmsk, hsotg->regs + HCINT(chan->hc_num)); dwc2_hc_halt()
1077 chan->halt_status = halt_status; dwc2_hc_halt()
1079 hcchar = readl(hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_halt()
1095 if (chan->halt_pending) { dwc2_hc_halt()
1102 "*** %s: Channel %d, chan->halt_pending already set ***\n", dwc2_hc_halt()
1103 __func__, chan->hc_num); dwc2_hc_halt()
1107 hcchar = readl(hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_halt()
1112 if (dbg_hc(chan)) dwc2_hc_halt()
1116 if (dbg_hc(chan)) dwc2_hc_halt()
1122 if (dbg_hc(chan)) dwc2_hc_halt()
1127 if (chan->ep_type == USB_ENDPOINT_XFER_CONTROL || dwc2_hc_halt()
1128 chan->ep_type == USB_ENDPOINT_XFER_BULK) { dwc2_hc_halt()
1147 if (dbg_hc(chan)) dwc2_hc_halt()
1151 writel(hcchar, hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_halt()
1152 chan->halt_status = halt_status; dwc2_hc_halt()
1155 if (dbg_hc(chan)) dwc2_hc_halt()
1157 chan->halt_pending = 1; dwc2_hc_halt()
1158 chan->halt_on_queue = 0; dwc2_hc_halt()
1160 if (dbg_hc(chan)) dwc2_hc_halt()
1162 chan->halt_on_queue = 1; dwc2_hc_halt()
1165 if (dbg_hc(chan)) { dwc2_hc_halt()
1167 chan->hc_num); dwc2_hc_halt()
1171 chan->halt_pending); dwc2_hc_halt()
1173 chan->halt_on_queue); dwc2_hc_halt()
1175 chan->halt_status); dwc2_hc_halt()
1183 * @chan: Identifies the host channel to clean up
1188 void dwc2_hc_cleanup(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_cleanup() argument
1192 chan->xfer_started = 0; dwc2_hc_cleanup()
1198 writel(0, hsotg->regs + HCINTMSK(chan->hc_num)); dwc2_hc_cleanup()
1201 writel(hcintmsk, hsotg->regs + HCINT(chan->hc_num)); dwc2_hc_cleanup()
1209 * @chan: Identifies the host channel to set up and its properties
1215 struct dwc2_host_chan *chan, u32 *hcchar) dwc2_hc_set_even_odd_frame()
1217 if (chan->ep_type == USB_ENDPOINT_XFER_INT || dwc2_hc_set_even_odd_frame()
1218 chan->ep_type == USB_ENDPOINT_XFER_ISOC) { dwc2_hc_set_even_odd_frame()
1225 static void dwc2_set_pid_isoc(struct dwc2_host_chan *chan) dwc2_set_pid_isoc() argument
1228 if (chan->speed == USB_SPEED_HIGH) { dwc2_set_pid_isoc()
1229 if (chan->ep_is_in) { dwc2_set_pid_isoc()
1230 if (chan->multi_count == 1) dwc2_set_pid_isoc()
1231 chan->data_pid_start = DWC2_HC_PID_DATA0; dwc2_set_pid_isoc()
1232 else if (chan->multi_count == 2) dwc2_set_pid_isoc()
1233 chan->data_pid_start = DWC2_HC_PID_DATA1; dwc2_set_pid_isoc()
1235 chan->data_pid_start = DWC2_HC_PID_DATA2; dwc2_set_pid_isoc()
1237 if (chan->multi_count == 1) dwc2_set_pid_isoc()
1238 chan->data_pid_start = DWC2_HC_PID_DATA0; dwc2_set_pid_isoc()
1240 chan->data_pid_start = DWC2_HC_PID_MDATA; dwc2_set_pid_isoc()
1243 chan->data_pid_start = DWC2_HC_PID_DATA0; dwc2_set_pid_isoc()
1252 * @chan: Information needed to initialize the host channel
1258 * Upon return the xfer_buf and xfer_count fields in chan are incremented by
1262 struct dwc2_host_chan *chan) dwc2_hc_write_packet()
1269 u32 *data_buf = (u32 *)chan->xfer_buf; dwc2_hc_write_packet()
1271 if (dbg_hc(chan)) dwc2_hc_write_packet()
1274 data_fifo = (u32 __iomem *)(hsotg->regs + HCFIFO(chan->hc_num)); dwc2_hc_write_packet()
1276 remaining_count = chan->xfer_len - chan->xfer_count; dwc2_hc_write_packet()
1277 if (remaining_count > chan->max_packet) dwc2_hc_write_packet()
1278 byte_count = chan->max_packet; dwc2_hc_write_packet()
1297 chan->xfer_count += byte_count; dwc2_hc_write_packet()
1298 chan->xfer_buf += byte_count; dwc2_hc_write_packet()
1306 * @chan: Information needed to initialize the host channel. The xfer_len value
1335 struct dwc2_host_chan *chan) dwc2_hc_start_transfer()
1343 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1346 if (chan->do_ping) { dwc2_hc_start_transfer()
1348 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1350 dwc2_hc_do_ping(hsotg, chan); dwc2_hc_start_transfer()
1351 chan->xfer_started = 1; dwc2_hc_start_transfer()
1354 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1360 if (chan->do_split) { dwc2_hc_start_transfer()
1361 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1365 if (chan->complete_split && !chan->ep_is_in) dwc2_hc_start_transfer()
1370 chan->xfer_len = 0; dwc2_hc_start_transfer()
1371 else if (chan->ep_is_in || chan->xfer_len > chan->max_packet) dwc2_hc_start_transfer()
1372 chan->xfer_len = chan->max_packet; dwc2_hc_start_transfer()
1373 else if (!chan->ep_is_in && chan->xfer_len > 188) dwc2_hc_start_transfer()
1374 chan->xfer_len = 188; dwc2_hc_start_transfer()
1376 hctsiz |= chan->xfer_len << TSIZ_XFERSIZE_SHIFT & dwc2_hc_start_transfer()
1379 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1385 if (chan->ep_type == USB_ENDPOINT_XFER_INT || dwc2_hc_start_transfer()
1386 chan->ep_type == USB_ENDPOINT_XFER_ISOC) { dwc2_hc_start_transfer()
1395 chan->multi_count * chan->max_packet; dwc2_hc_start_transfer()
1397 if (chan->xfer_len > max_periodic_len) dwc2_hc_start_transfer()
1398 chan->xfer_len = max_periodic_len; dwc2_hc_start_transfer()
1399 } else if (chan->xfer_len > max_hc_xfer_size) { dwc2_hc_start_transfer()
1404 chan->xfer_len = dwc2_hc_start_transfer()
1405 max_hc_xfer_size - chan->max_packet + 1; dwc2_hc_start_transfer()
1408 if (chan->xfer_len > 0) { dwc2_hc_start_transfer()
1409 num_packets = (chan->xfer_len + chan->max_packet - 1) / dwc2_hc_start_transfer()
1410 chan->max_packet; dwc2_hc_start_transfer()
1413 chan->xfer_len = num_packets * chan->max_packet; dwc2_hc_start_transfer()
1420 if (chan->ep_is_in) dwc2_hc_start_transfer()
1425 chan->xfer_len = num_packets * chan->max_packet; dwc2_hc_start_transfer()
1427 if (chan->ep_type == USB_ENDPOINT_XFER_INT || dwc2_hc_start_transfer()
1428 chan->ep_type == USB_ENDPOINT_XFER_ISOC) dwc2_hc_start_transfer()
1433 chan->multi_count = num_packets; dwc2_hc_start_transfer()
1435 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC) dwc2_hc_start_transfer()
1436 dwc2_set_pid_isoc(chan); dwc2_hc_start_transfer()
1438 hctsiz |= chan->xfer_len << TSIZ_XFERSIZE_SHIFT & dwc2_hc_start_transfer()
1442 chan->start_pkt_count = num_packets; dwc2_hc_start_transfer()
1444 hctsiz |= chan->data_pid_start << TSIZ_SC_MC_PID_SHIFT & dwc2_hc_start_transfer()
1446 writel(hctsiz, hsotg->regs + HCTSIZ(chan->hc_num)); dwc2_hc_start_transfer()
1447 if (dbg_hc(chan)) { dwc2_hc_start_transfer()
1449 hctsiz, chan->hc_num); dwc2_hc_start_transfer()
1452 chan->hc_num); dwc2_hc_start_transfer()
1467 if (chan->align_buf) { dwc2_hc_start_transfer()
1468 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1470 dma_addr = chan->align_buf; dwc2_hc_start_transfer()
1472 dma_addr = chan->xfer_dma; dwc2_hc_start_transfer()
1474 writel((u32)dma_addr, hsotg->regs + HCDMA(chan->hc_num)); dwc2_hc_start_transfer()
1475 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1477 (unsigned long)dma_addr, chan->hc_num); dwc2_hc_start_transfer()
1481 if (chan->do_split) { dwc2_hc_start_transfer()
1482 u32 hcsplt = readl(hsotg->regs + HCSPLT(chan->hc_num)); dwc2_hc_start_transfer()
1485 writel(hcsplt, hsotg->regs + HCSPLT(chan->hc_num)); dwc2_hc_start_transfer()
1488 hcchar = readl(hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_start_transfer()
1490 hcchar |= chan->multi_count << HCCHAR_MULTICNT_SHIFT & dwc2_hc_start_transfer()
1492 dwc2_hc_set_even_odd_frame(hsotg, chan, &hcchar); dwc2_hc_start_transfer()
1497 __func__, chan->hc_num, hcchar); dwc2_hc_start_transfer()
1503 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1508 writel(hcchar, hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_start_transfer()
1509 if (dbg_hc(chan)) dwc2_hc_start_transfer()
1511 chan->hc_num); dwc2_hc_start_transfer()
1513 chan->xfer_started = 1; dwc2_hc_start_transfer()
1514 chan->requests++; dwc2_hc_start_transfer()
1517 !chan->ep_is_in && chan->xfer_len > 0) dwc2_hc_start_transfer()
1519 dwc2_hc_write_packet(hsotg, chan); dwc2_hc_start_transfer()
1527 * @chan: Information needed to initialize the host channel
1537 struct dwc2_host_chan *chan) dwc2_hc_start_transfer_ddma()
1543 if (chan->do_ping) dwc2_hc_start_transfer_ddma()
1546 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC) dwc2_hc_start_transfer_ddma()
1547 dwc2_set_pid_isoc(chan); dwc2_hc_start_transfer_ddma()
1550 hctsiz |= chan->data_pid_start << TSIZ_SC_MC_PID_SHIFT & dwc2_hc_start_transfer_ddma()
1554 hctsiz |= (chan->ntd - 1) << TSIZ_NTD_SHIFT & TSIZ_NTD_MASK; dwc2_hc_start_transfer_ddma()
1557 hctsiz |= chan->schinfo << TSIZ_SCHINFO_SHIFT & TSIZ_SCHINFO_MASK; dwc2_hc_start_transfer_ddma()
1559 if (dbg_hc(chan)) { dwc2_hc_start_transfer_ddma()
1561 chan->hc_num); dwc2_hc_start_transfer_ddma()
1563 chan->data_pid_start); dwc2_hc_start_transfer_ddma()
1564 dev_vdbg(hsotg->dev, " NTD: %d\n", chan->ntd - 1); dwc2_hc_start_transfer_ddma()
1567 writel(hctsiz, hsotg->regs + HCTSIZ(chan->hc_num)); dwc2_hc_start_transfer_ddma()
1569 hc_dma = (u32)chan->desc_list_addr & HCDMA_DMA_ADDR_MASK; dwc2_hc_start_transfer_ddma()
1573 writel(hc_dma, hsotg->regs + HCDMA(chan->hc_num)); dwc2_hc_start_transfer_ddma()
1574 if (dbg_hc(chan)) dwc2_hc_start_transfer_ddma()
1576 hc_dma, chan->hc_num); dwc2_hc_start_transfer_ddma()
1578 hcchar = readl(hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_start_transfer_ddma()
1580 hcchar |= chan->multi_count << HCCHAR_MULTICNT_SHIFT & dwc2_hc_start_transfer_ddma()
1586 __func__, chan->hc_num, hcchar); dwc2_hc_start_transfer_ddma()
1592 if (dbg_hc(chan)) dwc2_hc_start_transfer_ddma()
1597 writel(hcchar, hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_start_transfer_ddma()
1598 if (dbg_hc(chan)) dwc2_hc_start_transfer_ddma()
1600 chan->hc_num); dwc2_hc_start_transfer_ddma()
1602 chan->xfer_started = 1; dwc2_hc_start_transfer_ddma()
1603 chan->requests++; dwc2_hc_start_transfer_ddma()
1611 * @chan: Information needed to initialize the host channel
1627 struct dwc2_host_chan *chan) dwc2_hc_continue_transfer()
1629 if (dbg_hc(chan)) dwc2_hc_continue_transfer()
1631 chan->hc_num); dwc2_hc_continue_transfer()
1633 if (chan->do_split) dwc2_hc_continue_transfer()
1637 if (chan->data_pid_start == DWC2_HC_PID_SETUP) dwc2_hc_continue_transfer()
1641 if (chan->ep_is_in) { dwc2_hc_continue_transfer()
1654 u32 hcchar = readl(hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_continue_transfer()
1656 dwc2_hc_set_even_odd_frame(hsotg, chan, &hcchar); dwc2_hc_continue_transfer()
1659 if (dbg_hc(chan)) dwc2_hc_continue_transfer()
1662 writel(hcchar, hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_continue_transfer()
1663 chan->requests++; dwc2_hc_continue_transfer()
1669 if (chan->xfer_count < chan->xfer_len) { dwc2_hc_continue_transfer()
1670 if (chan->ep_type == USB_ENDPOINT_XFER_INT || dwc2_hc_continue_transfer()
1671 chan->ep_type == USB_ENDPOINT_XFER_ISOC) { dwc2_hc_continue_transfer()
1673 HCCHAR(chan->hc_num)); dwc2_hc_continue_transfer()
1675 dwc2_hc_set_even_odd_frame(hsotg, chan, dwc2_hc_continue_transfer()
1680 dwc2_hc_write_packet(hsotg, chan); dwc2_hc_continue_transfer()
1681 chan->requests++; dwc2_hc_continue_transfer()
1692 * @chan: Information needed to initialize the host channel
1697 void dwc2_hc_do_ping(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_do_ping() argument
1702 if (dbg_hc(chan)) dwc2_hc_do_ping()
1704 chan->hc_num); dwc2_hc_do_ping()
1709 writel(hctsiz, hsotg->regs + HCTSIZ(chan->hc_num)); dwc2_hc_do_ping()
1711 hcchar = readl(hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_do_ping()
1714 writel(hcchar, hsotg->regs + HCCHAR(chan->hc_num)); dwc2_hc_do_ping()
771 dwc2_hc_enable_slave_ints(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_enable_slave_ints() argument
849 dwc2_hc_enable_dma_ints(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_enable_dma_ints() argument
886 dwc2_hc_enable_ints(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_enable_ints() argument
1214 dwc2_hc_set_even_odd_frame(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, u32 *hcchar) dwc2_hc_set_even_odd_frame() argument
1261 dwc2_hc_write_packet(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_write_packet() argument
1334 dwc2_hc_start_transfer(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_start_transfer() argument
1536 dwc2_hc_start_transfer_ddma(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_start_transfer_ddma() argument
1626 dwc2_hc_continue_transfer(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_hc_continue_transfer() argument
H A Dhcd_ddma.c217 struct dwc2_host_chan *chan; dwc2_update_frame_list() local
236 chan = qh->channel; dwc2_update_frame_list()
246 hsotg->frame_list[j] |= 1 << chan->hc_num; dwc2_update_frame_list()
248 hsotg->frame_list[j] &= ~(1 << chan->hc_num); dwc2_update_frame_list()
255 chan->schinfo = 0; dwc2_update_frame_list()
256 if (chan->speed == USB_SPEED_HIGH && qh->interval) { dwc2_update_frame_list()
261 chan->schinfo |= j; dwc2_update_frame_list()
265 chan->schinfo = 0xff; dwc2_update_frame_list()
272 struct dwc2_host_chan *chan = qh->channel; dwc2_release_channel_ddma() local
287 if (chan->qh) { dwc2_release_channel_ddma()
288 if (!list_empty(&chan->hc_list_entry)) dwc2_release_channel_ddma()
289 list_del(&chan->hc_list_entry); dwc2_release_channel_ddma()
290 dwc2_hc_cleanup(hsotg, chan); dwc2_release_channel_ddma()
291 list_add_tail(&chan->hc_list_entry, &hsotg->free_hc_list); dwc2_release_channel_ddma()
292 chan->qh = NULL; dwc2_release_channel_ddma()
617 struct dwc2_host_chan *chan, dwc2_fill_host_dma_desc()
622 int len = chan->xfer_len; dwc2_fill_host_dma_desc()
624 if (len > MAX_DMA_DESC_SIZE - (chan->max_packet - 1)) dwc2_fill_host_dma_desc()
625 len = MAX_DMA_DESC_SIZE - (chan->max_packet - 1); dwc2_fill_host_dma_desc()
627 if (chan->ep_is_in) { dwc2_fill_host_dma_desc()
630 if (len > 0 && chan->max_packet) dwc2_fill_host_dma_desc()
631 num_packets = (len + chan->max_packet - 1) dwc2_fill_host_dma_desc()
632 / chan->max_packet; dwc2_fill_host_dma_desc()
638 len = num_packets * chan->max_packet; dwc2_fill_host_dma_desc()
648 dma_desc->buf = (u32)chan->xfer_dma; dwc2_fill_host_dma_desc()
654 if (len > chan->xfer_len) { dwc2_fill_host_dma_desc()
655 chan->xfer_len = 0; dwc2_fill_host_dma_desc()
657 chan->xfer_dma += len; dwc2_fill_host_dma_desc()
658 chan->xfer_len -= len; dwc2_fill_host_dma_desc()
666 struct dwc2_host_chan *chan = qh->channel; dwc2_init_non_isoc_dma_desc() local
670 (unsigned long)chan->xfer_dma, chan->xfer_len); dwc2_init_non_isoc_dma_desc()
673 * Start with chan->xfer_dma initialized in assign_and_init_hc(), then dwc2_init_non_isoc_dma_desc()
684 chan->xfer_dma = qtd->urb->dma + dwc2_init_non_isoc_dma_desc()
686 chan->xfer_len = qtd->urb->length - dwc2_init_non_isoc_dma_desc()
689 (unsigned long)chan->xfer_dma, chan->xfer_len); dwc2_init_non_isoc_dma_desc()
701 dwc2_fill_host_dma_desc(hsotg, chan, qtd, qh, n_desc); dwc2_init_non_isoc_dma_desc()
709 } while (chan->xfer_len > 0 && dwc2_init_non_isoc_dma_desc()
730 chan->ntd = n_desc; dwc2_init_non_isoc_dma_desc()
756 struct dwc2_host_chan *chan = qh->channel; dwc2_hcd_start_xfer_ddma() local
759 switch (chan->ep_type) { dwc2_hcd_start_xfer_ddma()
763 dwc2_hc_start_transfer_ddma(hsotg, chan); dwc2_hcd_start_xfer_ddma()
768 dwc2_hc_start_transfer_ddma(hsotg, chan); dwc2_hcd_start_xfer_ddma()
775 if (!chan->xfer_started) { dwc2_hcd_start_xfer_ddma()
783 chan->ntd = dwc2_max_desc_num(qh); dwc2_hcd_start_xfer_ddma()
786 dwc2_hc_start_transfer_ddma(hsotg, chan); dwc2_hcd_start_xfer_ddma()
799 struct dwc2_host_chan *chan, dwc2_cmpl_host_isoc_dma_desc()
813 if (chan->ep_is_in) dwc2_cmpl_host_isoc_dma_desc()
846 if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE) dwc2_cmpl_host_isoc_dma_desc()
861 struct dwc2_host_chan *chan, dwc2_complete_isoc_xfer_ddma()
870 qh = chan->qh; dwc2_complete_isoc_xfer_ddma()
873 if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE) { dwc2_complete_isoc_xfer_ddma()
915 rc = dwc2_cmpl_host_isoc_dma_desc(hsotg, chan, qtd, qh, dwc2_complete_isoc_xfer_ddma()
920 chan->speed); dwc2_complete_isoc_xfer_ddma()
933 struct dwc2_host_chan *chan, dwc2_update_non_isoc_urb_state_ddma()
942 if (chan->ep_is_in) dwc2_update_non_isoc_urb_state_ddma()
980 chan->hc_num); dwc2_update_non_isoc_urb_state_ddma()
984 if (chan->ep_type == USB_ENDPOINT_XFER_CONTROL) { dwc2_update_non_isoc_urb_state_ddma()
1015 struct dwc2_host_chan *chan, dwc2_process_non_isoc_desc()
1021 struct dwc2_qh *qh = chan->qh; dwc2_process_non_isoc_desc()
1037 failed = dwc2_update_non_isoc_urb_state_ddma(hsotg, chan, qtd, dma_desc, dwc2_process_non_isoc_desc()
1068 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, dwc2_process_non_isoc_desc()
1081 struct dwc2_host_chan *chan, dwc2_complete_non_isoc_xfer_ddma()
1086 struct dwc2_qh *qh = chan->qh; dwc2_complete_non_isoc_xfer_ddma()
1091 if (chan->halt_status == DWC2_HC_XFER_URB_DEQUEUE) { dwc2_complete_non_isoc_xfer_ddma()
1104 if (dwc2_process_non_isoc_desc(hsotg, chan, chnum, qtd, dwc2_complete_non_isoc_xfer_ddma()
1122 dwc2_hcd_save_data_toggle(hsotg, chan, chnum, qtd); dwc2_complete_non_isoc_xfer_ddma()
1126 if (chan->hcint & HCINTMSK_NYET) { dwc2_complete_non_isoc_xfer_ddma()
1143 * @chan: Host channel the transfer is completed on
1155 struct dwc2_host_chan *chan, int chnum, dwc2_hcd_complete_xfer_ddma()
1158 struct dwc2_qh *qh = chan->qh; dwc2_hcd_complete_xfer_ddma()
1162 if (chan->ep_type == USB_ENDPOINT_XFER_ISOC) { dwc2_hcd_complete_xfer_ddma()
1163 dwc2_complete_isoc_xfer_ddma(hsotg, chan, halt_status); dwc2_hcd_complete_xfer_ddma()
1170 dwc2_hc_halt(hsotg, chan, halt_status); dwc2_hcd_complete_xfer_ddma()
1188 dwc2_complete_non_isoc_xfer_ddma(hsotg, chan, chnum, dwc2_hcd_complete_xfer_ddma()
616 dwc2_fill_host_dma_desc(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, struct dwc2_qh *qh, int n_desc) dwc2_fill_host_dma_desc() argument
798 dwc2_cmpl_host_isoc_dma_desc(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, struct dwc2_qh *qh, u16 idx) dwc2_cmpl_host_isoc_dma_desc() argument
860 dwc2_complete_isoc_xfer_ddma(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, enum dwc2_halt_status halt_status) dwc2_complete_isoc_xfer_ddma() argument
932 dwc2_update_non_isoc_urb_state_ddma(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, struct dwc2_hcd_dma_desc *dma_desc, enum dwc2_halt_status halt_status, u32 n_bytes, int *xfer_done) dwc2_update_non_isoc_urb_state_ddma() argument
1014 dwc2_process_non_isoc_desc(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, struct dwc2_qtd *qtd, int desc_num, enum dwc2_halt_status halt_status, int *xfer_done) dwc2_process_non_isoc_desc() argument
1080 dwc2_complete_non_isoc_xfer_ddma(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, enum dwc2_halt_status halt_status) dwc2_complete_non_isoc_xfer_ddma() argument
1154 dwc2_hcd_complete_xfer_ddma(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, int chnum, enum dwc2_halt_status halt_status) dwc2_hcd_complete_xfer_ddma() argument
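
dwc2_fill_host_dma_desc() above clamps each descriptor to the hardware limit and, for IN endpoints, rounds the length up to a whole number of max-packet-size packets. A simplified standalone mirror of that arithmetic (the MAX_DMA_DESC_SIZE value is assumed here for illustration; the driver defines its own):

#include <stdio.h>

#define MAX_DMA_DESC_SIZE 131071        /* assumed value for illustration */

/* Simplified mirror of the length computation in
 * dwc2_fill_host_dma_desc(): clamp to the descriptor limit, then
 * round IN lengths up to whole packets. */
static int desc_len(int xfer_len, int max_packet, int ep_is_in)
{
        int len = xfer_len;

        if (len > MAX_DMA_DESC_SIZE - (max_packet - 1))
                len = MAX_DMA_DESC_SIZE - (max_packet - 1);

        if (ep_is_in && len > 0 && max_packet) {
                int num_packets = (len + max_packet - 1) / max_packet;

                len = num_packets * max_packet;
        }
        return len;
}

int main(void)
{
        printf("%d\n", desc_len(1000, 512, 1));   /* 1024: IN, rounded up */
        printf("%d\n", desc_len(1000, 512, 0));   /* 1000: OUT, unchanged */
        return 0;
}
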
H A Dhcd.c61 * @chan: Pointer to the channel to dump
69 struct dwc2_host_chan *chan) dwc2_dump_channel_info()
80 if (chan == NULL) dwc2_dump_channel_info()
83 hcchar = readl(hsotg->regs + HCCHAR(chan->hc_num)); dwc2_dump_channel_info()
84 hcsplt = readl(hsotg->regs + HCSPLT(chan->hc_num)); dwc2_dump_channel_info()
85 hctsiz = readl(hsotg->regs + HCTSIZ(chan->hc_num)); dwc2_dump_channel_info()
86 hc_dma = readl(hsotg->regs + HCDMA(chan->hc_num)); dwc2_dump_channel_info()
88 dev_dbg(hsotg->dev, " Assigned to channel %p:\n", chan); dwc2_dump_channel_info()
94 chan->dev_addr, chan->ep_num, chan->ep_is_in); dwc2_dump_channel_info()
95 dev_dbg(hsotg->dev, " ep_type: %d\n", chan->ep_type); dwc2_dump_channel_info()
96 dev_dbg(hsotg->dev, " max_packet: %d\n", chan->max_packet); dwc2_dump_channel_info()
97 dev_dbg(hsotg->dev, " data_pid_start: %d\n", chan->data_pid_start); dwc2_dump_channel_info()
98 dev_dbg(hsotg->dev, " xfer_started: %d\n", chan->xfer_started); dwc2_dump_channel_info()
99 dev_dbg(hsotg->dev, " halt_status: %d\n", chan->halt_status); dwc2_dump_channel_info()
100 dev_dbg(hsotg->dev, " xfer_buf: %p\n", chan->xfer_buf); dwc2_dump_channel_info()
102 (unsigned long)chan->xfer_dma); dwc2_dump_channel_info()
103 dev_dbg(hsotg->dev, " xfer_len: %d\n", chan->xfer_len); dwc2_dump_channel_info()
104 dev_dbg(hsotg->dev, " qh: %p\n", chan->qh); dwc2_dump_channel_info()
115 struct dwc2_host_chan *chan = hsotg->hc_ptr_array[i]; dwc2_dump_channel_info() local
117 dev_dbg(hsotg->dev, " %2d: %p\n", i, chan); dwc2_dump_channel_info()
559 struct dwc2_host_chan *chan, *chan_tmp; dwc2_hcd_reinit() local
578 list_for_each_entry_safe(chan, chan_tmp, &hsotg->free_hc_list, dwc2_hcd_reinit()
580 list_del_init(&chan->hc_list_entry); dwc2_hcd_reinit()
584 chan = hsotg->hc_ptr_array[i]; dwc2_hcd_reinit()
585 list_add_tail(&chan->hc_list_entry, &hsotg->free_hc_list); dwc2_hcd_reinit()
586 dwc2_hc_cleanup(hsotg, chan); dwc2_hcd_reinit()
594 struct dwc2_host_chan *chan, dwc2_hc_init_split()
599 chan->do_split = 1; dwc2_hc_init_split()
600 chan->xact_pos = qtd->isoc_split_pos; dwc2_hc_init_split()
601 chan->complete_split = qtd->complete_split; dwc2_hc_init_split()
603 chan->hub_addr = (u8)hub_addr; dwc2_hc_init_split()
604 chan->hub_port = (u8)hub_port; dwc2_hc_init_split()
608 struct dwc2_host_chan *chan, dwc2_hc_init_xfer()
616 chan->ep_type = USB_ENDPOINT_XFER_CONTROL; dwc2_hc_init_xfer()
621 chan->do_ping = 0; dwc2_hc_init_xfer()
622 chan->ep_is_in = 0; dwc2_hc_init_xfer()
623 chan->data_pid_start = DWC2_HC_PID_SETUP; dwc2_hc_init_xfer()
625 chan->xfer_dma = urb->setup_dma; dwc2_hc_init_xfer()
627 chan->xfer_buf = urb->setup_packet; dwc2_hc_init_xfer()
628 chan->xfer_len = 8; dwc2_hc_init_xfer()
634 chan->data_pid_start = qtd->data_toggle; dwc2_hc_init_xfer()
644 chan->ep_is_in = 1; dwc2_hc_init_xfer()
646 chan->ep_is_in = dwc2_hc_init_xfer()
648 if (chan->ep_is_in) dwc2_hc_init_xfer()
649 chan->do_ping = 0; dwc2_hc_init_xfer()
650 chan->data_pid_start = DWC2_HC_PID_DATA1; dwc2_hc_init_xfer()
651 chan->xfer_len = 0; dwc2_hc_init_xfer()
653 chan->xfer_dma = hsotg->status_buf_dma; dwc2_hc_init_xfer()
655 chan->xfer_buf = hsotg->status_buf; dwc2_hc_init_xfer()
662 chan->ep_type = USB_ENDPOINT_XFER_BULK; dwc2_hc_init_xfer()
666 chan->ep_type = USB_ENDPOINT_XFER_INT; dwc2_hc_init_xfer()
670 chan->ep_type = USB_ENDPOINT_XFER_ISOC; dwc2_hc_init_xfer()
678 chan->xfer_dma = urb->dma; dwc2_hc_init_xfer()
679 chan->xfer_dma += frame_desc->offset + dwc2_hc_init_xfer()
682 chan->xfer_buf = urb->buf; dwc2_hc_init_xfer()
683 chan->xfer_buf += frame_desc->offset + dwc2_hc_init_xfer()
687 chan->xfer_len = frame_desc->length - qtd->isoc_split_offset; dwc2_hc_init_xfer()
691 (chan->xfer_dma & 0x3)) dwc2_hc_init_xfer()
697 if (chan->xact_pos == DWC2_HCSPLT_XACTPOS_ALL) { dwc2_hc_init_xfer()
698 if (chan->xfer_len <= 188) dwc2_hc_init_xfer()
699 chan->xact_pos = DWC2_HCSPLT_XACTPOS_ALL; dwc2_hc_init_xfer()
701 chan->xact_pos = DWC2_HCSPLT_XACTPOS_BEGIN; dwc2_hc_init_xfer()
710 struct dwc2_host_chan *chan, dwc2_hc_setup_align_buf()
718 if (chan->ep_type != USB_ENDPOINT_XFER_ISOC) dwc2_hc_setup_align_buf()
732 if (chan->xfer_len) { dwc2_hc_setup_align_buf()
743 if (!chan->ep_is_in) dwc2_hc_setup_align_buf()
745 chan->xfer_len); dwc2_hc_setup_align_buf()
751 chan->align_buf = qh->dw_align_buf_dma; dwc2_hc_setup_align_buf()
766 struct dwc2_host_chan *chan; dwc2_assign_and_init_hc() local
784 chan = list_first_entry(&hsotg->free_hc_list, struct dwc2_host_chan, dwc2_assign_and_init_hc()
788 list_del_init(&chan->hc_list_entry); dwc2_assign_and_init_hc()
792 qh->channel = chan; dwc2_assign_and_init_hc()
799 chan->dev_addr = dwc2_hcd_get_dev_addr(&urb->pipe_info); dwc2_assign_and_init_hc()
800 chan->ep_num = dwc2_hcd_get_ep_num(&urb->pipe_info); dwc2_assign_and_init_hc()
801 chan->speed = qh->dev_speed; dwc2_assign_and_init_hc()
802 chan->max_packet = dwc2_max_packet(qh->maxp); dwc2_assign_and_init_hc()
804 chan->xfer_started = 0; dwc2_assign_and_init_hc()
805 chan->halt_status = DWC2_HC_XFER_NO_HALT_STATUS; dwc2_assign_and_init_hc()
806 chan->error_state = (qtd->error_count > 0); dwc2_assign_and_init_hc()
807 chan->halt_on_queue = 0; dwc2_assign_and_init_hc()
808 chan->halt_pending = 0; dwc2_assign_and_init_hc()
809 chan->requests = 0; dwc2_assign_and_init_hc()
818 chan->ep_is_in = (dwc2_hcd_is_pipe_in(&urb->pipe_info) != 0); dwc2_assign_and_init_hc()
819 if (chan->ep_is_in) dwc2_assign_and_init_hc()
820 chan->do_ping = 0; dwc2_assign_and_init_hc()
822 chan->do_ping = qh->ping_state; dwc2_assign_and_init_hc()
824 chan->data_pid_start = qh->data_toggle; dwc2_assign_and_init_hc()
825 chan->multi_count = 1; dwc2_assign_and_init_hc()
832 chan->xfer_dma = urb->dma + urb->actual_length; dwc2_assign_and_init_hc()
836 (chan->xfer_dma & 0x3)) dwc2_assign_and_init_hc()
839 chan->xfer_buf = (u8 *)urb->buf + urb->actual_length; dwc2_assign_and_init_hc()
842 chan->xfer_len = urb->length - urb->actual_length; dwc2_assign_and_init_hc()
843 chan->xfer_count = 0; dwc2_assign_and_init_hc()
847 dwc2_hc_init_split(hsotg, chan, qtd, urb); dwc2_assign_and_init_hc()
849 chan->do_split = 0; dwc2_assign_and_init_hc()
852 bufptr = dwc2_hc_init_xfer(hsotg, chan, qtd, bufptr); dwc2_assign_and_init_hc()
857 if (dwc2_hc_setup_align_buf(hsotg, qh, chan, urb, bufptr)) { dwc2_assign_and_init_hc()
862 chan->align_buf = 0; dwc2_assign_and_init_hc()
863 chan->multi_count = 0; dwc2_assign_and_init_hc()
864 list_add_tail(&chan->hc_list_entry, dwc2_assign_and_init_hc()
871 chan->align_buf = 0; dwc2_assign_and_init_hc()
874 if (chan->ep_type == USB_ENDPOINT_XFER_INT || dwc2_assign_and_init_hc()
875 chan->ep_type == USB_ENDPOINT_XFER_ISOC) dwc2_assign_and_init_hc()
880 chan->multi_count = dwc2_hb_mult(qh->maxp); dwc2_assign_and_init_hc()
883 chan->desc_list_addr = qh->desc_list_dma; dwc2_assign_and_init_hc()
885 dwc2_hc_init(hsotg, chan); dwc2_assign_and_init_hc()
886 chan->qh = qh; dwc2_assign_and_init_hc()
984 * @chan: Host channel descriptor associated with either a periodic or
1001 struct dwc2_host_chan *chan, dwc2_queue_transaction()
1008 if (!chan->xfer_started || dwc2_queue_transaction()
1009 chan->ep_type == USB_ENDPOINT_XFER_ISOC) { dwc2_queue_transaction()
1010 dwc2_hcd_start_xfer_ddma(hsotg, chan->qh); dwc2_queue_transaction()
1011 chan->qh->ping_state = 0; dwc2_queue_transaction()
1013 } else if (!chan->xfer_started) { dwc2_queue_transaction()
1014 dwc2_hc_start_transfer(hsotg, chan); dwc2_queue_transaction()
1015 chan->qh->ping_state = 0; dwc2_queue_transaction()
1017 } else if (chan->halt_pending) { dwc2_queue_transaction()
1019 } else if (chan->halt_on_queue) { dwc2_queue_transaction()
1020 dwc2_hc_halt(hsotg, chan, chan->halt_status); dwc2_queue_transaction()
1021 } else if (chan->do_ping) { dwc2_queue_transaction()
1022 if (!chan->xfer_started) dwc2_queue_transaction()
1023 dwc2_hc_start_transfer(hsotg, chan); dwc2_queue_transaction()
1024 } else if (!chan->ep_is_in || dwc2_queue_transaction()
1025 chan->data_pid_start == DWC2_HC_PID_SETUP) { dwc2_queue_transaction()
1026 if ((fifo_dwords_avail * 4) >= chan->max_packet) { dwc2_queue_transaction()
1027 if (!chan->xfer_started) { dwc2_queue_transaction()
1028 dwc2_hc_start_transfer(hsotg, chan); dwc2_queue_transaction()
1031 retval = dwc2_hc_continue_transfer(hsotg, chan); dwc2_queue_transaction()
1037 if (!chan->xfer_started) { dwc2_queue_transaction()
1038 dwc2_hc_start_transfer(hsotg, chan); dwc2_queue_transaction()
1041 retval = dwc2_hc_continue_transfer(hsotg, chan); dwc2_queue_transaction()
1883 struct dwc2_host_chan *chan; dwc2_hcd_dump_state() local
1899 chan = hsotg->hc_ptr_array[i]; dwc2_hcd_dump_state()
1903 chan->dev_addr, chan->ep_num, chan->ep_is_in); dwc2_hcd_dump_state()
1904 dev_dbg(hsotg->dev, " speed: %d\n", chan->speed); dwc2_hcd_dump_state()
1905 dev_dbg(hsotg->dev, " ep_type: %d\n", chan->ep_type); dwc2_hcd_dump_state()
1906 dev_dbg(hsotg->dev, " max_packet: %d\n", chan->max_packet); dwc2_hcd_dump_state()
1908 chan->data_pid_start); dwc2_hcd_dump_state()
1909 dev_dbg(hsotg->dev, " multi_count: %d\n", chan->multi_count); dwc2_hcd_dump_state()
1911 chan->xfer_started); dwc2_hcd_dump_state()
1912 dev_dbg(hsotg->dev, " xfer_buf: %p\n", chan->xfer_buf); dwc2_hcd_dump_state()
1914 (unsigned long)chan->xfer_dma); dwc2_hcd_dump_state()
1915 dev_dbg(hsotg->dev, " xfer_len: %d\n", chan->xfer_len); dwc2_hcd_dump_state()
1916 dev_dbg(hsotg->dev, " xfer_count: %d\n", chan->xfer_count); dwc2_hcd_dump_state()
1918 chan->halt_on_queue); dwc2_hcd_dump_state()
1920 chan->halt_pending); dwc2_hcd_dump_state()
1921 dev_dbg(hsotg->dev, " halt_status: %d\n", chan->halt_status); dwc2_hcd_dump_state()
1922 dev_dbg(hsotg->dev, " do_split: %d\n", chan->do_split); dwc2_hcd_dump_state()
1924 chan->complete_split); dwc2_hcd_dump_state()
1925 dev_dbg(hsotg->dev, " hub_addr: %d\n", chan->hub_addr); dwc2_hcd_dump_state()
1926 dev_dbg(hsotg->dev, " hub_port: %d\n", chan->hub_port); dwc2_hcd_dump_state()
1927 dev_dbg(hsotg->dev, " xact_pos: %d\n", chan->xact_pos); dwc2_hcd_dump_state()
1928 dev_dbg(hsotg->dev, " requests: %d\n", chan->requests); dwc2_hcd_dump_state()
1929 dev_dbg(hsotg->dev, " qh: %p\n", chan->qh); dwc2_hcd_dump_state()
1931 if (chan->xfer_started) { dwc2_hcd_dump_state()
1946 if (!(chan->xfer_started && chan->qh)) dwc2_hcd_dump_state()
1949 list_for_each_entry(qtd, &chan->qh->qtd_list, qtd_list_entry) { dwc2_hcd_dump_state()
2710 struct dwc2_host_chan *chan = hsotg->hc_ptr_array[i]; dwc2_hcd_free() local
2712 if (chan != NULL) { dwc2_hcd_free()
2713 dev_dbg(hsotg->dev, "HCD Free channel #%i, chan=%p\n", dwc2_hcd_free()
2714 i, chan); dwc2_hcd_free()
2716 kfree(chan); dwc2_hcd_free()
68 dwc2_dump_channel_info(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan) dwc2_dump_channel_info() argument
593 dwc2_hc_init_split(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, struct dwc2_hcd_urb *urb) dwc2_hc_init_split() argument
607 dwc2_hc_init_xfer(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, struct dwc2_qtd *qtd, void *bufptr) dwc2_hc_init_xfer() argument
709 dwc2_hc_setup_align_buf(struct dwc2_hsotg *hsotg, struct dwc2_qh *qh, struct dwc2_host_chan *chan, struct dwc2_hcd_urb *urb, void *bufptr) dwc2_hc_setup_align_buf() argument
1000 dwc2_queue_transaction(struct dwc2_hsotg *hsotg, struct dwc2_host_chan *chan, u16 fifo_dwords_avail) dwc2_queue_transaction() argument
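
dwc2_assign_and_init_hc() and dwc2_hcd_reinit() above manage channels through a plain free list: take with list_first_entry() plus list_del_init(), give back with list_add_tail(). The pattern in isolation, with the struct abbreviated:

#include <linux/list.h>

struct hc_sketch {
        struct list_head hc_list_entry;
        /* ... remaining dwc2_host_chan fields ... */
};

/* Allocate: pop the first free channel, if any. */
static struct hc_sketch *hc_get(struct list_head *free_hc_list)
{
        struct hc_sketch *chan;

        if (list_empty(free_hc_list))
                return NULL;
        chan = list_first_entry(free_hc_list, struct hc_sketch,
                                hc_list_entry);
        list_del_init(&chan->hc_list_entry);
        return chan;
}

/* Release: append back to the free list, as the teardown paths above do. */
static void hc_put(struct list_head *free_hc_list, struct hc_sketch *chan)
{
        list_add_tail(&chan->hc_list_entry, free_hc_list);
}
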
/linux-4.1.27/drivers/iio/adc/
H A Dxilinx-xadc-events.c33 const struct iio_chan_spec *chan; xadc_handle_event() local
39 chan = xadc_event_to_channel(indio_dev, event); xadc_handle_event()
41 if (chan->type == IIO_TEMP) { xadc_handle_event()
47 IIO_UNMOD_EVENT_CODE(chan->type, chan->channel, xadc_handle_event()
57 IIO_UNMOD_EVENT_CODE(chan->type, chan->channel, xadc_handle_event()
71 static unsigned xadc_get_threshold_offset(const struct iio_chan_spec *chan, xadc_get_threshold_offset() argument
76 if (chan->type == IIO_TEMP) { xadc_get_threshold_offset()
79 if (chan->channel < 2) xadc_get_threshold_offset()
80 offset = chan->channel + 1; xadc_get_threshold_offset()
82 offset = chan->channel + 6; xadc_get_threshold_offset()
91 static unsigned int xadc_get_alarm_mask(const struct iio_chan_spec *chan) xadc_get_alarm_mask() argument
93 if (chan->type == IIO_TEMP) { xadc_get_alarm_mask()
96 switch (chan->channel) { xadc_get_alarm_mask()
117 const struct iio_chan_spec *chan, enum iio_event_type type, xadc_read_event_config()
122 return (bool)(xadc->alarm_mask & xadc_get_alarm_mask(chan)); xadc_read_event_config()
126 const struct iio_chan_spec *chan, enum iio_event_type type, xadc_write_event_config()
129 unsigned int alarm = xadc_get_alarm_mask(chan); xadc_write_event_config()
165 const struct iio_chan_spec *chan, enum iio_event_type type, xadc_read_event_value()
169 unsigned int offset = xadc_get_threshold_offset(chan, dir); xadc_read_event_value()
189 const struct iio_chan_spec *chan, enum iio_event_type type, xadc_write_event_value()
193 unsigned int offset = xadc_get_threshold_offset(chan, dir); xadc_write_event_value()
216 if (chan->type == IIO_TEMP) { xadc_write_event_value()
116 xadc_read_event_config(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir) xadc_read_event_config() argument
125 xadc_write_event_config(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir, int state) xadc_write_event_config() argument
164 xadc_read_event_value(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir, enum iio_event_info info, int *val, int *val2) xadc_read_event_value() argument
188 xadc_write_event_value(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir, enum iio_event_info info, int val, int val2) xadc_write_event_value() argument
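
xadc_handle_event() above translates a hardware alarm into an IIO event code built with IIO_UNMOD_EVENT_CODE(). A sketch of the reporting step, with the threshold type and rising direction chosen as example values (the real driver picks them per alarm source):

#include <linux/iio/iio.h>
#include <linux/iio/events.h>

/* Push a threshold event for a channel; iio_get_time_ns() is the
 * timestamp helper as of this kernel version. */
static void report_thresh(struct iio_dev *indio_dev,
                          const struct iio_chan_spec *chan)
{
        iio_push_event(indio_dev,
                       IIO_UNMOD_EVENT_CODE(chan->type, chan->channel,
                                            IIO_EV_TYPE_THRESH,
                                            IIO_EV_DIR_RISING),
                       iio_get_time_ns());
}
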
H A Dcc10001_adc.c193 struct iio_chan_spec const *chan) cc10001_adc_read_raw_voltage()
204 cc10001_adc_start(adc_dev, chan->channel); cc10001_adc_read_raw_voltage()
206 val = cc10001_adc_poll_done(indio_dev, chan->channel, delay_ns); cc10001_adc_read_raw_voltage()
214 struct iio_chan_spec const *chan, cc10001_adc_read_raw()
225 *val = cc10001_adc_read_raw_voltage(indio_dev, chan); cc10001_adc_read_raw()
238 *val2 = chan->scan_type.realbits; cc10001_adc_read_raw()
281 struct iio_chan_spec *chan = &chan_array[idx]; cc10001_adc_channel_init() local
283 chan->type = IIO_VOLTAGE; cc10001_adc_channel_init()
284 chan->indexed = 1; cc10001_adc_channel_init()
285 chan->channel = bit; cc10001_adc_channel_init()
286 chan->scan_index = idx; cc10001_adc_channel_init()
287 chan->scan_type.sign = 'u'; cc10001_adc_channel_init()
288 chan->scan_type.realbits = 10; cc10001_adc_channel_init()
289 chan->scan_type.storagebits = 16; cc10001_adc_channel_init()
290 chan->info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE); cc10001_adc_channel_init()
291 chan->info_mask_separate = BIT(IIO_CHAN_INFO_RAW); cc10001_adc_channel_init()
192 cc10001_adc_read_raw_voltage(struct iio_dev *indio_dev, struct iio_chan_spec const *chan) cc10001_adc_read_raw_voltage() argument
213 cc10001_adc_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long mask) cc10001_adc_read_raw() argument
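
cc10001_adc_channel_init() above fills each iio_chan_spec programmatically. The same channel expressed as a static initializer, with every value taken directly from the listing (channel 0 chosen as the example):

#include <linux/iio/iio.h>

static const struct iio_chan_spec cc10001_chan_example = {
        .type = IIO_VOLTAGE,
        .indexed = 1,
        .channel = 0,
        .scan_index = 0,
        .scan_type = {
                .sign = 'u',
                .realbits = 10,
                .storagebits = 16,
        },
        .info_mask_separate = BIT(IIO_CHAN_INFO_RAW),
        .info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE),
};
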
/linux-4.1.27/drivers/media/pci/ngene/
H A Dngene-cards.c53 static int tuner_attach_stv6110(struct ngene_channel *chan) tuner_attach_stv6110() argument
57 chan->dev->card_info->fe_config[chan->number]; tuner_attach_stv6110()
59 chan->dev->card_info->tuner_config[chan->number]; tuner_attach_stv6110()
63 if (chan->number < 2) tuner_attach_stv6110()
64 i2c = &chan->dev->channel[0].i2c_adapter; tuner_attach_stv6110()
66 i2c = &chan->dev->channel[1].i2c_adapter; tuner_attach_stv6110()
68 ctl = dvb_attach(stv6110x_attach, chan->fe, tunerconf, i2c); tuner_attach_stv6110()
92 struct ngene_channel *chan = fe->sec_priv; drxk_gate_ctrl() local
96 down(&chan->dev->pll_mutex); drxk_gate_ctrl()
97 status = chan->gate_ctrl(fe, 1); drxk_gate_ctrl()
99 status = chan->gate_ctrl(fe, 0); drxk_gate_ctrl()
100 up(&chan->dev->pll_mutex); drxk_gate_ctrl()
105 static int tuner_attach_tda18271(struct ngene_channel *chan) tuner_attach_tda18271() argument
110 i2c = &chan->dev->channel[0].i2c_adapter; tuner_attach_tda18271()
111 if (chan->fe->ops.i2c_gate_ctrl) tuner_attach_tda18271()
112 chan->fe->ops.i2c_gate_ctrl(chan->fe, 1); tuner_attach_tda18271()
113 fe = dvb_attach(tda18271c2dd_attach, chan->fe, i2c, 0x60); tuner_attach_tda18271()
114 if (chan->fe->ops.i2c_gate_ctrl) tuner_attach_tda18271()
115 chan->fe->ops.i2c_gate_ctrl(chan->fe, 0); tuner_attach_tda18271()
124 static int tuner_attach_probe(struct ngene_channel *chan) tuner_attach_probe() argument
126 if (chan->demod_type == 0) tuner_attach_probe()
127 return tuner_attach_stv6110(chan); tuner_attach_probe()
128 if (chan->demod_type == 1) tuner_attach_probe()
129 return tuner_attach_tda18271(chan); tuner_attach_probe()
133 static int demod_attach_stv0900(struct ngene_channel *chan) demod_attach_stv0900() argument
137 chan->dev->card_info->fe_config[chan->number]; demod_attach_stv0900()
142 if (chan->number < 2) demod_attach_stv0900()
143 i2c = &chan->dev->channel[0].i2c_adapter; demod_attach_stv0900()
145 i2c = &chan->dev->channel[1].i2c_adapter; demod_attach_stv0900()
147 chan->fe = dvb_attach(stv090x_attach, feconf, i2c, demod_attach_stv0900()
148 (chan->number & 1) == 0 ? STV090x_DEMODULATOR_0 demod_attach_stv0900()
150 if (chan->fe == NULL) { demod_attach_stv0900()
157 chan->fe->analog_demod_priv = chan; demod_attach_stv0900()
159 if (!dvb_attach(lnbh24_attach, chan->fe, i2c, 0, demod_attach_stv0900()
160 0, chan->dev->card_info->lnb[chan->number])) { demod_attach_stv0900()
162 dvb_frontend_detach(chan->fe); demod_attach_stv0900()
163 chan->fe = NULL; demod_attach_stv0900()
172 struct ngene_channel *chan = fe->analog_demod_priv; cineS2_tuner_i2c_lock() local
175 down(&chan->dev->pll_mutex); cineS2_tuner_i2c_lock()
177 up(&chan->dev->pll_mutex); cineS2_tuner_i2c_lock()
215 static int demod_attach_drxk(struct ngene_channel *chan, demod_attach_drxk() argument
223 config.adr = 0x29 + (chan->number ^ 2); demod_attach_drxk()
225 chan->fe = dvb_attach(drxk_attach, &config, i2c); demod_attach_drxk()
226 if (!chan->fe) { demod_attach_drxk()
230 chan->fe->sec_priv = chan; demod_attach_drxk()
231 chan->gate_ctrl = chan->fe->ops.i2c_gate_ctrl; demod_attach_drxk()
232 chan->fe->ops.i2c_gate_ctrl = drxk_gate_ctrl; demod_attach_drxk()
236 static int cineS2_probe(struct ngene_channel *chan) cineS2_probe() argument
245 if (chan->number < 2) cineS2_probe()
246 i2c = &chan->dev->channel[0].i2c_adapter; cineS2_probe()
248 i2c = &chan->dev->channel[1].i2c_adapter; cineS2_probe()
250 if (port_has_stv0900(i2c, chan->number)) { cineS2_probe()
251 chan->demod_type = 0; cineS2_probe()
252 fe_conf = chan->dev->card_info->fe_config[chan->number]; cineS2_probe()
254 rc = demod_attach_stv0900(chan); cineS2_probe()
255 if (rc < 0 || chan->number < 2) cineS2_probe()
262 switch (chan->number) { cineS2_probe()
279 } else if (port_has_drxk(i2c, chan->number^2)) { cineS2_probe()
280 chan->demod_type = 1; cineS2_probe()
281 demod_attach_drxk(chan, i2c); cineS2_probe()
283 printk(KERN_ERR "No demod found on chan %d\n", chan->number); cineS2_probe()
304 static int demod_attach_lg330x(struct ngene_channel *chan) demod_attach_lg330x() argument
306 chan->fe = dvb_attach(lgdt330x_attach, &aver_m780, &chan->i2c_adapter); demod_attach_lg330x()
307 if (chan->fe == NULL) { demod_attach_lg330x()
312 dvb_attach(mt2131_attach, chan->fe, &chan->i2c_adapter, demod_attach_lg330x()
315 return (chan->fe) ? 0 : -ENODEV; demod_attach_lg330x()
318 static int demod_attach_drxd(struct ngene_channel *chan) demod_attach_drxd() argument
322 feconf = chan->dev->card_info->fe_config[chan->number]; demod_attach_drxd()
324 chan->fe = dvb_attach(drxd_attach, feconf, chan, demod_attach_drxd()
325 &chan->i2c_adapter, &chan->dev->pci_dev->dev); demod_attach_drxd()
326 if (!chan->fe) { demod_attach_drxd()
333 static int tuner_attach_dtt7520x(struct ngene_channel *chan) tuner_attach_dtt7520x() argument
337 feconf = chan->dev->card_info->fe_config[chan->number]; tuner_attach_dtt7520x()
339 if (!dvb_attach(dvb_pll_attach, chan->fe, feconf->pll_address, tuner_attach_dtt7520x()
340 &chan->i2c_adapter, tuner_attach_dtt7520x()
538 struct ngene_channel *chan = priv; osc_deviation() local
539 struct i2c_adapter *adap = &chan->i2c_adapter; osc_deviation()
546 eeprom_write_ushort(adap, 0x1000 + chan->number, data); osc_deviation()
548 if (eeprom_read_ushort(adap, 0x1000 + chan->number, &data)) osc_deviation()
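
The drxk_gate_ctrl() fragments above reassemble into roughly the following: demod_attach_drxk() saves the demod's original i2c_gate_ctrl in chan->gate_ctrl, and the wrapper serializes each gate-open/gate-close pair with the device-wide pll_mutex (a reconstruction from the listed lines, not verbatim source):

static int drxk_gate_ctrl(struct dvb_frontend *fe, int enable)
{
        struct ngene_channel *chan = fe->sec_priv;
        int status;

        if (enable) {
                /* hold the PLL mutex from gate open ... */
                down(&chan->dev->pll_mutex);
                status = chan->gate_ctrl(fe, 1);
        } else {
                /* ... until gate close */
                status = chan->gate_ctrl(fe, 0);
                up(&chan->dev->pll_mutex);
        }
        return status;
}
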
H A Dngene-core.c92 struct ngene_channel *chan = (struct ngene_channel *)data; demux_tasklet() local
93 struct SBufferHeader *Cur = chan->nextBuffer; demux_tasklet()
95 spin_lock_irq(&chan->state_lock); demux_tasklet()
98 if (chan->mode & NGENE_IO_TSOUT) { demux_tasklet()
99 u32 Flags = chan->DataFormatFlags; demux_tasklet()
102 if (chan->pBufferExchange) { demux_tasklet()
103 if (!chan->pBufferExchange(chan, demux_tasklet()
105 chan->Capture1Length, demux_tasklet()
119 if (chan->HWState == HWSTATE_RUN) { demux_tasklet()
128 chan->HWState = HWSTATE_RUN; demux_tasklet()
132 if (chan->HWState == HWSTATE_RUN) { demux_tasklet()
137 if (chan->AudioDTOUpdated) { demux_tasklet()
140 chan->AudioDTOValue); demux_tasklet()
142 chan->AudioDTOValue; demux_tasklet()
143 chan->AudioDTOUpdated = 0; demux_tasklet()
146 if (chan->HWState == HWSTATE_RUN) { demux_tasklet()
147 u32 Flags = chan->DataFormatFlags; demux_tasklet()
148 IBufferExchange *exch1 = chan->pBufferExchange; demux_tasklet()
149 IBufferExchange *exch2 = chan->pBufferExchange2; demux_tasklet()
154 spin_unlock_irq(&chan->state_lock); demux_tasklet()
156 exch1(chan, Cur->Buffer1, demux_tasklet()
157 chan->Capture1Length, demux_tasklet()
161 exch2(chan, Cur->Buffer2, demux_tasklet()
162 chan->Capture2Length, demux_tasklet()
165 spin_lock_irq(&chan->state_lock); demux_tasklet()
166 } else if (chan->HWState != HWSTATE_STOP) demux_tasklet()
167 chan->HWState = HWSTATE_RUN; demux_tasklet()
172 chan->nextBuffer = Cur; demux_tasklet()
174 spin_unlock_irq(&chan->state_lock); demux_tasklet()
514 static void flush_buffers(struct ngene_channel *chan) flush_buffers() argument
520 spin_lock_irq(&chan->state_lock); flush_buffers()
521 val = chan->nextBuffer->ngeneBuffer.SR.Flags & 0x80; flush_buffers()
522 spin_unlock_irq(&chan->state_lock); flush_buffers()
526 static void clear_buffers(struct ngene_channel *chan) clear_buffers() argument
528 struct SBufferHeader *Cur = chan->nextBuffer; clear_buffers()
532 if (chan->mode & NGENE_IO_TSOUT) clear_buffers()
534 chan->Capture1Length, clear_buffers()
535 chan->DataFormatFlags); clear_buffers()
537 } while (Cur != chan->nextBuffer); clear_buffers()
539 if (chan->mode & NGENE_IO_TSOUT) { clear_buffers()
540 chan->nextBuffer->ngeneBuffer.SR.DTOUpdate = clear_buffers()
541 chan->AudioDTOValue; clear_buffers()
542 chan->AudioDTOUpdated = 0; clear_buffers()
544 Cur = chan->TSIdleBuffer.Head; clear_buffers()
550 chan->Capture1Length, clear_buffers()
551 chan->DataFormatFlags); clear_buffers()
553 } while (Cur != chan->TSIdleBuffer.Head); clear_buffers()
560 struct ngene_channel *chan = &dev->channel[stream]; ngene_command_stream_control() local
572 if (chan->mode & NGENE_IO_TSOUT) ngene_command_stream_control()
585 chan->Mode = mode; ngene_command_stream_control()
588 spin_lock_irq(&chan->state_lock); ngene_command_stream_control()
589 if (chan->State == KSSTATE_RUN) { ngene_command_stream_control()
590 chan->State = KSSTATE_ACQUIRE; ngene_command_stream_control()
591 chan->HWState = HWSTATE_STOP; ngene_command_stream_control()
592 spin_unlock_irq(&chan->state_lock); ngene_command_stream_control()
597 /* clear_buffers(chan); */ ngene_command_stream_control()
598 flush_buffers(chan); ngene_command_stream_control()
602 spin_unlock_irq(&chan->state_lock); ngene_command_stream_control()
609 chan->Capture1Length / AUDIO_BLOCK_SIZE; ngene_command_stream_control()
610 com.cmd.StreamControl.Buffer_Address = chan->RingBuffer.PAHead; ngene_command_stream_control()
613 chan->Capture1Length / TS_BLOCK_SIZE; ngene_command_stream_control()
615 chan->Capture1Length / TS_BLOCK_SIZE; ngene_command_stream_control()
617 chan->TSRingBuffer.PAHead; ngene_command_stream_control()
618 if (chan->mode & NGENE_IO_TSOUT) { ngene_command_stream_control()
620 chan->Capture1Length / TS_BLOCK_SIZE; ngene_command_stream_control()
624 com.cmd.StreamControl.BytesPerVideoLine = chan->nBytesPerLine; ngene_command_stream_control()
625 com.cmd.StreamControl.MaxLinesPerField = chan->nLines; ngene_command_stream_control()
627 com.cmd.StreamControl.Buffer_Address = chan->RingBuffer.PAHead; ngene_command_stream_control()
631 chan->nVBILines; ngene_command_stream_control()
634 chan->nBytesPerVBILine; ngene_command_stream_control()
640 spin_lock_irq(&chan->state_lock); ngene_command_stream_control()
642 chan->nextBuffer = chan->RingBuffer.Head; ngene_command_stream_control()
657 chan->nextBuffer = chan->TSRingBuffer.Head; ngene_command_stream_control()
659 if (chan->mode & NGENE_IO_TSOUT) { ngene_command_stream_control()
682 chan->nextBuffer = chan->RingBuffer.Head; ngene_command_stream_control()
687 ITUDecoderSetup[chan->itumode], 16); ngene_command_stream_control()
691 clear_buffers(chan); ngene_command_stream_control()
692 chan->State = KSSTATE_RUN; ngene_command_stream_control()
694 chan->HWState = HWSTATE_RUN; ngene_command_stream_control()
696 chan->HWState = HWSTATE_STARTUP; ngene_command_stream_control()
697 spin_unlock_irq(&chan->state_lock); ngene_command_stream_control()
707 void set_transfer(struct ngene_channel *chan, int state) set_transfer() argument
710 struct ngene *dev = chan->dev; set_transfer()
719 if (chan->running) { set_transfer()
724 if (!chan->running) { set_transfer()
731 dev->card_info->switch_ctrl(chan, 1, state ^ 1); set_transfer()
734 spin_lock_irq(&chan->state_lock); set_transfer()
740 if (chan->mode & (NGENE_IO_TSIN | NGENE_IO_TSOUT)) { set_transfer()
741 chan->Capture1Length = 512 * 188; set_transfer()
744 if (chan->mode & NGENE_IO_TSOUT) { set_transfer()
745 chan->pBufferExchange = tsout_exchange; set_transfer()
747 chan->AudioDTOValue = 0x80000000; set_transfer()
748 chan->AudioDTOUpdated = 1; set_transfer()
750 if (chan->mode & NGENE_IO_TSIN) set_transfer()
751 chan->pBufferExchange = tsin_exchange; set_transfer()
752 spin_unlock_irq(&chan->state_lock); set_transfer()
757 ret = ngene_command_stream_control(dev, chan->number, set_transfer()
760 chan->running = state; set_transfer()
765 spin_lock_irq(&chan->state_lock); set_transfer()
766 chan->pBufferExchange = NULL; set_transfer()
768 spin_unlock_irq(&chan->state_lock); set_transfer()
827 struct ngene_channel *chan; free_common_buffers() local
830 chan = &dev->channel[i]; free_common_buffers()
831 free_idlebuffer(dev, &chan->TSIdleBuffer, &chan->TSRingBuffer); free_common_buffers()
832 free_ringbuffer(dev, &chan->RingBuffer); free_common_buffers()
833 free_ringbuffer(dev, &chan->TSRingBuffer); free_common_buffers()
1417 static void release_channel(struct ngene_channel *chan) release_channel() argument
1419 struct dvb_demux *dvbdemux = &chan->demux; release_channel()
1420 struct ngene *dev = chan->dev; release_channel()
1422 if (chan->running) release_channel()
1423 set_transfer(chan, 0); release_channel()
1425 tasklet_kill(&chan->demux_tasklet); release_channel()
1427 if (chan->ci_dev) { release_channel()
1428 dvb_unregister_device(chan->ci_dev); release_channel()
1429 chan->ci_dev = NULL; release_channel()
1432 if (chan->fe2) release_channel()
1433 dvb_unregister_frontend(chan->fe2); release_channel()
1435 if (chan->fe) { release_channel()
1436 dvb_unregister_frontend(chan->fe); release_channel()
1437 dvb_frontend_detach(chan->fe); release_channel()
1438 chan->fe = NULL; release_channel()
1441 if (chan->has_demux) { release_channel()
1442 dvb_net_release(&chan->dvbnet); release_channel()
1445 &chan->hw_frontend); release_channel()
1447 &chan->mem_frontend); release_channel()
1448 dvb_dmxdev_release(&chan->dmxdev); release_channel()
1449 dvb_dmx_release(&chan->demux); release_channel()
1450 chan->has_demux = false; release_channel()
1453 if (chan->has_adapter) { release_channel()
1454 dvb_unregister_adapter(&dev->adapter[chan->number]); release_channel()
1455 chan->has_adapter = false; release_channel()
1459 static int init_channel(struct ngene_channel *chan) init_channel() argument
1461 int ret = 0, nr = chan->number; init_channel()
1463 struct dvb_demux *dvbdemux = &chan->demux; init_channel()
1464 struct ngene *dev = chan->dev; init_channel()
1468 tasklet_init(&chan->demux_tasklet, demux_tasklet, (unsigned long)chan); init_channel()
1469 chan->users = 0; init_channel()
1470 chan->type = io; init_channel()
1471 chan->mode = chan->type; /* for now only one mode */ init_channel()
1474 chan->fe = NULL; init_channel()
1476 ret = ni->demod_attach[nr](chan); init_channel()
1480 if (chan->fe && ni->tuner_attach[nr]) { init_channel()
1481 ret = ni->tuner_attach[nr](chan); init_channel()
1492 chan->DataFormatFlags = DF_SWAP32; init_channel()
1498 &chan->dev->pci_dev->dev, init_channel()
1504 chan->has_adapter = true; init_channel()
1511 set_transfer(chan, 1); init_channel()
1512 chan->dev->channel[2].DataFormatFlags = DF_SWAP32; init_channel()
1513 set_transfer(&chan->dev->channel[2], 1); init_channel()
1514 dvb_register_device(adapter, &chan->ci_dev, init_channel()
1515 &ngene_dvbdev_ci, (void *) chan, init_channel()
1517 if (!chan->ci_dev) init_channel()
1521 if (chan->fe) { init_channel()
1522 if (dvb_register_frontend(adapter, chan->fe) < 0) init_channel()
1524 chan->has_demux = true; init_channel()
1526 if (chan->fe2) { init_channel()
1527 if (dvb_register_frontend(adapter, chan->fe2) < 0) init_channel()
1529 chan->fe2->tuner_priv = chan->fe->tuner_priv; init_channel()
1530 memcpy(&chan->fe2->ops.tuner_ops, init_channel()
1531 &chan->fe->ops.tuner_ops, init_channel()
1535 if (chan->has_demux) { init_channel()
1538 ngene_stop_feed, chan); init_channel()
1539 ret = my_dvb_dmxdev_ts_card_init(&chan->dmxdev, &chan->demux, init_channel()
1540 &chan->hw_frontend, init_channel()
1541 &chan->mem_frontend, adapter); init_channel()
1542 ret = dvb_net_init(adapter, &chan->dvbnet, &chan->demux.dmx); init_channel()
1548 if (chan->fe) { init_channel()
1549 dvb_frontend_detach(chan->fe); init_channel()
1550 chan->fe = NULL; init_channel()
1552 release_channel(chan); init_channel()
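
init_channel() above wires the per-channel demux tasklet with the channel pointer packed into the tasklet's data word, and release_channel() kills it on teardown. The wiring in isolation (struct ngene_channel is the driver-internal type; the tasklet body is abbreviated):

#include <linux/interrupt.h>

/* The channel pointer travels through the tasklet's unsigned long
 * data argument, as demux_tasklet() above recovers it. */
static void demux_tasklet_sketch(unsigned long data)
{
        struct ngene_channel *chan = (struct ngene_channel *)data;

        spin_lock_irq(&chan->state_lock);
        /* ... drain the chan->nextBuffer ring ... */
        spin_unlock_irq(&chan->state_lock);
}

static void wire_demux(struct ngene_channel *chan)
{
        tasklet_init(&chan->demux_tasklet, demux_tasklet_sketch,
                     (unsigned long)chan);
        /* teardown path: tasklet_kill(&chan->demux_tasklet); */
}
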
H A Dngene-dvb.c54 struct ngene_channel *chan = dvbdev->priv; ts_write() local
55 struct ngene *dev = chan->dev; ts_write()
71 struct ngene_channel *chan = dvbdev->priv; ts_read() local
72 struct ngene *dev = chan->dev; ts_read()
132 struct ngene_channel *chan = priv; tsin_exchange() local
133 struct ngene *dev = chan->dev; tsin_exchange()
139 if (dev->ci.en && chan->number == 2) { tsin_exchange()
167 if (chan->users > 0) tsin_exchange()
168 dvb_dmx_swfilter(&chan->demux, buf, len); tsin_exchange()
175 struct ngene_channel *chan = priv; tsout_exchange() local
176 struct ngene *dev = chan->dev; tsout_exchange()
198 struct ngene_channel *chan = dvbdmx->priv; ngene_start_feed() local
200 if (chan->users == 0) { ngene_start_feed()
201 if (!chan->dev->cmd_timeout_workaround || !chan->running) ngene_start_feed()
202 set_transfer(chan, 1); ngene_start_feed()
205 return ++chan->users; ngene_start_feed()
211 struct ngene_channel *chan = dvbdmx->priv; ngene_stop_feed() local
213 if (--chan->users) ngene_stop_feed()
214 return chan->users; ngene_stop_feed()
216 if (!chan->dev->cmd_timeout_workaround) ngene_stop_feed()
217 set_transfer(chan, 0); ngene_stop_feed()
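
ngene_start_feed() and ngene_stop_feed() above gate the hardware transfer on a per-channel user count: the first user switches it on, the last one switches it off. A standalone mirror of that refcounting (set_transfer() stubbed out for demonstration):

#include <stdio.h>

static int users;

static void set_transfer(int state) { printf("transfer %d\n", state); }

static int start_feed(void)
{
        if (users == 0)
                set_transfer(1);        /* first user powers the stream */
        return ++users;
}

static int stop_feed(void)
{
        if (--users)
                return users;           /* others still listening */
        set_transfer(0);                /* last user stops the stream */
        return 0;
}

int main(void)
{
        start_feed();   /* prints "transfer 1" */
        start_feed();
        stop_feed();
        stop_feed();    /* prints "transfer 0" */
        return 0;
}
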
/linux-4.1.27/drivers/iio/
H A Dinkern.c85 const struct iio_chan_spec *chan = NULL; iio_chan_spec_from_name() local
90 chan = &indio_dev->channels[i]; iio_chan_spec_from_name()
93 return chan; iio_chan_spec_from_name()
191 struct iio_channel *chan = NULL; of_iio_channel_get_by_name() local
206 chan = of_iio_channel_get(np, index); of_iio_channel_get_by_name()
207 if (!IS_ERR(chan) || PTR_ERR(chan) == -EPROBE_DEFER) of_iio_channel_get_by_name()
225 return chan; of_iio_channel_get_by_name()
434 struct iio_channel *chan = &channels[0]; iio_channel_release_all() local
436 while (chan->indio_dev) { iio_channel_release_all()
437 iio_device_put(chan->indio_dev); iio_channel_release_all()
438 chan++; iio_channel_release_all()
444 static int iio_channel_read(struct iio_channel *chan, int *val, int *val2, iio_channel_read() argument
455 if(!iio_channel_has_info(chan->channel, info)) iio_channel_read()
458 if (chan->indio_dev->info->read_raw_multi) { iio_channel_read()
459 ret = chan->indio_dev->info->read_raw_multi(chan->indio_dev, iio_channel_read()
460 chan->channel, INDIO_MAX_RAW_ELEMENTS, iio_channel_read()
465 ret = chan->indio_dev->info->read_raw(chan->indio_dev, iio_channel_read()
466 chan->channel, val, val2, info); iio_channel_read()
471 int iio_read_channel_raw(struct iio_channel *chan, int *val) iio_read_channel_raw() argument
475 mutex_lock(&chan->indio_dev->info_exist_lock); iio_read_channel_raw()
476 if (chan->indio_dev->info == NULL) { iio_read_channel_raw()
481 ret = iio_channel_read(chan, val, NULL, IIO_CHAN_INFO_RAW); iio_read_channel_raw()
483 mutex_unlock(&chan->indio_dev->info_exist_lock); iio_read_channel_raw()
489 int iio_read_channel_average_raw(struct iio_channel *chan, int *val) iio_read_channel_average_raw() argument
493 mutex_lock(&chan->indio_dev->info_exist_lock); iio_read_channel_average_raw()
494 if (chan->indio_dev->info == NULL) { iio_read_channel_average_raw()
499 ret = iio_channel_read(chan, val, NULL, IIO_CHAN_INFO_AVERAGE_RAW); iio_read_channel_average_raw()
501 mutex_unlock(&chan->indio_dev->info_exist_lock); iio_read_channel_average_raw()
507 static int iio_convert_raw_to_processed_unlocked(struct iio_channel *chan, iio_convert_raw_to_processed_unlocked() argument
514 ret = iio_channel_read(chan, &offset, NULL, IIO_CHAN_INFO_OFFSET); iio_convert_raw_to_processed_unlocked()
518 scale_type = iio_channel_read(chan, &scale_val, &scale_val2, iio_convert_raw_to_processed_unlocked()
557 int iio_convert_raw_to_processed(struct iio_channel *chan, int raw, iio_convert_raw_to_processed() argument
562 mutex_lock(&chan->indio_dev->info_exist_lock); iio_convert_raw_to_processed()
563 if (chan->indio_dev->info == NULL) { iio_convert_raw_to_processed()
568 ret = iio_convert_raw_to_processed_unlocked(chan, raw, processed, iio_convert_raw_to_processed()
571 mutex_unlock(&chan->indio_dev->info_exist_lock); iio_convert_raw_to_processed()
577 int iio_read_channel_processed(struct iio_channel *chan, int *val) iio_read_channel_processed() argument
581 mutex_lock(&chan->indio_dev->info_exist_lock); iio_read_channel_processed()
582 if (chan->indio_dev->info == NULL) { iio_read_channel_processed()
587 if (iio_channel_has_info(chan->channel, IIO_CHAN_INFO_PROCESSED)) { iio_read_channel_processed()
588 ret = iio_channel_read(chan, val, NULL, iio_read_channel_processed()
591 ret = iio_channel_read(chan, val, NULL, IIO_CHAN_INFO_RAW); iio_read_channel_processed()
594 ret = iio_convert_raw_to_processed_unlocked(chan, *val, val, 1); iio_read_channel_processed()
598 mutex_unlock(&chan->indio_dev->info_exist_lock); iio_read_channel_processed()
604 int iio_read_channel_scale(struct iio_channel *chan, int *val, int *val2) iio_read_channel_scale() argument
608 mutex_lock(&chan->indio_dev->info_exist_lock); iio_read_channel_scale()
609 if (chan->indio_dev->info == NULL) { iio_read_channel_scale()
614 ret = iio_channel_read(chan, val, val2, IIO_CHAN_INFO_SCALE); iio_read_channel_scale()
616 mutex_unlock(&chan->indio_dev->info_exist_lock); iio_read_channel_scale()
622 int iio_get_channel_type(struct iio_channel *chan, enum iio_chan_type *type) iio_get_channel_type() argument
627 mutex_lock(&chan->indio_dev->info_exist_lock); iio_get_channel_type()
628 if (chan->indio_dev->info == NULL) { iio_get_channel_type()
633 *type = chan->channel->type; iio_get_channel_type()
635 mutex_unlock(&chan->indio_dev->info_exist_lock); iio_get_channel_type()
641 static int iio_channel_write(struct iio_channel *chan, int val, int val2, iio_channel_write() argument
644 return chan->indio_dev->info->write_raw(chan->indio_dev, iio_channel_write()
645 chan->channel, val, val2, info); iio_channel_write()
648 int iio_write_channel_raw(struct iio_channel *chan, int val) iio_write_channel_raw() argument
652 mutex_lock(&chan->indio_dev->info_exist_lock); iio_write_channel_raw()
653 if (chan->indio_dev->info == NULL) { iio_write_channel_raw()
658 ret = iio_channel_write(chan, val, 0, IIO_CHAN_INFO_RAW); iio_write_channel_raw()
660 mutex_unlock(&chan->indio_dev->info_exist_lock); iio_write_channel_raw()
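
inkern.c above implements the in-kernel IIO consumer interface. A consumer-side sketch of using it; the channel name "battery-voltage" is an assumption, not from the listing:

#include <linux/err.h>
#include <linux/iio/consumer.h>

static int read_battery_mv(struct device *dev, int *mv)
{
        struct iio_channel *chan;
        int ret;

        chan = iio_channel_get(dev, "battery-voltage");
        if (IS_ERR(chan))
                return PTR_ERR(chan);

        /* iio_read_channel_processed() applies offset and scale, so a
         * voltage channel reports millivolts directly. */
        ret = iio_read_channel_processed(chan, mv);

        iio_channel_release(chan);
        return ret;
}
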
H A Dbuffer_cb.c46 struct iio_channel *chan; iio_channel_get_all_cb() local
73 chan = &cb_buff->channels[0]; iio_channel_get_all_cb()
74 while (chan->indio_dev) { iio_channel_get_all_cb()
75 if (chan->indio_dev != indio_dev) { iio_channel_get_all_cb()
79 set_bit(chan->channel->scan_index, iio_channel_get_all_cb()
81 chan++; iio_channel_get_all_cb()
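
iio_channel_get_all_cb() above checks that all requested channels share one iio_dev and builds the scan mask for a callback buffer. A consumer-side sketch under those assumptions (my_cb and dev are placeholders; start/stop/release are the matching buffer_cb.c calls):

#include <linux/err.h>
#include <linux/iio/consumer.h>

/* Called for each datum pushed to the buffer. */
static int my_cb(const void *data, void *private)
{
        /* parse samples according to the channels' scan types */
        return 0;
}

static struct iio_cb_buffer *start_capture(struct device *dev)
{
        struct iio_cb_buffer *cb_buff;

        cb_buff = iio_channel_get_all_cb(dev, my_cb, NULL);
        if (IS_ERR(cb_buff))
                return cb_buff;

        iio_channel_start_all_cb(cb_buff);
        /* teardown: iio_channel_stop_all_cb(cb_buff);
         *           iio_channel_release_all_cb(cb_buff); */
        return cb_buff;
}
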
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/
H A Dnv31.h11 struct nv31_mpeg_chan *chan; member in struct:nv31_mpeg_priv
/linux-4.1.27/net/9p/
H A Dtrans_virtio.c122 struct virtio_chan *chan = client->trans; p9_virtio_close() local
125 if (chan) p9_virtio_close()
126 chan->inuse = false; p9_virtio_close()
145 struct virtio_chan *chan = vq->vdev->priv; req_done() local
154 spin_lock_irqsave(&chan->lock, flags); req_done()
155 rc = virtqueue_get_buf(chan->vq, &len); req_done()
157 spin_unlock_irqrestore(&chan->lock, flags); req_done()
160 chan->ring_bufs_avail = 1; req_done()
161 spin_unlock_irqrestore(&chan->lock, flags); req_done()
163 wake_up(chan->vc_wq); req_done()
166 req = p9_tag_lookup(chan->client, rc->tag); req_done()
167 p9_client_cb(chan->client, req, REQ_STATUS_RCVD); req_done()
266 struct virtio_chan *chan = client->trans; p9_virtio_request() local
273 spin_lock_irqsave(&chan->lock, flags); p9_virtio_request()
277 out = pack_sg_list(chan->sg, 0, p9_virtio_request()
280 sgs[out_sgs++] = chan->sg; p9_virtio_request()
282 in = pack_sg_list(chan->sg, out, p9_virtio_request()
285 sgs[out_sgs + in_sgs++] = chan->sg + out; p9_virtio_request()
287 err = virtqueue_add_sgs(chan->vq, sgs, out_sgs, in_sgs, req->tc, p9_virtio_request()
291 chan->ring_bufs_avail = 0; p9_virtio_request()
292 spin_unlock_irqrestore(&chan->lock, flags); p9_virtio_request()
293 err = wait_event_interruptible(*chan->vc_wq, p9_virtio_request()
294 chan->ring_bufs_avail); p9_virtio_request()
301 spin_unlock_irqrestore(&chan->lock, flags); p9_virtio_request()
307 virtqueue_kick(chan->vq); p9_virtio_request()
308 spin_unlock_irqrestore(&chan->lock, flags); p9_virtio_request()
314 static int p9_get_mapped_pages(struct virtio_chan *chan, p9_get_mapped_pages() argument
333 if (atomic_read(&vp_pinned) >= chan->p9_max_pages) { p9_get_mapped_pages()
335 (atomic_read(&vp_pinned) < chan->p9_max_pages)); p9_get_mapped_pages()
404 struct virtio_chan *chan = client->trans; p9_virtio_zc_request() local
412 int n = p9_get_mapped_pages(chan, &out_pages, uodata, p9_virtio_zc_request()
423 int n = p9_get_mapped_pages(chan, &in_pages, uidata, p9_virtio_zc_request()
436 spin_lock_irqsave(&chan->lock, flags); p9_virtio_zc_request()
441 out = pack_sg_list(chan->sg, 0, p9_virtio_zc_request()
445 sgs[out_sgs++] = chan->sg; p9_virtio_zc_request()
448 sgs[out_sgs++] = chan->sg + out; p9_virtio_zc_request()
449 out += pack_sg_list_p(chan->sg, out, VIRTQUEUE_NUM, p9_virtio_zc_request()
460 in = pack_sg_list(chan->sg, out, p9_virtio_zc_request()
463 sgs[out_sgs + in_sgs++] = chan->sg + out; p9_virtio_zc_request()
466 sgs[out_sgs + in_sgs++] = chan->sg + out + in; p9_virtio_zc_request()
467 in += pack_sg_list_p(chan->sg, out + in, VIRTQUEUE_NUM, p9_virtio_zc_request()
472 err = virtqueue_add_sgs(chan->vq, sgs, out_sgs, in_sgs, req->tc, p9_virtio_zc_request()
476 chan->ring_bufs_avail = 0; p9_virtio_zc_request()
477 spin_unlock_irqrestore(&chan->lock, flags); p9_virtio_zc_request()
478 err = wait_event_interruptible(*chan->vc_wq, p9_virtio_zc_request()
479 chan->ring_bufs_avail); p9_virtio_zc_request()
486 spin_unlock_irqrestore(&chan->lock, flags); p9_virtio_zc_request()
493 virtqueue_kick(chan->vq); p9_virtio_zc_request()
494 spin_unlock_irqrestore(&chan->lock, flags); p9_virtio_zc_request()
522 struct virtio_chan *chan; p9_mount_tag_show() local
526 chan = vdev->priv; p9_mount_tag_show()
528 memcpy(buf, chan->tag, chan->tag_len); p9_mount_tag_show()
529 buf[chan->tag_len] = 0; p9_mount_tag_show()
531 return chan->tag_len + 1; p9_mount_tag_show()
549 struct virtio_chan *chan; p9_virtio_probe() local
557 chan = kmalloc(sizeof(struct virtio_chan), GFP_KERNEL); p9_virtio_probe()
558 if (!chan) { p9_virtio_probe()
564 chan->vdev = vdev; p9_virtio_probe()
567 chan->vq = virtio_find_single_vq(vdev, req_done, "requests"); p9_virtio_probe()
568 if (IS_ERR(chan->vq)) { p9_virtio_probe()
569 err = PTR_ERR(chan->vq); p9_virtio_probe()
572 chan->vq->vdev->priv = chan; p9_virtio_probe()
573 spin_lock_init(&chan->lock); p9_virtio_probe()
575 sg_init_table(chan->sg, VIRTQUEUE_NUM); p9_virtio_probe()
577 chan->inuse = false; p9_virtio_probe()
592 chan->tag = tag; p9_virtio_probe()
593 chan->tag_len = tag_len; p9_virtio_probe()
598 chan->vc_wq = kmalloc(sizeof(wait_queue_head_t), GFP_KERNEL); p9_virtio_probe()
599 if (!chan->vc_wq) { p9_virtio_probe()
603 init_waitqueue_head(chan->vc_wq); p9_virtio_probe()
604 chan->ring_bufs_avail = 1; p9_virtio_probe()
606 chan->p9_max_pages = nr_free_buffer_pages()/4; p9_virtio_probe()
611 list_add_tail(&chan->chan_list, &virtio_chan_list); p9_virtio_probe()
623 kfree(chan); p9_virtio_probe()
646 struct virtio_chan *chan; p9_virtio_create() local
651 list_for_each_entry(chan, &virtio_chan_list, chan_list) { p9_virtio_create()
652 if (!strncmp(devname, chan->tag, chan->tag_len) && p9_virtio_create()
653 strlen(devname) == chan->tag_len) { p9_virtio_create()
654 if (!chan->inuse) { p9_virtio_create()
655 chan->inuse = true; p9_virtio_create()
669 client->trans = (void *)chan; p9_virtio_create()
671 chan->client = client; p9_virtio_create()
684 struct virtio_chan *chan = vdev->priv; p9_virtio_remove() local
690 list_del(&chan->chan_list); p9_virtio_remove()
694 while (chan->inuse) { p9_virtio_remove()
711 kfree(chan->tag); p9_virtio_remove()
712 kfree(chan->vc_wq); p9_virtio_remove()
713 kfree(chan); p9_virtio_remove()
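
p9_virtio_request() and p9_virtio_zc_request() above share one flow: build scatter-gather lists under chan->lock, try virtqueue_add_sgs(), and when the ring is full clear ring_bufs_avail, sleep on chan->vc_wq until req_done() frees a slot, then retry before kicking the host. A hedged userspace sketch of that wait-and-retry loop, with a condition variable standing in for the waitqueue and a plain counter for the virtqueue:

/* Sketch: the "ring full -> sleep until req_done() signals -> retry" loop
 * used by the 9p virtio transport above, modelled with pthreads. The
 * slot counter is a stand-in, not the virtio API. */
#include <pthread.h>
#include <stdio.h>

#define RING_SLOTS 2

struct chan {
    pthread_mutex_t lock;
    pthread_cond_t vc_wq;       /* plays the role of chan->vc_wq */
    int free_slots;             /* plays the role of the virtqueue */
    int ring_bufs_avail;
};

static void submit(struct chan *c, int req)
{
    pthread_mutex_lock(&c->lock);
    while (c->free_slots == 0) {        /* virtqueue_add_sgs() == -ENOSPC */
        c->ring_bufs_avail = 0;
        while (!c->ring_bufs_avail)     /* wait_event_interruptible() */
            pthread_cond_wait(&c->vc_wq, &c->lock);
    }
    c->free_slots--;                    /* slot consumed; "kick" the host */
    pthread_mutex_unlock(&c->lock);
    printf("submitted req %d\n", req);
}

static void req_done(struct chan *c)    /* completion side */
{
    pthread_mutex_lock(&c->lock);
    c->free_slots++;
    c->ring_bufs_avail = 1;
    pthread_mutex_unlock(&c->lock);
    pthread_cond_signal(&c->vc_wq);     /* wake_up(chan->vc_wq) */
}

static void *completer(void *arg)
{
    struct chan *c = arg;
    for (int i = 0; i < 3; i++)
        req_done(c);
    return NULL;
}

int main(void)
{
    struct chan c = { PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER,
                      RING_SLOTS, 1 };
    pthread_t t;

    pthread_create(&t, NULL, completer, &c);
    for (int i = 0; i < 5; i++)
        submit(&c, i);                  /* blocks whenever the ring fills */
    pthread_join(t, NULL);
    return 0;
}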
/linux-4.1.27/kernel/
H A Drelay.c36 buf->chan->cb->buf_unmapped(buf, vma->vm_file); relay_file_mmap_close()
107 if (length != (unsigned long)buf->chan->alloc_size) relay_mmap_buf()
113 buf->chan->cb->buf_mapped(buf, filp); relay_mmap_buf()
161 * @chan: the relay channel
165 static struct rchan_buf *relay_create_buf(struct rchan *chan) relay_create_buf() argument
169 if (chan->n_subbufs > UINT_MAX / sizeof(size_t *)) relay_create_buf()
175 buf->padding = kmalloc(chan->n_subbufs * sizeof(size_t *), GFP_KERNEL); relay_create_buf()
179 buf->start = relay_alloc_buf(buf, &chan->alloc_size); relay_create_buf()
183 buf->chan = chan; relay_create_buf()
184 kref_get(&buf->chan->kref); relay_create_buf()
201 struct rchan *chan = container_of(kref, struct rchan, kref); relay_destroy_channel() local
202 kfree(chan); relay_destroy_channel()
211 struct rchan *chan = buf->chan; relay_destroy_buf() local
220 chan->buf[buf->cpu] = NULL; relay_destroy_buf()
223 kref_put(&chan->kref, relay_destroy_channel); relay_destroy_buf()
260 return (ready >= buf->chan->n_subbufs) ? 1 : 0; relay_buf_full()
369 for (i = 0; i < buf->chan->n_subbufs; i++) __relay_reset()
372 buf->chan->cb->subbuf_start(buf, buf->data, NULL, 0); __relay_reset()
377 * @chan: the channel
386 void relay_reset(struct rchan *chan) relay_reset() argument
390 if (!chan) relay_reset()
393 if (chan->is_global && chan->buf[0]) { relay_reset()
394 __relay_reset(chan->buf[0], 0); relay_reset()
400 if (chan->buf[i]) relay_reset()
401 __relay_reset(chan->buf[i], 0); relay_reset()
413 static struct dentry *relay_create_buf_file(struct rchan *chan, relay_create_buf_file() argument
423 snprintf(tmpname, NAME_MAX, "%s%d", chan->base_filename, cpu); relay_create_buf_file()
426 dentry = chan->cb->create_buf_file(tmpname, chan->parent, relay_create_buf_file()
428 &chan->is_global); relay_create_buf_file()
440 static struct rchan_buf *relay_open_buf(struct rchan *chan, unsigned int cpu) relay_open_buf() argument
445 if (chan->is_global) relay_open_buf()
446 return chan->buf[0]; relay_open_buf()
448 buf = relay_create_buf(chan); relay_open_buf()
452 if (chan->has_base_filename) { relay_open_buf()
453 dentry = relay_create_buf_file(chan, buf, cpu); relay_open_buf()
462 if(chan->is_global) { relay_open_buf()
463 chan->buf[0] = buf; relay_open_buf()
486 buf->chan->cb->remove_buf_file(buf->dentry); relay_close_buf()
490 static void setup_callbacks(struct rchan *chan, setup_callbacks() argument
494 chan->cb = &default_channel_callbacks; setup_callbacks()
508 chan->cb = cb; setup_callbacks()
524 struct rchan *chan; relay_hotcpu_callback() local
530 list_for_each_entry(chan, &relay_channels, list) { relay_hotcpu_callback()
531 if (chan->buf[hotcpu]) relay_hotcpu_callback()
533 chan->buf[hotcpu] = relay_open_buf(chan, hotcpu); relay_hotcpu_callback()
534 if(!chan->buf[hotcpu]) { relay_hotcpu_callback()
577 struct rchan *chan; relay_open() local
584 chan = kzalloc(sizeof(struct rchan), GFP_KERNEL); relay_open()
585 if (!chan) relay_open()
588 chan->version = RELAYFS_CHANNEL_VERSION; relay_open()
589 chan->n_subbufs = n_subbufs; relay_open()
590 chan->subbuf_size = subbuf_size; relay_open()
591 chan->alloc_size = PAGE_ALIGN(subbuf_size * n_subbufs); relay_open()
592 chan->parent = parent; relay_open()
593 chan->private_data = private_data; relay_open()
595 chan->has_base_filename = 1; relay_open()
596 strlcpy(chan->base_filename, base_filename, NAME_MAX); relay_open()
598 setup_callbacks(chan, cb); relay_open()
599 kref_init(&chan->kref); relay_open()
603 chan->buf[i] = relay_open_buf(chan, i); for_each_online_cpu()
604 if (!chan->buf[i]) for_each_online_cpu()
607 list_add(&chan->list, &relay_channels); relay_open()
610 return chan; relay_open()

614 if (chan->buf[i]) for_each_possible_cpu()
615 relay_close_buf(chan->buf[i]); for_each_possible_cpu()
618 kref_put(&chan->kref, relay_destroy_channel); relay_open()
639 * @chan: channel to operate on
648 int relay_late_setup_files(struct rchan *chan, relay_late_setup_files() argument
658 if (!chan || !base_filename) relay_late_setup_files()
661 strlcpy(chan->base_filename, base_filename, NAME_MAX); relay_late_setup_files()
664 /* Is chan already set up? */ relay_late_setup_files()
665 if (unlikely(chan->has_base_filename)) { relay_late_setup_files()
669 chan->has_base_filename = 1; relay_late_setup_files()
670 chan->parent = parent; relay_late_setup_files()
678 if (unlikely(!chan->buf[i])) { for_each_online_cpu()
684 dentry = relay_create_buf_file(chan, chan->buf[i], i); for_each_online_cpu()
692 relay_set_buf_dentry(chan->buf[i], dentry); for_each_online_cpu()
695 disp.buf = chan->buf[i]; for_each_online_cpu()
727 if (unlikely(length > buf->chan->subbuf_size)) relay_switch_subbuf()
730 if (buf->offset != buf->chan->subbuf_size + 1) { relay_switch_subbuf()
731 buf->prev_padding = buf->chan->subbuf_size - buf->offset; relay_switch_subbuf()
732 old_subbuf = buf->subbufs_produced % buf->chan->n_subbufs; relay_switch_subbuf()
737 buf->chan->subbuf_size - relay_switch_subbuf()
740 buf->early_bytes += buf->chan->subbuf_size - relay_switch_subbuf()
754 new_subbuf = buf->subbufs_produced % buf->chan->n_subbufs; relay_switch_subbuf()
755 new = buf->start + new_subbuf * buf->chan->subbuf_size; relay_switch_subbuf()
757 if (!buf->chan->cb->subbuf_start(buf, new, old, buf->prev_padding)) { relay_switch_subbuf()
758 buf->offset = buf->chan->subbuf_size + 1; relay_switch_subbuf()
764 if (unlikely(length + buf->offset > buf->chan->subbuf_size)) relay_switch_subbuf()
770 buf->chan->last_toobig = length; relay_switch_subbuf()
777 * @chan: the channel
788 void relay_subbufs_consumed(struct rchan *chan, relay_subbufs_consumed() argument
794 if (!chan) relay_subbufs_consumed()
797 if (cpu >= NR_CPUS || !chan->buf[cpu] || relay_subbufs_consumed()
798 subbufs_consumed > chan->n_subbufs) relay_subbufs_consumed()
801 buf = chan->buf[cpu]; relay_subbufs_consumed()
811 * @chan: the channel
815 void relay_close(struct rchan *chan) relay_close() argument
819 if (!chan) relay_close()
823 if (chan->is_global && chan->buf[0]) relay_close()
824 relay_close_buf(chan->buf[0]); relay_close()
827 if (chan->buf[i]) relay_close()
828 relay_close_buf(chan->buf[i]); relay_close()
830 if (chan->last_toobig) relay_close()
833 chan->last_toobig, chan->subbuf_size); relay_close()
835 list_del(&chan->list); relay_close()
836 kref_put(&chan->kref, relay_destroy_channel); relay_close()
843 * @chan: the channel
847 void relay_flush(struct rchan *chan) relay_flush() argument
851 if (!chan) relay_flush()
854 if (chan->is_global && chan->buf[0]) { relay_flush()
855 relay_switch_subbuf(chan->buf[0], 0); relay_flush()
861 if (chan->buf[i]) relay_flush()
862 relay_switch_subbuf(chan->buf[i], 0); relay_flush()
943 size_t subbuf_size = buf->chan->subbuf_size; relay_file_read_consume()
944 size_t n_subbufs = buf->chan->n_subbufs; relay_file_read_consume()
952 relay_subbufs_consumed(buf->chan, buf->cpu, 1); relay_file_read_consume()
960 read_subbuf = read_pos / buf->chan->subbuf_size; relay_file_read_consume()
965 relay_subbufs_consumed(buf->chan, buf->cpu, 1); relay_file_read_consume()
975 size_t subbuf_size = buf->chan->subbuf_size; relay_file_read_avail()
976 size_t n_subbufs = buf->chan->n_subbufs; relay_file_read_avail()
1022 size_t subbuf_size = buf->chan->subbuf_size; relay_file_read_subbuf_avail()
1052 size_t subbuf_size = buf->chan->subbuf_size; relay_file_read_start_pos()
1053 size_t n_subbufs = buf->chan->n_subbufs; relay_file_read_start_pos()
1081 size_t subbuf_size = buf->chan->subbuf_size; relay_file_read_end_pos()
1082 size_t n_subbufs = buf->chan->n_subbufs; relay_file_read_end_pos()
1181 if (rbuf->bytes_consumed >= rbuf->chan->subbuf_size) { relay_consume_bytes()
1182 relay_subbufs_consumed(rbuf->chan, rbuf->cpu, 1); relay_consume_bytes()
1183 rbuf->bytes_consumed %= rbuf->chan->subbuf_size; relay_consume_bytes()
1220 unsigned int subbuf_size = rbuf->chan->subbuf_size; subbuf_splice_actor()
1222 uint32_t alloc_size = (uint32_t) rbuf->chan->alloc_size; subbuf_splice_actor()
1251 subbuf_pages = rbuf->chan->alloc_size >> PAGE_SHIFT; subbuf_splice_actor()
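
The relay arithmetic above is uniform: a per-CPU buffer holds n_subbufs fixed-size sub-buffers, the active one is subbufs_produced % n_subbufs, and relay_switch_subbuf() records the unused tail (subbuf_size - offset) as padding when it abandons a partially filled sub-buffer. A simplified standalone demo of that indexing, with illustrative sizes:

/* Demo of relay's sub-buffer indexing (kernel/relay.c above): the writer's
 * position is (subbufs_produced % n_subbufs) * subbuf_size + offset, and a
 * switch records the unused tail as padding. Sizes are illustrative. */
#include <stdio.h>

#define SUBBUF_SIZE 64
#define N_SUBBUFS    4

int main(void)
{
    size_t produced = 0, offset = 0, padding[N_SUBBUFS] = { 0 };
    size_t writes[] = { 40, 40, 64, 10 };  /* lengths handed to the writer */

    for (size_t i = 0; i < sizeof(writes) / sizeof(writes[0]); i++) {
        if (offset + writes[i] > SUBBUF_SIZE) {   /* relay_switch_subbuf() */
            padding[produced % N_SUBBUFS] = SUBBUF_SIZE - offset;
            produced++;
            offset = 0;
        }
        size_t subbuf = produced % N_SUBBUFS;
        printf("write %zu bytes -> subbuf %zu at offset %zu\n",
               writes[i], subbuf, subbuf * SUBBUF_SIZE + offset);
        offset += writes[i];
    }
    for (int s = 0; s < N_SUBBUFS; s++)
        printf("subbuf %d padding %zu\n", s, padding[s]);
    return 0;
}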
/linux-4.1.27/drivers/pwm/
H A Dpwm-bcm-kona.c49 #define PWM_CONTROL_SMOOTH_SHIFT(chan) (24 + (chan))
50 #define PWM_CONTROL_TYPE_SHIFT(chan) (16 + (chan))
51 #define PWM_CONTROL_POLARITY_SHIFT(chan) (8 + (chan))
52 #define PWM_CONTROL_TRIGGER_SHIFT(chan) (chan)
55 #define PRESCALE_SHIFT(chan) ((chan) << 2)
56 #define PRESCALE_MASK(chan) (0x7 << PRESCALE_SHIFT(chan))
60 #define PERIOD_COUNT_OFFSET(chan) (0x00000008 + ((chan) << 3))
64 #define DUTY_CYCLE_HIGH_OFFSET(chan) (0x0000000c + ((chan) << 3))
79 static void kona_pwmc_apply_settings(struct kona_pwmc *kp, unsigned int chan) kona_pwmc_apply_settings() argument
84 value |= 1 << PWM_CONTROL_SMOOTH_SHIFT(chan); kona_pwmc_apply_settings()
85 value &= ~(1 << PWM_CONTROL_TRIGGER_SHIFT(chan)); kona_pwmc_apply_settings()
89 value &= ~(1 << PWM_CONTROL_SMOOTH_SHIFT(chan)); kona_pwmc_apply_settings()
90 value |= 1 << PWM_CONTROL_TRIGGER_SHIFT(chan); kona_pwmc_apply_settings()
100 unsigned int value, chan = pwm->hwpwm; kona_pwmc_config() local
139 value &= ~PRESCALE_MASK(chan); kona_pwmc_config()
140 value |= prescale << PRESCALE_SHIFT(chan); kona_pwmc_config()
143 writel(pc, kp->base + PERIOD_COUNT_OFFSET(chan)); kona_pwmc_config()
145 writel(dc, kp->base + DUTY_CYCLE_HIGH_OFFSET(chan)); kona_pwmc_config()
147 kona_pwmc_apply_settings(kp, chan); kona_pwmc_config()
157 unsigned int chan = pwm->hwpwm; kona_pwmc_set_polarity() local
170 value |= 1 << PWM_CONTROL_POLARITY_SHIFT(chan); kona_pwmc_set_polarity()
172 value &= ~(1 << PWM_CONTROL_POLARITY_SHIFT(chan)); kona_pwmc_set_polarity()
176 kona_pwmc_apply_settings(kp, chan); kona_pwmc_set_polarity()
209 unsigned int chan = pwm->hwpwm; kona_pwmc_disable() local
212 writel(0, kp->base + DUTY_CYCLE_HIGH_OFFSET(chan)); kona_pwmc_disable()
213 kona_pwmc_apply_settings(kp, chan); kona_pwmc_disable()
233 unsigned int chan; kona_pwmc_probe() local
270 for (chan = 0; chan < kp->chip.npwm; chan++) { kona_pwmc_probe()
271 value |= (1 << PWM_CONTROL_SMOOTH_SHIFT(chan)); kona_pwmc_probe()
272 value |= (1 << PWM_CONTROL_TYPE_SHIFT(chan)); kona_pwmc_probe()
273 value |= (1 << PWM_CONTROL_POLARITY_SHIFT(chan)); kona_pwmc_probe()
290 unsigned int chan; kona_pwmc_remove() local
292 for (chan = 0; chan < kp->chip.npwm; chan++) kona_pwmc_remove()
293 if (test_bit(PWMF_ENABLED, &kp->chip.pwms[chan].flags)) kona_pwmc_remove()
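
The Kona control register packs one bit per channel into four byte-wide groups — trigger at bit chan, polarity at 8 + chan, type at 16 + chan, smooth at 24 + chan — so a single 32-bit word drives every channel. A standalone check of that layout (six channels here purely for illustration):

/* Demo of the per-channel bit layout used by pwm-bcm-kona.c above:
 * four groups of channel bits packed into one 32-bit control word. */
#include <stdint.h>
#include <stdio.h>

#define PWM_CONTROL_SMOOTH_SHIFT(chan)   (24 + (chan))
#define PWM_CONTROL_TYPE_SHIFT(chan)     (16 + (chan))
#define PWM_CONTROL_POLARITY_SHIFT(chan) (8 + (chan))
#define PWM_CONTROL_TRIGGER_SHIFT(chan)  (chan)

int main(void)
{
    uint32_t value = 0;

    /* Mirror the probe loop: smooth + type + polarity for each channel. */
    for (unsigned int chan = 0; chan < 6; chan++) {
        value |= 1u << PWM_CONTROL_SMOOTH_SHIFT(chan);
        value |= 1u << PWM_CONTROL_TYPE_SHIFT(chan);
        value |= 1u << PWM_CONTROL_POLARITY_SHIFT(chan);
    }
    printf("control word after probe: 0x%08x\n", (unsigned int)value);

    /* kona_pwmc_apply_settings() for channel 2: set smooth, clear trigger. */
    value |= 1u << PWM_CONTROL_SMOOTH_SHIFT(2);
    value &= ~(1u << PWM_CONTROL_TRIGGER_SHIFT(2));
    printf("after apply_settings(2):  0x%08x\n", (unsigned int)value);
    return 0;
}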
H A Dpwm-samsung.c35 #define REG_TCNTB(chan) (0x0c + ((chan) * 0xc))
36 #define REG_TCMPB(chan) (0x10 + ((chan) * 0xc))
42 #define TCFG1_SHIFT(chan) (4 * (chan))
52 #define TCON_START(chan) BIT(4 * (chan) + 0)
53 #define TCON_MANUALUPDATE(chan) BIT(4 * (chan) + 1)
54 #define TCON_INVERT(chan) BIT(4 * (chan) + 2)
55 #define _TCON_AUTORELOAD(chan) BIT(4 * (chan) + 3)
56 #define _TCON_AUTORELOAD4(chan) BIT(4 * (chan) + 2)
57 #define TCON_AUTORELOAD(chan) \
58 ((chan < 5) ? _TCON_AUTORELOAD(chan) : _TCON_AUTORELOAD4(chan))
140 static int pwm_samsung_is_tdiv(struct samsung_pwm_chip *chip, unsigned int chan) pwm_samsung_is_tdiv() argument
146 reg >>= TCFG1_SHIFT(chan); pwm_samsung_is_tdiv()
153 unsigned int chan) pwm_samsung_get_tin_rate()
161 if (chan >= 2) pwm_samsung_get_tin_rate()
169 unsigned int chan, unsigned long freq) pwm_samsung_calc_tin()
176 if (!pwm_samsung_is_tdiv(chip, chan)) { pwm_samsung_calc_tin()
177 clk = (chan < 2) ? chip->tclk0 : chip->tclk1; pwm_samsung_calc_tin()
185 "tclk of PWM %d is inoperational, using tdiv\n", chan); pwm_samsung_calc_tin()
188 rate = pwm_samsung_get_tin_rate(chip, chan); pwm_samsung_calc_tin()
200 pwm_samsung_set_divisor(chip, chan, BIT(div)); pwm_samsung_calc_tin()
295 struct samsung_pwm_channel *chan = pwm_get_chip_data(pwm); pwm_samsung_config() local
296 u32 tin_ns = chan->tin_ns, tcnt, tcmp, oldtcmp; pwm_samsung_config()
306 if (period_ns == chan->period_ns && duty_ns == chan->duty_ns) pwm_samsung_config()
316 if (chan->period_ns != period_ns) { pwm_samsung_config()
368 chan->period_ns = period_ns; pwm_samsung_config()
369 chan->tin_ns = tin_ns; pwm_samsung_config()
370 chan->duty_ns = duty_ns; pwm_samsung_config()
498 unsigned int chan; pwm_samsung_probe() local
545 for (chan = 0; chan < SAMSUNG_PWM_NUM; ++chan) pwm_samsung_probe()
546 if (chip->variant.output_mask & BIT(chan)) pwm_samsung_probe()
547 pwm_samsung_set_invert(chip, chan, true); pwm_samsung_probe()
597 struct samsung_pwm_channel *chan = pwm_get_chip_data(pwm); pwm_samsung_suspend() local
599 if (!chan) pwm_samsung_suspend()
602 chan->period_ns = 0; pwm_samsung_suspend()
603 chan->duty_ns = 0; pwm_samsung_suspend()
612 unsigned int chan; pwm_samsung_resume() local
618 for (chan = 0; chan < SAMSUNG_PWM_NUM; ++chan) { pwm_samsung_resume()
619 if (chip->variant.output_mask & BIT(chan)) pwm_samsung_resume()
620 pwm_samsung_set_invert(chip, chan, pwm_samsung_resume()
621 chip->inverter_mask & BIT(chan)); pwm_samsung_resume()
152 pwm_samsung_get_tin_rate(struct samsung_pwm_chip *chip, unsigned int chan) pwm_samsung_get_tin_rate() argument
168 pwm_samsung_calc_tin(struct samsung_pwm_chip *chip, unsigned int chan, unsigned long freq) pwm_samsung_calc_tin() argument
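
pwm-samsung.c addresses per-channel state two ways: fixed-stride registers (each channel's TCNTB/TCMPB pair sits 0xc bytes past the previous one) and 4-bit-per-channel fields packed into the shared TCFG1/TCON words, where the TCON_AUTORELOAD() ternary handles the one irregular bit position for channel 5. The arithmetic, checked standalone with BIT() spelled out as a shift:

/* Demo of pwm-samsung.c's per-channel addressing above: 0xc-stride
 * count/compare registers plus 4-bit-per-channel TCON fields; channel 5's
 * auto-reload bit sits one position lower than the pattern predicts. */
#include <stdio.h>

#define REG_TCNTB(chan)         (0x0c + ((chan) * 0xc))
#define REG_TCMPB(chan)         (0x10 + ((chan) * 0xc))
#define TCFG1_SHIFT(chan)       (4 * (chan))
#define _TCON_AUTORELOAD(chan)  (1u << (4 * (chan) + 3))
#define _TCON_AUTORELOAD4(chan) (1u << (4 * (chan) + 2))
#define TCON_AUTORELOAD(chan) \
    ((chan) < 5 ? _TCON_AUTORELOAD(chan) : _TCON_AUTORELOAD4(chan))

int main(void)
{
    for (unsigned int chan = 0; chan <= 5; chan++)
        printf("chan %u: TCNTB @0x%02x TCMPB @0x%02x TCFG1 shift %u autoreload bit %d\n",
               chan, (unsigned int)REG_TCNTB(chan),
               (unsigned int)REG_TCMPB(chan), TCFG1_SHIFT(chan),
               __builtin_ctz(TCON_AUTORELOAD(chan)));
    return 0;
}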
/linux-4.1.27/arch/arm/include/asm/hardware/
H A Diop3xx-adma.h26 #define DMA_CCR(chan) (chan->mmr_base + 0x0)
27 #define DMA_CSR(chan) (chan->mmr_base + 0x4)
28 #define DMA_DAR(chan) (chan->mmr_base + 0xc)
29 #define DMA_NDAR(chan) (chan->mmr_base + 0x10)
30 #define DMA_PADR(chan) (chan->mmr_base + 0x14)
31 #define DMA_PUADR(chan) (chan->mmr_base + 0x18)
32 #define DMA_LADR(chan) (chan->mmr_base + 0x1c)
33 #define DMA_BCR(chan) (chan->mmr_base + 0x20)
34 #define DMA_DCR(chan) (chan->mmr_base + 0x24)
37 #define AAU_ACR(chan) (chan->mmr_base + 0x0)
38 #define AAU_ASR(chan) (chan->mmr_base + 0x4)
39 #define AAU_ADAR(chan) (chan->mmr_base + 0x8)
40 #define AAU_ANDAR(chan) (chan->mmr_base + 0xc)
41 #define AAU_SAR(src, chan) (chan->mmr_base + (0x10 + ((src) << 2)))
42 #define AAU_DAR(chan) (chan->mmr_base + 0x20)
43 #define AAU_ABCR(chan) (chan->mmr_base + 0x24)
44 #define AAU_ADCR(chan) (chan->mmr_base + 0x28)
45 #define AAU_SAR_EDCR(src_edc) (chan->mmr_base + (0x02c + ((src_edc-4) << 2)))
258 static inline u32 iop_chan_get_current_descriptor(struct iop_adma_chan *chan) iop_chan_get_current_descriptor() argument
260 int id = chan->device->id; iop_chan_get_current_descriptor()
265 return __raw_readl(DMA_DAR(chan)); iop_chan_get_current_descriptor()
267 return __raw_readl(AAU_ADAR(chan)); iop_chan_get_current_descriptor()
274 static inline void iop_chan_set_next_descriptor(struct iop_adma_chan *chan, iop_chan_set_next_descriptor() argument
277 int id = chan->device->id; iop_chan_set_next_descriptor()
282 __raw_writel(next_desc_addr, DMA_NDAR(chan)); iop_chan_set_next_descriptor()
285 __raw_writel(next_desc_addr, AAU_ANDAR(chan)); iop_chan_set_next_descriptor()
296 static inline int iop_chan_is_busy(struct iop_adma_chan *chan) iop_chan_is_busy() argument
298 u32 status = __raw_readl(DMA_CSR(chan)); iop_chan_is_busy()
341 iop_chan_interrupt_slot_count(int *slots_per_op, struct iop_adma_chan *chan) iop_chan_interrupt_slot_count() argument
343 switch (chan->device->id) { iop_chan_interrupt_slot_count()
397 struct iop_adma_chan *chan) iop_desc_get_byte_count()
401 switch (chan->device->id) { iop_desc_get_byte_count()
430 struct iop_adma_chan *chan, iop_desc_get_src_addr()
435 switch (chan->device->id) { iop_desc_get_src_addr()
647 struct iop_adma_chan *chan, iop_desc_set_byte_count()
652 switch (chan->device->id) { iop_desc_set_byte_count()
667 struct iop_adma_chan *chan) iop_desc_init_interrupt()
671 switch (chan->device->id) { iop_desc_init_interrupt()
714 struct iop_adma_chan *chan, iop_desc_set_dest_addr()
719 switch (chan->device->id) { iop_desc_set_dest_addr()
811 static inline void iop_chan_append(struct iop_adma_chan *chan) iop_chan_append() argument
815 dma_chan_ctrl = __raw_readl(DMA_CCR(chan)); iop_chan_append()
817 __raw_writel(dma_chan_ctrl, DMA_CCR(chan)); iop_chan_append()
820 static inline u32 iop_chan_get_status(struct iop_adma_chan *chan) iop_chan_get_status() argument
822 return __raw_readl(DMA_CSR(chan)); iop_chan_get_status()
825 static inline void iop_chan_disable(struct iop_adma_chan *chan) iop_chan_disable() argument
827 u32 dma_chan_ctrl = __raw_readl(DMA_CCR(chan)); iop_chan_disable()
829 __raw_writel(dma_chan_ctrl, DMA_CCR(chan)); iop_chan_disable()
832 static inline void iop_chan_enable(struct iop_adma_chan *chan) iop_chan_enable() argument
834 u32 dma_chan_ctrl = __raw_readl(DMA_CCR(chan)); iop_chan_enable()
837 __raw_writel(dma_chan_ctrl, DMA_CCR(chan)); iop_chan_enable()
840 static inline void iop_adma_device_clear_eot_status(struct iop_adma_chan *chan) iop_adma_device_clear_eot_status() argument
842 u32 status = __raw_readl(DMA_CSR(chan)); iop_adma_device_clear_eot_status()
844 __raw_writel(status, DMA_CSR(chan)); iop_adma_device_clear_eot_status()
847 static inline void iop_adma_device_clear_eoc_status(struct iop_adma_chan *chan) iop_adma_device_clear_eoc_status() argument
849 u32 status = __raw_readl(DMA_CSR(chan)); iop_adma_device_clear_eoc_status()
851 __raw_writel(status, DMA_CSR(chan)); iop_adma_device_clear_eoc_status()
854 static inline void iop_adma_device_clear_err_status(struct iop_adma_chan *chan) iop_adma_device_clear_err_status() argument
856 u32 status = __raw_readl(DMA_CSR(chan)); iop_adma_device_clear_err_status()
858 switch (chan->device->id) { iop_adma_device_clear_err_status()
870 __raw_writel(status, DMA_CSR(chan)); iop_adma_device_clear_err_status()
874 iop_is_err_int_parity(unsigned long status, struct iop_adma_chan *chan) iop_is_err_int_parity() argument
880 iop_is_err_mcu_abort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_mcu_abort() argument
886 iop_is_err_int_tabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_int_tabort() argument
892 iop_is_err_int_mabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_int_mabort() argument
898 iop_is_err_pci_tabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_pci_tabort() argument
900 switch (chan->device->id) { iop_is_err_pci_tabort()
910 iop_is_err_pci_mabort(unsigned long status, struct iop_adma_chan *chan) iop_is_err_pci_mabort() argument
912 switch (chan->device->id) { iop_is_err_pci_mabort()
922 iop_is_err_split_tx(unsigned long status, struct iop_adma_chan *chan) iop_is_err_split_tx() argument
924 switch (chan->device->id) { iop_is_err_split_tx()
396 iop_desc_get_byte_count(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan) iop_desc_get_byte_count() argument
429 iop_desc_get_src_addr(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan, int src_idx) iop_desc_get_src_addr() argument
646 iop_desc_set_byte_count(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan, u32 byte_count) iop_desc_set_byte_count() argument
666 iop_desc_init_interrupt(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan) iop_desc_init_interrupt() argument
713 iop_desc_set_dest_addr(struct iop_adma_desc_slot *desc, struct iop_adma_chan *chan, dma_addr_t addr) iop_desc_set_dest_addr() argument
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/sw/
H A Dnv50.c42 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); nv50_sw_mthd_dma_vblsem() local
43 struct nvkm_fifo_chan *fifo = (void *)nv_object(chan)->parent; nv50_sw_mthd_dma_vblsem()
53 chan->vblank.ctxdma = gpuobj->node->offset >> 4; nv50_sw_mthd_dma_vblsem()
64 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); nv50_sw_mthd_vblsem_offset() local
65 chan->vblank.offset = *(u32 *)args; nv50_sw_mthd_vblsem_offset()
73 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); nv50_sw_mthd_vblsem_value() local
74 chan->vblank.value = *(u32 *)args; nv50_sw_mthd_vblsem_value()
82 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); nv50_sw_mthd_vblsem_release() local
84 if (head >= nvkm_disp(chan)->vblank.index_nr) nv50_sw_mthd_vblsem_release()
87 nvkm_notify_get(&chan->vblank.notify[head]); nv50_sw_mthd_vblsem_release()
94 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); nv50_sw_mthd_flip() local
95 if (chan->base.flip) nv50_sw_mthd_flip()
96 return chan->base.flip(chan->base.flip_data); nv50_sw_mthd_flip()
123 struct nv50_sw_chan *chan = nv50_sw_vblsem_release() local
124 container_of(notify, typeof(*chan), vblank.notify[notify->index]); nv50_sw_vblsem_release()
125 struct nv50_sw_priv *priv = (void *)nv_object(chan)->engine; nv50_sw_vblsem_release()
128 nv_wr32(priv, 0x001704, chan->vblank.channel); nv50_sw_vblsem_release()
129 nv_wr32(priv, 0x001710, 0x80000000 | chan->vblank.ctxdma); nv50_sw_vblsem_release()
133 nv_wr32(priv, 0x001570, chan->vblank.offset); nv50_sw_vblsem_release()
134 nv_wr32(priv, 0x001574, chan->vblank.value); nv50_sw_vblsem_release()
136 nv_wr32(priv, 0x060010, chan->vblank.offset); nv50_sw_vblsem_release()
137 nv_wr32(priv, 0x060014, chan->vblank.value); nv50_sw_vblsem_release()
146 struct nv50_sw_chan *chan = (void *)object; nv50_sw_context_dtor() local
149 for (i = 0; i < ARRAY_SIZE(chan->vblank.notify); i++) nv50_sw_context_dtor()
150 nvkm_notify_fini(&chan->vblank.notify[i]); nv50_sw_context_dtor()
152 nvkm_sw_context_destroy(&chan->base); nv50_sw_context_dtor()
162 struct nv50_sw_chan *chan; nv50_sw_context_ctor() local
165 ret = nvkm_sw_context_create(parent, engine, oclass, &chan); nv50_sw_context_ctor()
166 *pobject = nv_object(chan); nv50_sw_context_ctor()
178 &chan->vblank.notify[i]); nv50_sw_context_ctor()
183 chan->vblank.channel = nv_gpuobj(parent->parent)->addr >> 12; nv50_sw_context_ctor()
H A Dgf100.c36 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); gf100_sw_mthd_vblsem_offset() local
39 chan->vblank.offset &= 0x00ffffffffULL; gf100_sw_mthd_vblsem_offset()
40 chan->vblank.offset |= data << 32; gf100_sw_mthd_vblsem_offset()
42 chan->vblank.offset &= 0xff00000000ULL; gf100_sw_mthd_vblsem_offset()
43 chan->vblank.offset |= data; gf100_sw_mthd_vblsem_offset()
52 struct nv50_sw_chan *chan = (void *)nv_engctx(object->parent); gf100_sw_mthd_mp_control() local
53 struct nv50_sw_priv *priv = (void *)nv_object(chan)->engine; gf100_sw_mthd_mp_control()
100 struct nv50_sw_chan *chan = gf100_sw_vblsem_release() local
101 container_of(notify, typeof(*chan), vblank.notify[notify->index]); gf100_sw_vblsem_release()
102 struct nv50_sw_priv *priv = (void *)nv_object(chan)->engine; gf100_sw_vblsem_release()
105 nv_wr32(priv, 0x001718, 0x80000000 | chan->vblank.channel); gf100_sw_vblsem_release()
107 nv_wr32(priv, 0x06000c, upper_32_bits(chan->vblank.offset)); gf100_sw_vblsem_release()
108 nv_wr32(priv, 0x060010, lower_32_bits(chan->vblank.offset)); gf100_sw_vblsem_release()
109 nv_wr32(priv, 0x060014, chan->vblank.value); gf100_sw_vblsem_release()
H A Dnv04.c51 struct nv04_sw_chan *chan = (void *)nv_engctx(object->parent); nv04_sw_flip() local
52 if (chan->base.flip) nv04_sw_flip()
53 return chan->base.flip(chan->base.flip_data); nv04_sw_flip()
79 struct nv04_sw_chan *chan; nv04_sw_context_ctor() local
82 ret = nvkm_sw_context_create(parent, engine, oclass, &chan); nv04_sw_context_ctor()
83 *pobject = nv_object(chan); nv04_sw_context_ctor()
H A Dnv10.c41 struct nv10_sw_chan *chan = (void *)nv_engctx(object->parent); nv10_sw_flip() local
42 if (chan->base.flip) nv10_sw_flip()
43 return chan->base.flip(chan->base.flip_data); nv10_sw_flip()
68 struct nv10_sw_chan *chan; nv10_sw_context_ctor() local
71 ret = nvkm_sw_context_create(parent, engine, oclass, &chan); nv10_sw_context_ctor()
72 *pobject = nv_object(chan); nv10_sw_context_ctor()
/linux-4.1.27/arch/m68k/include/asm/
H A Dmac_asc.h17 #define ASC_FREQ(chan,byte) ((0x810)+((chan)<<3)+(byte))
/linux-4.1.27/arch/powerpc/platforms/pasemi/
H A Ddma_lib.c141 static void pasemi_free_tx_chan(int chan) pasemi_free_tx_chan() argument
143 BUG_ON(test_bit(chan, txch_free)); pasemi_free_tx_chan()
144 set_bit(chan, txch_free); pasemi_free_tx_chan()
160 static void pasemi_free_rx_chan(int chan) pasemi_free_rx_chan() argument
162 BUG_ON(test_bit(chan, rxch_free)); pasemi_free_rx_chan()
163 set_bit(chan, rxch_free); pasemi_free_rx_chan()
185 struct pasemi_dmachan *chan; pasemi_dma_alloc_chan() local
194 chan = buf + offset; pasemi_dma_alloc_chan()
196 chan->priv = buf; pasemi_dma_alloc_chan()
201 chan->chno = chno; pasemi_dma_alloc_chan()
202 chan->irq = irq_create_mapping(NULL, pasemi_dma_alloc_chan()
204 chan->status = &dma_status->rx_sta[chno]; pasemi_dma_alloc_chan()
208 chan->chno = chno; pasemi_dma_alloc_chan()
209 chan->irq = irq_create_mapping(NULL, base_hw_irq + chno); pasemi_dma_alloc_chan()
210 chan->status = &dma_status->tx_sta[chno]; pasemi_dma_alloc_chan()
214 chan->chan_type = type; pasemi_dma_alloc_chan()
216 return chan; pasemi_dma_alloc_chan()
221 * @chan: Channel to free
226 void pasemi_dma_free_chan(struct pasemi_dmachan *chan) pasemi_dma_free_chan() argument
228 if (chan->ring_virt) pasemi_dma_free_chan()
229 pasemi_dma_free_ring(chan); pasemi_dma_free_chan()
231 switch (chan->chan_type & (RXCHAN|TXCHAN)) { pasemi_dma_free_chan()
233 pasemi_free_rx_chan(chan->chno); pasemi_dma_free_chan()
236 pasemi_free_tx_chan(chan->chno); pasemi_dma_free_chan()
240 kfree(chan->priv); pasemi_dma_free_chan()
245 * @chan: Channel for which to allocate
252 int pasemi_dma_alloc_ring(struct pasemi_dmachan *chan, int ring_size) pasemi_dma_alloc_ring() argument
254 BUG_ON(chan->ring_virt); pasemi_dma_alloc_ring()
256 chan->ring_size = ring_size; pasemi_dma_alloc_ring()
258 chan->ring_virt = dma_alloc_coherent(&dma_pdev->dev, pasemi_dma_alloc_ring()
260 &chan->ring_dma, GFP_KERNEL); pasemi_dma_alloc_ring()
262 if (!chan->ring_virt) pasemi_dma_alloc_ring()
265 memset(chan->ring_virt, 0, ring_size * sizeof(u64)); pasemi_dma_alloc_ring()
272 * @chan: Channel for which to free the descriptor ring
276 void pasemi_dma_free_ring(struct pasemi_dmachan *chan) pasemi_dma_free_ring() argument
278 BUG_ON(!chan->ring_virt); pasemi_dma_free_ring()
280 dma_free_coherent(&dma_pdev->dev, chan->ring_size * sizeof(u64), pasemi_dma_free_ring()
281 chan->ring_virt, chan->ring_dma); pasemi_dma_free_ring()
282 chan->ring_virt = NULL; pasemi_dma_free_ring()
283 chan->ring_size = 0; pasemi_dma_free_ring()
284 chan->ring_dma = 0; pasemi_dma_free_ring()
289 * @chan: Channel to start
294 void pasemi_dma_start_chan(const struct pasemi_dmachan *chan, const u32 cmdsta) pasemi_dma_start_chan() argument
296 if (chan->chan_type == RXCHAN) pasemi_dma_start_chan()
297 pasemi_write_dma_reg(PAS_DMA_RXCHAN_CCMDSTA(chan->chno), pasemi_dma_start_chan()
300 pasemi_write_dma_reg(PAS_DMA_TXCHAN_TCMDSTA(chan->chno), pasemi_dma_start_chan()
306 * @chan: Channel to stop
318 int pasemi_dma_stop_chan(const struct pasemi_dmachan *chan) pasemi_dma_stop_chan() argument
323 if (chan->chan_type == RXCHAN) { pasemi_dma_stop_chan()
324 reg = PAS_DMA_RXCHAN_CCMDSTA(chan->chno); pasemi_dma_stop_chan()
335 reg = PAS_DMA_TXCHAN_TCMDSTA(chan->chno); pasemi_dma_stop_chan()
352 * @chan: Channel to allocate for
361 void *pasemi_dma_alloc_buf(struct pasemi_dmachan *chan, int size, pasemi_dma_alloc_buf() argument
369 * @chan: Channel the buffer was allocated for
375 void pasemi_dma_free_buf(struct pasemi_dmachan *chan, int size, pasemi_dma_free_buf() argument
/linux-4.1.27/drivers/isdn/act2000/
H A Dcapi.c154 actcapi_connect_req(act2000_card *card, act2000_chan *chan, char *phone, actcapi_connect_req() argument
163 chan->fsm_state = ACT2000_STATE_NULL; actcapi_connect_req()
175 chan->callref = m->hdr.msgnum; actcapi_connect_req()
181 actcapi_connect_b3_req(act2000_card *card, act2000_chan *chan) actcapi_connect_b3_req() argument
188 m->msg.connect_b3_req.plci = chan->plci; actcapi_connect_b3_req()
302 actcapi_select_b2_protocol_req(act2000_card *card, act2000_chan *chan) actcapi_select_b2_protocol_req() argument
309 m->msg.select_b2_protocol_req.plci = chan->plci; actcapi_select_b2_protocol_req()
313 switch (chan->l2prot) { actcapi_select_b2_protocol_req()
337 actcapi_select_b3_protocol_req(act2000_card *card, act2000_chan *chan) actcapi_select_b3_protocol_req() argument
344 m->msg.select_b3_protocol_req.plci = chan->plci; actcapi_select_b3_protocol_req()
347 switch (chan->l3prot) { actcapi_select_b3_protocol_req()
358 actcapi_listen_b3_req(act2000_card *card, act2000_chan *chan) actcapi_listen_b3_req() argument
365 m->msg.listen_b3_req.plci = chan->plci; actcapi_listen_b3_req()
370 actcapi_disconnect_req(act2000_card *card, act2000_chan *chan) actcapi_disconnect_req() argument
377 m->msg.disconnect_req.plci = chan->plci; actcapi_disconnect_req()
383 actcapi_disconnect_b3_req(act2000_card *card, act2000_chan *chan) actcapi_disconnect_b3_req() argument
390 m->msg.disconnect_b3_req.ncci = chan->ncci; actcapi_disconnect_b3_req()
395 chan->fsm_state = ACT2000_STATE_BHWAIT; actcapi_disconnect_b3_req()
400 actcapi_connect_resp(act2000_card *card, act2000_chan *chan, __u8 cause) actcapi_connect_resp() argument
407 m->msg.connect_resp.plci = chan->plci; actcapi_connect_resp()
410 chan->fsm_state = ACT2000_STATE_NULL; actcapi_connect_resp()
411 chan->plci = 0x8000; actcapi_connect_resp()
413 chan->fsm_state = ACT2000_STATE_IWAIT; actcapi_connect_resp()
418 actcapi_connect_active_resp(act2000_card *card, act2000_chan *chan) actcapi_connect_active_resp() argument
425 m->msg.connect_resp.plci = chan->plci; actcapi_connect_active_resp()
426 if (chan->fsm_state == ACT2000_STATE_IWAIT) actcapi_connect_active_resp()
427 chan->fsm_state = ACT2000_STATE_IBWAIT; actcapi_connect_active_resp()
432 actcapi_connect_b3_resp(act2000_card *card, act2000_chan *chan, __u8 rejectcause) actcapi_connect_b3_resp() argument
439 m->msg.connect_b3_resp.ncci = chan->ncci; actcapi_connect_b3_resp()
446 chan->fsm_state = ACT2000_STATE_BWAIT; actcapi_connect_b3_resp()
452 actcapi_connect_b3_active_resp(act2000_card *card, act2000_chan *chan) actcapi_connect_b3_active_resp() argument
459 m->msg.connect_b3_active_resp.ncci = chan->ncci; actcapi_connect_b3_active_resp()
460 chan->fsm_state = ACT2000_STATE_ACTIVE; actcapi_connect_b3_active_resp()
465 actcapi_info_resp(act2000_card *card, act2000_chan *chan) actcapi_info_resp() argument
472 m->msg.info_resp.plci = chan->plci; actcapi_info_resp()
477 actcapi_disconnect_b3_resp(act2000_card *card, act2000_chan *chan) actcapi_disconnect_b3_resp() argument
484 m->msg.disconnect_b3_resp.ncci = chan->ncci; actcapi_disconnect_b3_resp()
485 chan->ncci = 0x8000; actcapi_disconnect_b3_resp()
486 chan->queued = 0; actcapi_disconnect_b3_resp()
491 actcapi_disconnect_resp(act2000_card *card, act2000_chan *chan) actcapi_disconnect_resp() argument
498 m->msg.disconnect_resp.plci = chan->plci; actcapi_disconnect_resp()
499 chan->plci = 0x8000; actcapi_disconnect_resp()
552 int chan; actcapi_data_b3_ind() local
556 chan = find_ncci(card, ncci); actcapi_data_b3_ind()
557 if (chan < 0) actcapi_data_b3_ind()
559 if (card->bch[chan].fsm_state != ACT2000_STATE_ACTIVE) actcapi_data_b3_ind()
561 if (card->bch[chan].plci != plci) actcapi_data_b3_ind()
565 card->interface.rcvcallb_skb(card->myid, chan, skb); actcapi_data_b3_ind()
588 handle_ack(act2000_card *card, act2000_chan *chan, __u8 blocknr) { handle_ack() argument
605 if ((((m->msg.data_b3_req.fakencci >> 8) & 0xff) == chan->ncci) && handle_ack()
609 chan->queued -= m->msg.data_b3_req.datalen; handle_ack()
613 if (chan->queued < 0) handle_ack()
614 chan->queued = 0; handle_ack()
636 int chan; actcapi_dispatch() local
654 chan = find_ncci(card, msg->msg.data_b3_conf.ncci); actcapi_dispatch()
655 if ((chan >= 0) && (card->bch[chan].fsm_state == ACT2000_STATE_ACTIVE)) { actcapi_dispatch()
659 len = handle_ack(card, &card->bch[chan], actcapi_dispatch()
664 cmd.arg = chan; actcapi_dispatch()
672 chan = find_dialing(card, msg->hdr.msgnum); actcapi_dispatch()
673 if (chan >= 0) { actcapi_dispatch()
675 card->bch[chan].fsm_state = ACT2000_STATE_NULL; actcapi_dispatch()
678 cmd.arg = chan; actcapi_dispatch()
681 card->bch[chan].fsm_state = ACT2000_STATE_OWAIT; actcapi_dispatch()
682 card->bch[chan].plci = msg->msg.connect_conf.plci; actcapi_dispatch()
688 chan = new_plci(card, msg->msg.connect_ind.plci); actcapi_dispatch()
689 if (chan < 0) { actcapi_dispatch()
694 card->bch[chan].fsm_state = ACT2000_STATE_ICALL; actcapi_dispatch()
697 cmd.arg = chan; actcapi_dispatch()
713 actcapi_connect_resp(card, &card->bch[chan], 0x15); /* Reject Call */ actcapi_dispatch()
718 chan = find_plci(card, msg->msg.connect_active_ind.plci); actcapi_dispatch()
719 if (chan >= 0) actcapi_dispatch()
720 switch (card->bch[chan].fsm_state) { actcapi_dispatch()
722 actcapi_connect_active_resp(card, &card->bch[chan]); actcapi_dispatch()
725 actcapi_connect_active_resp(card, &card->bch[chan]); actcapi_dispatch()
726 actcapi_select_b2_protocol_req(card, &card->bch[chan]); actcapi_dispatch()
732 chan = find_plci(card, msg->msg.connect_b3_ind.plci); actcapi_dispatch()
733 if ((chan >= 0) && (card->bch[chan].fsm_state == ACT2000_STATE_IBWAIT)) { actcapi_dispatch()
734 card->bch[chan].ncci = msg->msg.connect_b3_ind.ncci; actcapi_dispatch()
735 actcapi_connect_b3_resp(card, &card->bch[chan], 0); actcapi_dispatch()
744 chan = find_ncci(card, msg->msg.connect_b3_active_ind.ncci); actcapi_dispatch()
745 if ((chan >= 0) && (card->bch[chan].fsm_state == ACT2000_STATE_BWAIT)) { actcapi_dispatch()
746 actcapi_connect_b3_active_resp(card, &card->bch[chan]); actcapi_dispatch()
749 cmd.arg = chan; actcapi_dispatch()
755 chan = find_ncci(card, msg->msg.disconnect_b3_ind.ncci); actcapi_dispatch()
756 if (chan >= 0) { actcapi_dispatch()
757 ctmp = &card->bch[chan]; actcapi_dispatch()
764 cmd.arg = chan; actcapi_dispatch()
772 cmd.arg = chan; actcapi_dispatch()
780 chan = find_plci(card, msg->msg.disconnect_ind.plci); actcapi_dispatch()
781 if (chan >= 0) { actcapi_dispatch()
782 ctmp = &card->bch[chan]; actcapi_dispatch()
787 cmd.arg = chan; actcapi_dispatch()
797 chan = find_plci(card, msg->msg.select_b2_protocol_conf.plci); actcapi_dispatch()
798 if (chan >= 0) actcapi_dispatch()
799 switch (card->bch[chan].fsm_state) { actcapi_dispatch()
802 ctmp = &card->bch[chan]; actcapi_dispatch()
809 cmd.arg = chan; actcapi_dispatch()
817 chan = find_plci(card, msg->msg.select_b3_protocol_conf.plci); actcapi_dispatch()
818 if (chan >= 0) actcapi_dispatch()
819 switch (card->bch[chan].fsm_state) { actcapi_dispatch()
822 ctmp = &card->bch[chan]; actcapi_dispatch()
829 cmd.arg = chan; actcapi_dispatch()
836 chan = find_plci(card, msg->msg.listen_b3_conf.plci); actcapi_dispatch()
837 if (chan >= 0) actcapi_dispatch()
838 switch (card->bch[chan].fsm_state) { actcapi_dispatch()
840 ctmp = &card->bch[chan]; actcapi_dispatch()
847 cmd.arg = chan; actcapi_dispatch()
852 ctmp = &card->bch[chan]; actcapi_dispatch()
858 cmd.arg = chan; actcapi_dispatch()
864 cmd.arg = chan; actcapi_dispatch()
872 chan = find_plci(card, msg->msg.connect_b3_conf.plci); actcapi_dispatch()
873 if ((chan >= 0) && (card->bch[chan].fsm_state == ACT2000_STATE_OBWAIT)) { actcapi_dispatch()
874 ctmp = &card->bch[chan]; actcapi_dispatch()
879 cmd.arg = chan; actcapi_dispatch()
889 chan = find_ncci(card, msg->msg.disconnect_b3_conf.ncci); actcapi_dispatch()
890 if ((chan >= 0) && (card->bch[chan].fsm_state == ACT2000_STATE_BHWAIT)) actcapi_dispatch()
891 card->bch[chan].fsm_state = ACT2000_STATE_BHWAIT2; actcapi_dispatch()
895 chan = find_plci(card, msg->msg.info_ind.plci); actcapi_dispatch()
896 if (chan >= 0) actcapi_dispatch()
898 actcapi_info_resp(card, &card->bch[chan]); actcapi_dispatch()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/
H A Danx9805.c35 struct anx9805_i2c_port *chan = (void *)port; anx9805_train() local
36 struct nvkm_i2c_port *mast = (void *)nv_object(chan)->parent; anx9805_train()
41 nv_wri2cr(mast, chan->addr, 0xa0, link_bw); anx9805_train()
42 nv_wri2cr(mast, chan->addr, 0xa1, link_nr | (enh ? 0x80 : 0x00)); anx9805_train()
43 nv_wri2cr(mast, chan->addr, 0xa2, 0x01); anx9805_train()
44 nv_wri2cr(mast, chan->addr, 0xa8, 0x01); anx9805_train()
47 while ((tmp = nv_rdi2cr(mast, chan->addr, 0xa8)) & 0x01) { anx9805_train()
67 struct anx9805_i2c_port *chan = (void *)port; anx9805_aux() local
68 struct nvkm_i2c_port *mast = (void *)nv_object(chan)->parent; anx9805_aux()
75 tmp = nv_rdi2cr(mast, chan->ctrl, 0x07) & ~0x04; anx9805_aux()
76 nv_wri2cr(mast, chan->ctrl, 0x07, tmp | 0x04); anx9805_aux()
77 nv_wri2cr(mast, chan->ctrl, 0x07, tmp); anx9805_aux()
78 nv_wri2cr(mast, chan->ctrl, 0xf7, 0x01); anx9805_aux()
80 nv_wri2cr(mast, chan->addr, 0xe4, 0x80); anx9805_aux()
85 nv_wri2cr(mast, chan->addr, 0xf0 + i, buf[i]); anx9805_aux()
87 nv_wri2cr(mast, chan->addr, 0xe5, ((size - 1) << 4) | type); anx9805_aux()
88 nv_wri2cr(mast, chan->addr, 0xe6, (addr & 0x000ff) >> 0); anx9805_aux()
89 nv_wri2cr(mast, chan->addr, 0xe7, (addr & 0x0ff00) >> 8); anx9805_aux()
90 nv_wri2cr(mast, chan->addr, 0xe8, (addr & 0xf0000) >> 16); anx9805_aux()
91 nv_wri2cr(mast, chan->addr, 0xe9, 0x01); anx9805_aux()
94 while ((tmp = nv_rdi2cr(mast, chan->addr, 0xe9)) & 0x01) { anx9805_aux()
100 if ((tmp = nv_rdi2cr(mast, chan->ctrl, 0xf7)) & 0x01) { anx9805_aux()
107 buf[i] = nv_rdi2cr(mast, chan->addr, 0xf0 + i); anx9805_aux()
114 nv_wri2cr(mast, chan->ctrl, 0xf7, 0x01); anx9805_aux()
131 struct anx9805_i2c_port *chan; anx9805_aux_chan_ctor() local
136 &chan); anx9805_aux_chan_ctor()
137 *pobject = nv_object(chan); anx9805_aux_chan_ctor()
143 chan->addr = 0x38; anx9805_aux_chan_ctor()
144 chan->ctrl = 0x39; anx9805_aux_chan_ctor()
147 chan->addr = 0x3c; anx9805_aux_chan_ctor()
148 chan->ctrl = 0x3b; anx9805_aux_chan_ctor()
/linux-4.1.27/arch/mips/alchemy/common/
H A Ddma.c104 struct dma_chan *chan; au1000_dma_read_proc() local
107 chan = get_dma_chan(i); au1000_dma_read_proc()
108 if (chan != NULL) au1000_dma_read_proc()
110 i, chan->dev_str); au1000_dma_read_proc()
136 struct dma_chan *chan; dump_au1000_dma_channel() local
140 chan = &au1000_dma_table[dmanr]; dump_au1000_dma_channel()
144 __raw_readl(chan->io + DMA_MODE_SET)); dump_au1000_dma_channel()
146 __raw_readl(chan->io + DMA_PERIPHERAL_ADDR)); dump_au1000_dma_channel()
148 __raw_readl(chan->io + DMA_BUFFER0_START)); dump_au1000_dma_channel()
150 __raw_readl(chan->io + DMA_BUFFER1_START)); dump_au1000_dma_channel()
152 __raw_readl(chan->io + DMA_BUFFER0_COUNT)); dump_au1000_dma_channel()
154 __raw_readl(chan->io + DMA_BUFFER1_COUNT)); dump_au1000_dma_channel()
167 struct dma_chan *chan; request_au1000_dma() local
186 chan = &au1000_dma_table[i]; request_au1000_dma()
195 chan->irq_dev = irq_dev_id; request_au1000_dma()
196 ret = request_irq(chan->irq, irqhandler, irqflags, dev_str, request_au1000_dma()
197 chan->irq_dev); request_au1000_dma()
199 chan->irq_dev = NULL; request_au1000_dma()
203 chan->irq_dev = NULL; request_au1000_dma()
207 chan->io = (void __iomem *)(KSEG1ADDR(AU1000_DMA_PHYS_ADDR) + request_au1000_dma()
209 chan->dev_id = dev_id; request_au1000_dma()
210 chan->dev_str = dev_str; request_au1000_dma()
211 chan->fifo_addr = dev->fifo_addr; request_au1000_dma()
212 chan->mode = dev->dma_mode; request_au1000_dma()
223 struct dma_chan *chan = get_dma_chan(dmanr); free_au1000_dma() local
225 if (!chan) { free_au1000_dma()
231 if (chan->irq_dev) free_au1000_dma()
232 free_irq(chan->irq, chan->irq_dev); free_au1000_dma()
234 chan->irq_dev = NULL; free_au1000_dma()
235 chan->dev_id = -1; free_au1000_dma()
/linux-4.1.27/drivers/net/wan/
H A Dcosa.c162 struct channel_data *chan; member in struct:cosa_data
273 static void cosa_enable_rx(struct channel_data *chan);
274 static void cosa_disable_rx(struct channel_data *chan);
277 static int cosa_dma_able(struct channel_data *chan, char *buf, int data);
418 unregister_hdlc_device(cosa->chan[i].netdev); cosa_exit()
419 free_netdev(cosa->chan[i].netdev); cosa_exit()
422 kfree(cosa->chan); cosa_exit()
562 cosa->chan = kcalloc(cosa->nchannels, sizeof(struct channel_data), GFP_KERNEL); cosa_probe()
563 if (!cosa->chan) { cosa_probe()
569 struct channel_data *chan = &cosa->chan[i]; cosa_probe() local
571 chan->cosa = cosa; cosa_probe()
572 chan->num = i; cosa_probe()
573 sprintf(chan->name, "cosa%dc%d", chan->cosa->num, i); cosa_probe()
576 mutex_init(&chan->rlock); cosa_probe()
577 sema_init(&chan->wsem, 1); cosa_probe()
580 if (!(chan->netdev = alloc_hdlcdev(chan))) { cosa_probe()
581 pr_warn("%s: alloc_hdlcdev failed\n", chan->name); cosa_probe()
585 dev_to_hdlc(chan->netdev)->attach = cosa_net_attach; cosa_probe()
586 dev_to_hdlc(chan->netdev)->xmit = cosa_net_tx; cosa_probe()
587 chan->netdev->netdev_ops = &cosa_ops; cosa_probe()
588 chan->netdev->watchdog_timeo = TX_TIMEOUT; cosa_probe()
589 chan->netdev->base_addr = chan->cosa->datareg; cosa_probe()
590 chan->netdev->irq = chan->cosa->irq; cosa_probe()
591 chan->netdev->dma = chan->cosa->dma; cosa_probe()
592 if (register_hdlc_device(chan->netdev)) { cosa_probe()
593 netdev_warn(chan->netdev, cosa_probe()
595 free_netdev(chan->netdev); cosa_probe()
608 unregister_hdlc_device(cosa->chan[i].netdev); cosa_probe()
609 free_netdev(cosa->chan[i].netdev); cosa_probe()
611 kfree(cosa->chan); cosa_probe()
638 struct channel_data *chan = dev_to_chan(dev); cosa_net_open()
642 if (!(chan->cosa->firmware_status & COSA_FW_START)) { cosa_net_open()
644 chan->cosa->name, chan->cosa->firmware_status); cosa_net_open()
647 spin_lock_irqsave(&chan->cosa->lock, flags); cosa_net_open()
648 if (chan->usage != 0) { cosa_net_open()
650 chan->name, chan->usage); cosa_net_open()
651 spin_unlock_irqrestore(&chan->cosa->lock, flags); cosa_net_open()
654 chan->setup_rx = cosa_net_setup_rx; cosa_net_open()
655 chan->tx_done = cosa_net_tx_done; cosa_net_open()
656 chan->rx_done = cosa_net_rx_done; cosa_net_open()
657 chan->usage = -1; cosa_net_open()
658 chan->cosa->usage++; cosa_net_open()
659 spin_unlock_irqrestore(&chan->cosa->lock, flags); cosa_net_open()
663 spin_lock_irqsave(&chan->cosa->lock, flags); cosa_net_open()
664 chan->usage = 0; cosa_net_open()
665 chan->cosa->usage--; cosa_net_open()
666 spin_unlock_irqrestore(&chan->cosa->lock, flags); cosa_net_open()
671 cosa_enable_rx(chan); cosa_net_open()
678 struct channel_data *chan = dev_to_chan(dev); cosa_net_tx()
682 chan->tx_skb = skb; cosa_net_tx()
683 cosa_start_tx(chan, skb->data, skb->len); cosa_net_tx()
689 struct channel_data *chan = dev_to_chan(dev); cosa_net_timeout()
691 if (test_bit(RXBIT, &chan->cosa->rxtx)) { cosa_net_timeout()
692 chan->netdev->stats.rx_errors++; cosa_net_timeout()
693 chan->netdev->stats.rx_missed_errors++; cosa_net_timeout()
695 chan->netdev->stats.tx_errors++; cosa_net_timeout()
696 chan->netdev->stats.tx_aborted_errors++; cosa_net_timeout()
698 cosa_kick(chan->cosa); cosa_net_timeout()
699 if (chan->tx_skb) { cosa_net_timeout()
700 dev_kfree_skb(chan->tx_skb); cosa_net_timeout()
701 chan->tx_skb = NULL; cosa_net_timeout()
708 struct channel_data *chan = dev_to_chan(dev); cosa_net_close()
713 cosa_disable_rx(chan); cosa_net_close()
714 spin_lock_irqsave(&chan->cosa->lock, flags); cosa_net_close()
715 if (chan->rx_skb) { cosa_net_close()
716 kfree_skb(chan->rx_skb); cosa_net_close()
717 chan->rx_skb = NULL; cosa_net_close()
719 if (chan->tx_skb) { cosa_net_close()
720 kfree_skb(chan->tx_skb); cosa_net_close()
721 chan->tx_skb = NULL; cosa_net_close()
723 chan->usage = 0; cosa_net_close()
724 chan->cosa->usage--; cosa_net_close()
725 spin_unlock_irqrestore(&chan->cosa->lock, flags); cosa_net_close()
729 static char *cosa_net_setup_rx(struct channel_data *chan, int size) cosa_net_setup_rx()
735 kfree_skb(chan->rx_skb); cosa_net_setup_rx()
736 chan->rx_skb = dev_alloc_skb(size); cosa_net_setup_rx()
737 if (chan->rx_skb == NULL) { cosa_net_setup_rx()
738 pr_notice("%s: Memory squeeze, dropping packet\n", chan->name); cosa_net_setup_rx()
739 chan->netdev->stats.rx_dropped++; cosa_net_setup_rx()
742 chan->netdev->trans_start = jiffies; cosa_net_setup_rx()
743 return skb_put(chan->rx_skb, size); cosa_net_setup_rx()
746 static int cosa_net_rx_done(struct channel_data *chan) cosa_net_rx_done()
748 if (!chan->rx_skb) { cosa_net_rx_done()
749 pr_warn("%s: rx_done with empty skb!\n", chan->name); cosa_net_rx_done()
750 chan->netdev->stats.rx_errors++; cosa_net_rx_done()
751 chan->netdev->stats.rx_frame_errors++; cosa_net_rx_done()
754 chan->rx_skb->protocol = hdlc_type_trans(chan->rx_skb, chan->netdev); cosa_net_rx_done()
755 chan->rx_skb->dev = chan->netdev; cosa_net_rx_done()
756 skb_reset_mac_header(chan->rx_skb); cosa_net_rx_done()
757 chan->netdev->stats.rx_packets++; cosa_net_rx_done()
758 chan->netdev->stats.rx_bytes += chan->cosa->rxsize; cosa_net_rx_done()
759 netif_rx(chan->rx_skb); cosa_net_rx_done()
760 chan->rx_skb = NULL; cosa_net_rx_done()
765 static int cosa_net_tx_done(struct channel_data *chan, int size) cosa_net_tx_done()
767 if (!chan->tx_skb) { cosa_net_tx_done()
768 pr_warn("%s: tx_done with empty skb!\n", chan->name); cosa_net_tx_done()
769 chan->netdev->stats.tx_errors++; cosa_net_tx_done()
770 chan->netdev->stats.tx_aborted_errors++; cosa_net_tx_done()
773 dev_kfree_skb_irq(chan->tx_skb); cosa_net_tx_done()
774 chan->tx_skb = NULL; cosa_net_tx_done()
775 chan->netdev->stats.tx_packets++; cosa_net_tx_done()
776 chan->netdev->stats.tx_bytes += size; cosa_net_tx_done()
777 netif_wake_queue(chan->netdev); cosa_net_tx_done()
788 struct channel_data *chan = file->private_data; cosa_read()
789 struct cosa_data *cosa = chan->cosa; cosa_read()
797 if (mutex_lock_interruptible(&chan->rlock)) cosa_read()
800 chan->rxdata = kmalloc(COSA_MTU, GFP_DMA|GFP_KERNEL); cosa_read()
801 if (chan->rxdata == NULL) { cosa_read()
802 mutex_unlock(&chan->rlock); cosa_read()
806 chan->rx_status = 0; cosa_read()
807 cosa_enable_rx(chan); cosa_read()
809 add_wait_queue(&chan->rxwaitq, &wait); cosa_read()
810 while (!chan->rx_status) { cosa_read()
815 if (signal_pending(current) && chan->rx_status == 0) { cosa_read()
816 chan->rx_status = 1; cosa_read()
817 remove_wait_queue(&chan->rxwaitq, &wait); cosa_read()
820 mutex_unlock(&chan->rlock); cosa_read()
824 remove_wait_queue(&chan->rxwaitq, &wait); cosa_read()
826 kbuf = chan->rxdata; cosa_read()
827 count = chan->rxsize; cosa_read()
829 mutex_unlock(&chan->rlock); cosa_read()
839 static char *chrdev_setup_rx(struct channel_data *chan, int size) chrdev_setup_rx()
842 chan->rxsize = size; chrdev_setup_rx()
843 return chan->rxdata; chrdev_setup_rx()
846 static int chrdev_rx_done(struct channel_data *chan) chrdev_rx_done()
848 if (chan->rx_status) { /* Reader has died */ chrdev_rx_done()
849 kfree(chan->rxdata); chrdev_rx_done()
850 up(&chan->wsem); chrdev_rx_done()
852 chan->rx_status = 1; chrdev_rx_done()
853 wake_up_interruptible(&chan->rxwaitq); chrdev_rx_done()
862 struct channel_data *chan = file->private_data; cosa_write()
863 struct cosa_data *cosa = chan->cosa; cosa_write()
872 if (down_interruptible(&chan->wsem)) cosa_write()
881 up(&chan->wsem); cosa_write()
885 up(&chan->wsem); cosa_write()
889 chan->tx_status=0; cosa_write()
890 cosa_start_tx(chan, kbuf, count); cosa_write()
893 add_wait_queue(&chan->txwaitq, &wait); cosa_write()
894 while (!chan->tx_status) { cosa_write()
899 if (signal_pending(current) && chan->tx_status == 0) { cosa_write()
900 chan->tx_status = 1; cosa_write()
901 remove_wait_queue(&chan->txwaitq, &wait); cosa_write()
903 chan->tx_status = 1; cosa_write()
905 up(&chan->wsem); cosa_write()
909 remove_wait_queue(&chan->txwaitq, &wait); cosa_write()
911 up(&chan->wsem); cosa_write()
917 static int chrdev_tx_done(struct channel_data *chan, int size) chrdev_tx_done()
919 if (chan->tx_status) { /* Writer was interrupted */ chrdev_tx_done()
920 kfree(chan->txbuf); chrdev_tx_done()
921 up(&chan->wsem); chrdev_tx_done()
923 chan->tx_status = 1; chrdev_tx_done()
924 wake_up_interruptible(&chan->txwaitq); chrdev_tx_done()
937 struct channel_data *chan; cosa_open()
955 chan = cosa->chan + n; cosa_open()
957 file->private_data = chan; cosa_open()
961 if (chan->usage < 0) { /* in netdev mode */ cosa_open()
967 chan->usage++; cosa_open()
969 chan->tx_done = chrdev_tx_done; cosa_open()
970 chan->setup_rx = chrdev_setup_rx; cosa_open()
971 chan->rx_done = chrdev_rx_done; cosa_open()
1192 struct channel_data *chan = dev_to_chan(dev); cosa_net_ioctl()
1193 rv = cosa_ioctl_common(chan->cosa, chan, cmd, cosa_net_ioctl()
1222 static void cosa_enable_rx(struct channel_data *chan) cosa_enable_rx()
1224 struct cosa_data *cosa = chan->cosa; cosa_enable_rx()
1226 if (!test_and_set_bit(chan->num, &cosa->rxbitmap)) cosa_enable_rx()
1230 static void cosa_disable_rx(struct channel_data *chan) cosa_disable_rx()
1232 struct cosa_data *cosa = chan->cosa; cosa_disable_rx()
1234 if (test_and_clear_bit(chan->num, &cosa->rxbitmap)) cosa_disable_rx()
1244 static int cosa_start_tx(struct channel_data *chan, char *buf, int len) cosa_start_tx()
1246 struct cosa_data *cosa = chan->cosa; cosa_start_tx()
1252 chan->cosa->num, chan->num, len); cosa_start_tx()
1258 chan->txbuf = buf; cosa_start_tx()
1259 chan->txsize = len; cosa_start_tx()
1261 chan->txsize = COSA_MTU; cosa_start_tx()
1265 set_bit(chan->num, &cosa->txbitmap); cosa_start_tx()
1375 static int cosa_dma_able(struct channel_data *chan, char *buf, int len) cosa_dma_able()
1384 chan->name); cosa_dma_able()
1718 cosa->txsize = cosa->chan[cosa->txchan].txsize; tx_interrupt()
1719 if (cosa_dma_able(cosa->chan+cosa->txchan, tx_interrupt()
1720 cosa->chan[cosa->txchan].txbuf, cosa->txsize)) { tx_interrupt()
1721 cosa->txbuf = cosa->chan[cosa->txchan].txbuf; tx_interrupt()
1723 memcpy(cosa->bouncebuf, cosa->chan[cosa->txchan].txbuf, tx_interrupt()
1860 cosa->rxchan = cosa->chan + ((cosa->rxsize & 0xe000) >> 13); rx_interrupt()
1909 struct channel_data *chan = cosa->chan+cosa->txchan; eot_interrupt()
1910 if (chan->tx_done) eot_interrupt()
1911 if (chan->tx_done(chan, cosa->txsize)) eot_interrupt()
1912 clear_bit(chan->num, &cosa->txbitmap); eot_interrupt()
637 struct channel_data *chan = dev_to_chan(dev); cosa_net_open() local
677 struct channel_data *chan = dev_to_chan(dev); cosa_net_tx() local
688 struct channel_data *chan = dev_to_chan(dev); cosa_net_timeout() local
707 struct channel_data *chan = dev_to_chan(dev); cosa_net_close() local
728 cosa_net_setup_rx(struct channel_data *chan, int size) cosa_net_setup_rx() argument
745 cosa_net_rx_done(struct channel_data *chan) cosa_net_rx_done() argument
764 cosa_net_tx_done(struct channel_data *chan, int size) cosa_net_tx_done() argument
787 struct channel_data *chan = file->private_data; cosa_read() local
838 chrdev_setup_rx(struct channel_data *chan, int size) chrdev_setup_rx() argument
845 chrdev_rx_done(struct channel_data *chan) chrdev_rx_done() argument
861 struct channel_data *chan = file->private_data; cosa_write() local
916 chrdev_tx_done(struct channel_data *chan, int size) chrdev_tx_done() argument
936 struct channel_data *chan; cosa_open() local
1190 struct channel_data *chan = dev_to_chan(dev); cosa_net_ioctl() local
1219 cosa_enable_rx(struct channel_data *chan) cosa_enable_rx() argument
1227 cosa_disable_rx(struct channel_data *chan) cosa_disable_rx() argument
1241 cosa_start_tx(struct channel_data *chan, char *buf, int len) cosa_start_tx() argument
1372 cosa_dma_able(struct channel_data *chan, char *buf, int len) cosa_dma_able() argument
1903 struct channel_data *chan = cosa->chan+cosa->txchan; eot_interrupt() local
H A Dsealevel.c40 struct z8530_channel *chan; member in struct:slvl_device
92 err = z8530_sync_dma_open(d, slvl->chan); sealevel_open()
95 err = z8530_sync_open(d, slvl->chan); sealevel_open()
106 z8530_sync_dma_close(d, slvl->chan); sealevel_open()
109 z8530_sync_close(d, slvl->chan); sealevel_open()
115 slvl->chan->rx_function = sealevel_input; sealevel_open()
133 slvl->chan->rx_function = z8530_null_rx; sealevel_close()
140 z8530_sync_dma_close(d, slvl->chan); sealevel_close()
143 z8530_sync_close(d, slvl->chan); sealevel_close()
163 return z8530_queue_xmit(dev_to_chan(d)->chan, skb); sealevel_queue_xmit()
200 sv->chan->netdevice = dev; slvl_setup()
228 b->dev[0].chan = &b->board.chanA; slvl_init()
231 b->dev[1].chan = &b->board.chanB; slvl_init()
324 unregister_hdlc_device(b->dev[0].chan->netdevice); slvl_init()
325 free_netdev(b->dev[0].chan->netdevice); slvl_init()
346 struct net_device *d = b->dev[u].chan->netdevice; slvl_shutdown()
/linux-4.1.27/arch/mips/kernel/
H A Drtlx.c36 struct rtlx_channel *chan = &rtlx->channel[i]; dump_rtlx() local
39 chan->rt_state, chan->lx_state, chan->buffer_size); dump_rtlx()
42 chan->rt_read, chan->rt_write); dump_rtlx()
45 chan->lx_read, chan->lx_write); dump_rtlx()
47 pr_info(" rt_buffer <%s>\n", chan->rt_buffer); dump_rtlx()
48 pr_info(" lx_buffer <%s>\n", chan->lx_buffer); dump_rtlx()
92 struct rtlx_channel *chan; rtlx_open() local
163 chan = &rtlx->channel[index]; rtlx_open()
165 state = xchg(&chan->lx_state, RTLX_STATE_OPENED); rtlx_open()
192 struct rtlx_channel *chan; rtlx_read_poll() local
197 chan = &rtlx->channel[index]; rtlx_read_poll()
200 if (chan->lx_read == chan->lx_write) { rtlx_read_poll()
204 (chan->lx_read != chan->lx_write) || rtlx_read_poll()
215 return (chan->lx_write + chan->buffer_size - chan->lx_read) rtlx_read_poll()
216 % chan->buffer_size; rtlx_read_poll()
234 struct rtlx_channel *chan = &rtlx->channel[index]; rtlx_write_poll() local
236 return write_spacefree(chan->rt_read, chan->rt_write, rtlx_write_poll()
237 chan->buffer_size); rtlx_write_poll()
/linux-4.1.27/sound/drivers/opl3/
H A Dopl3_midi.c33 struct snd_midi_channel *chan);
65 struct snd_midi_channel *chan) snd_opl3_calc_volume()
70 volume = (vel * chan->gm_volume * chan->gm_expression) / (127*127); snd_opl3_calc_volume()
98 int note, struct snd_midi_channel *chan) snd_opl3_calc_pitch()
104 if (chan->midi_pitchbend) { snd_opl3_calc_pitch()
105 int pitchbend = chan->midi_pitchbend; snd_opl3_calc_pitch()
143 struct snd_midi_channel *chan) { opl3_get_voice()
255 vp->chan); snd_opl3_timer_func()
294 void snd_opl3_note_on(void *p, int note, int vel, struct snd_midi_channel *chan) snd_opl3_note_on() argument
326 chan->number, chan->midi_program, note, vel); snd_opl3_note_on()
332 if (chan->drum_channel) { snd_opl3_note_on()
337 bank = chan->gm_bank_select; snd_opl3_note_on()
338 prg = chan->midi_program; snd_opl3_note_on()
342 if (chan->number >= MAX_OPL3_VOICES) snd_opl3_note_on()
347 prg = chan->midi_program; snd_opl3_note_on()
353 snd_opl3_drum_switch(opl3, note, vel, 1, chan); snd_opl3_note_on()
386 voice = opl3_get_voice(opl3, instr_4op, chan); snd_opl3_note_on()
389 voice = snd_opl3_oss_map[chan->number]; snd_opl3_note_on()
459 snd_opl3_calc_volume(&vol_op[3], vel, chan); snd_opl3_note_on()
462 snd_opl3_calc_volume(&vol_op[2], vel, chan); snd_opl3_note_on()
465 snd_opl3_calc_volume(&vol_op[0], vel, chan); snd_opl3_note_on()
468 snd_opl3_calc_volume(&vol_op[1], vel, chan); snd_opl3_note_on()
471 snd_opl3_calc_volume(&vol_op[1], vel, chan); snd_opl3_note_on()
473 snd_opl3_calc_volume(&vol_op[0], vel, chan); snd_opl3_note_on()
513 if (chan->gm_pan < 43) snd_opl3_note_on()
515 if (chan->gm_pan > 85) snd_opl3_note_on()
525 if (chan->gm_pan < 43) snd_opl3_note_on()
527 if (chan->gm_pan > 85) snd_opl3_note_on()
547 snd_opl3_calc_pitch(&fnum, &blocknum, note, chan); snd_opl3_note_on()
580 vp->chan = chan; snd_opl3_note_on()
588 vp2->chan = chan; snd_opl3_note_on()
672 struct snd_midi_channel *chan) snd_opl3_note_off_unsafe()
683 chan->number, chan->midi_program, note); snd_opl3_note_off_unsafe()
687 if (chan->drum_channel && use_internal_drums) { snd_opl3_note_off_unsafe()
688 snd_opl3_drum_switch(opl3, note, vel, 0, chan); snd_opl3_note_off_unsafe()
695 if (vp->state > 0 && vp->chan == chan && vp->note == note) { snd_opl3_note_off_unsafe()
701 if (chan->number < MAX_OPL3_VOICES) { snd_opl3_note_off_unsafe()
702 voice = snd_opl3_oss_map[chan->number]; snd_opl3_note_off_unsafe()
709 struct snd_midi_channel *chan) snd_opl3_note_off()
715 snd_opl3_note_off_unsafe(p, note, vel, chan); snd_opl3_note_off()
722 void snd_opl3_key_press(void *p, int note, int vel, struct snd_midi_channel *chan) snd_opl3_key_press() argument
729 chan->number, chan->midi_program); snd_opl3_key_press()
736 void snd_opl3_terminate_note(void *p, int note, struct snd_midi_channel *chan) snd_opl3_terminate_note() argument
743 chan->number, chan->midi_program); snd_opl3_terminate_note()
761 if (vp->chan == NULL) snd_opl3_update_pitch()
774 snd_opl3_calc_pitch(&fnum, &blocknum, vp->note, vp->chan); snd_opl3_update_pitch()
795 static void snd_opl3_pitch_ctrl(struct snd_opl3 *opl3, struct snd_midi_channel *chan) snd_opl3_pitch_ctrl() argument
807 if (vp->state > 0 && vp->chan == chan) { snd_opl3_pitch_ctrl()
813 if (chan->number < MAX_OPL3_VOICES) { snd_opl3_pitch_ctrl()
814 voice = snd_opl3_oss_map[chan->number]; snd_opl3_pitch_ctrl()
825 void snd_opl3_control(void *p, int type, struct snd_midi_channel *chan) snd_opl3_control() argument
832 type, chan->number, chan->midi_program); snd_opl3_control()
837 if (chan->control[MIDI_CTL_MSB_MODWHEEL] > 63) snd_opl3_control()
845 if (chan->control[MIDI_CTL_E2_TREMOLO_DEPTH] > 63) snd_opl3_control()
853 snd_opl3_pitch_ctrl(opl3, chan); snd_opl3_control()
861 void snd_opl3_nrpn(void *p, struct snd_midi_channel *chan, snd_opl3_nrpn() argument
869 chan->number, chan->midi_program); snd_opl3_nrpn()
64 snd_opl3_calc_volume(unsigned char *volbyte, int vel, struct snd_midi_channel *chan) snd_opl3_calc_volume() argument
97 snd_opl3_calc_pitch(unsigned char *fnum, unsigned char *blocknum, int note, struct snd_midi_channel *chan) snd_opl3_calc_pitch() argument
142 opl3_get_voice(struct snd_opl3 *opl3, int instr_4op, struct snd_midi_channel *chan) opl3_get_voice() argument
671 snd_opl3_note_off_unsafe(void *p, int note, int vel, struct snd_midi_channel *chan) snd_opl3_note_off_unsafe() argument
708 snd_opl3_note_off(void *p, int note, int vel, struct snd_midi_channel *chan) snd_opl3_note_off() argument
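
snd_opl3_calc_volume() above folds three 0..127 MIDI quantities, note velocity, channel volume and expression, into one 0..127 level by multiplying them and renormalising with /(127*127). A standalone sketch with worked values:

    #include <stdio.h>

    /* same scaling as the quoted snd_opl3_calc_volume() line */
    static int midi_scaled_volume(int vel, int gm_volume, int gm_expression)
    {
        return (vel * gm_volume * gm_expression) / (127 * 127);
    }

    int main(void)
    {
        printf("%d\n", midi_scaled_volume(127, 127, 127)); /* 127: all at max */
        printf("%d\n", midi_scaled_volume(100, 127, 64));  /* 50: expression halves it */
        return 0;
    }
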
H A Dopl3_voice.h31 void snd_opl3_note_on(void *p, int note, int vel, struct snd_midi_channel *chan);
32 void snd_opl3_note_off(void *p, int note, int vel, struct snd_midi_channel *chan);
33 void snd_opl3_key_press(void *p, int note, int vel, struct snd_midi_channel *chan);
34 void snd_opl3_terminate_note(void *p, int note, struct snd_midi_channel *chan);
35 void snd_opl3_control(void *p, int type, struct snd_midi_channel *chan);
36 void snd_opl3_nrpn(void *p, struct snd_midi_channel *chan, struct snd_midi_channel_set *chset);
39 void snd_opl3_calc_volume(unsigned char *reg, int vel, struct snd_midi_channel *chan);
44 void snd_opl3_drum_switch(struct snd_opl3 *opl3, int note, int on_off, int vel, struct snd_midi_channel *chan);
/linux-4.1.27/drivers/staging/comedi/drivers/
H A Dke_counter.c54 unsigned int chan = CR_CHAN(insn->chanspec); ke_counter_insn_write() local
62 outb((val >> 24) & 0xff, dev->iobase + KE_SIGN_REG(chan)); ke_counter_insn_write()
63 outb((val >> 16) & 0xff, dev->iobase + KE_MSB_REG(chan)); ke_counter_insn_write()
64 outb((val >> 8) & 0xff, dev->iobase + KE_MID_REG(chan)); ke_counter_insn_write()
65 outb((val >> 0) & 0xff, dev->iobase + KE_LSB_REG(chan)); ke_counter_insn_write()
76 unsigned int chan = CR_CHAN(insn->chanspec); ke_counter_insn_read() local
82 inb(dev->iobase + KE_LATCH_REG(chan)); ke_counter_insn_read()
84 val = inb(dev->iobase + KE_LSB_REG(chan)); ke_counter_insn_read()
85 val |= (inb(dev->iobase + KE_MID_REG(chan)) << 8); ke_counter_insn_read()
86 val |= (inb(dev->iobase + KE_MSB_REG(chan)) << 16); ke_counter_insn_read()
87 val |= (inb(dev->iobase + KE_SIGN_REG(chan)) << 24); ke_counter_insn_read()
97 unsigned int chan; ke_counter_reset() local
99 for (chan = 0; chan < 3; chan++) ke_counter_reset()
100 outb(0, dev->iobase + KE_RESET_REG(chan)); ke_counter_reset()
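
ke_counter_insn_read() above first touches KE_LATCH_REG (presumably freezing the running count so the following byte reads are mutually coherent) and then rebuilds the value from four 8-bit reads. A standalone sketch of the reassembly, with reg[] standing in for the inb() calls:

    #include <stdio.h>

    /* assemble LSB, MID, MSB and sign bytes into one 32-bit value,
     * mirroring the shift-and-or chain in the quoted read handler */
    static unsigned int assemble(const unsigned char reg[4])
    {
        return (unsigned int)reg[0]          /* LSB  */
             | ((unsigned int)reg[1] << 8)   /* MID  */
             | ((unsigned int)reg[2] << 16)  /* MSB  */
             | ((unsigned int)reg[3] << 24); /* sign */
    }

    int main(void)
    {
        unsigned char reg[4] = { 0x78, 0x56, 0x34, 0x12 };
        printf("0x%08x\n", assemble(reg)); /* prints 0x12345678 */
        return 0;
    }
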
H A Dfl512.c64 unsigned int chan = CR_CHAN(insn->chanspec); fl512_ai_insn_read() local
68 outb(chan, dev->iobase + FL512_AI_MUX_REG); fl512_ai_insn_read()
91 unsigned int chan = CR_CHAN(insn->chanspec); fl512_ao_insn_write() local
92 unsigned int val = s->readback[chan]; fl512_ao_insn_write()
99 outb(val & 0x0ff, dev->iobase + FL512_AO_DATA_REG(chan)); fl512_ao_insn_write()
100 outb((val >> 8) & 0xf, dev->iobase + FL512_AO_DATA_REG(chan)); fl512_ao_insn_write()
101 inb(dev->iobase + FL512_AO_TRIG_REG(chan)); fl512_ao_insn_write()
103 s->readback[chan] = val; fl512_ao_insn_write()
H A Dserial2002.c346 int j, chan; serial2002_setup_subdevice() local
348 for (chan = 0, j = 0; j < 32; j++) { serial2002_setup_subdevice()
350 chan++; serial2002_setup_subdevice()
352 s->n_chan = chan; serial2002_setup_subdevice()
372 for (chan = 0, j = 0; j < 32; j++) { serial2002_setup_subdevice()
375 mapping[chan] = j; serial2002_setup_subdevice()
380 range_table_list[chan] = serial2002_setup_subdevice()
383 maxdata_list[chan] = ((long long)1 << cfg[j].bits) - 1; serial2002_setup_subdevice()
384 chan++; serial2002_setup_subdevice()
583 int chan; serial2002_di_insn_read() local
585 chan = devpriv->digital_in_mapping[CR_CHAN(insn->chanspec)]; serial2002_di_insn_read()
589 serial2002_poll_digital(devpriv->tty, chan); serial2002_di_insn_read()
592 if (read.kind != is_digital || read.index == chan) serial2002_di_insn_read()
607 int chan; serial2002_do_insn_write() local
609 chan = devpriv->digital_out_mapping[CR_CHAN(insn->chanspec)]; serial2002_do_insn_write()
614 write.index = chan; serial2002_do_insn_write()
628 int chan; serial2002_ai_insn_read() local
630 chan = devpriv->analog_in_mapping[CR_CHAN(insn->chanspec)]; serial2002_ai_insn_read()
634 serial2002_poll_channel(devpriv->tty, chan); serial2002_ai_insn_read()
637 if (read.kind != is_channel || read.index == chan) serial2002_ai_insn_read()
652 int chan; serial2002_ao_insn_write() local
654 chan = devpriv->analog_out_mapping[CR_CHAN(insn->chanspec)]; serial2002_ao_insn_write()
659 write.index = chan; serial2002_ao_insn_write()
662 devpriv->ao_readback[chan] = data[n]; serial2002_ao_insn_write()
674 int chan = CR_CHAN(insn->chanspec); serial2002_ao_insn_read() local
677 data[n] = devpriv->ao_readback[chan]; serial2002_ao_insn_read()
689 int chan; serial2002_encoder_insn_read() local
691 chan = devpriv->encoder_in_mapping[CR_CHAN(insn->chanspec)]; serial2002_encoder_insn_read()
695 serial2002_poll_channel(devpriv->tty, chan); serial2002_encoder_insn_read()
698 if (read.kind != is_channel || read.index == chan) serial2002_encoder_insn_read()
/linux-4.1.27/arch/arm/plat-pxa/
H A Ddma.c54 int chan = (int)s->private; dbg_show_requester_chan() local
58 seq_printf(s, "DMA channel %d requesters list :\n", chan); dbg_show_requester_chan()
61 if ((drcmr & DRCMR_CHLNUM) == chan) dbg_show_requester_chan()
86 int chan = (int)s->private; dbg_show_descriptors() local
93 spin_lock_irqsave(&dma_channels[chan].lock, flags); dbg_show_descriptors()
94 phys_desc = DDADR(chan); dbg_show_descriptors()
96 seq_printf(s, "DMA channel %d descriptors :\n", chan); dbg_show_descriptors()
126 spin_unlock_irqrestore(&dma_channels[chan].lock, flags); dbg_show_descriptors()
133 int chan = (int)s->private; dbg_show_chan_state() local
138 dcsr = DCSR(chan); dbg_show_chan_state()
139 dcmd = DCMD(chan); dbg_show_chan_state()
143 seq_printf(s, "DMA channel %d\n", chan); dbg_show_chan_state()
144 seq_printf(s, "\tPriority : %s\n", str_prio[dma_channels[chan].prio]); dbg_show_chan_state()
146 DALGN & (1 << chan) ? "yes" : "no"); dbg_show_chan_state()
162 seq_printf(s, "\tDSADR = %08x\n", DSADR(chan)); dbg_show_chan_state()
163 seq_printf(s, "\tDTADR = %08x\n", DTADR(chan)); dbg_show_chan_state()
164 seq_printf(s, "\tDDADR = %08x\n", DDADR(chan)); dbg_show_chan_state()
199 struct dentry *chan, *chan_state = NULL, *chan_descr = NULL; pxa_dma_dbg_alloc_chan() local
204 chan = debugfs_create_dir(chan_name, chandir); pxa_dma_dbg_alloc_chan()
207 if (chan) pxa_dma_dbg_alloc_chan()
208 chan_state = debugfs_create_file("state", 0400, chan, dt, pxa_dma_dbg_alloc_chan()
211 chan_descr = debugfs_create_file("descriptors", 0400, chan, dt, pxa_dma_dbg_alloc_chan()
214 chan_reqs = debugfs_create_file("requesters", 0400, chan, dt, pxa_dma_dbg_alloc_chan()
219 return chan; pxa_dma_dbg_alloc_chan()
222 debugfs_remove_recursive(chan); pxa_dma_dbg_alloc_chan()
/linux-4.1.27/drivers/iio/dac/
H A Dad5755.c25 #define AD5755_WRITE_REG_DATA(chan) (chan)
26 #define AD5755_WRITE_REG_GAIN(chan) (0x08 | (chan))
27 #define AD5755_WRITE_REG_OFFSET(chan) (0x10 | (chan))
28 #define AD5755_WRITE_REG_CTRL(chan) (0x1c | (chan))
30 #define AD5755_READ_REG_DATA(chan) (chan)
31 #define AD5755_READ_REG_CTRL(chan) (0x4 | (chan))
32 #define AD5755_READ_REG_GAIN(chan) (0x8 | (chan))
33 #define AD5755_READ_REG_OFFSET(chan) (0xc | (chan))
34 #define AD5755_READ_REG_CLEAR(chan) (0x10 | (chan))
35 #define AD5755_READ_REG_SLEW(chan) (0x14 | (chan))
240 struct iio_chan_spec const *chan, int *min, int *max) ad5755_get_min_max()
242 enum ad5755_mode mode = st->ctrl[chan->channel] & 7; ad5755_get_min_max()
248 struct iio_chan_spec const *chan) ad5755_get_offset()
252 ad5755_get_min_max(st, chan, &min, &max); ad5755_get_offset()
253 return (min * (1 << chan->scan_type.realbits)) / (max - min); ad5755_get_offset()
257 struct iio_chan_spec const *chan, long info, bool write, ad5755_chan_reg_info()
263 *reg = AD5755_WRITE_REG_DATA(chan->address); ad5755_chan_reg_info()
265 *reg = AD5755_READ_REG_DATA(chan->address); ad5755_chan_reg_info()
266 *shift = chan->scan_type.shift; ad5755_chan_reg_info()
271 *reg = AD5755_WRITE_REG_OFFSET(chan->address); ad5755_chan_reg_info()
273 *reg = AD5755_READ_REG_OFFSET(chan->address); ad5755_chan_reg_info()
279 *reg = AD5755_WRITE_REG_GAIN(chan->address); ad5755_chan_reg_info()
281 *reg = AD5755_READ_REG_GAIN(chan->address); ad5755_chan_reg_info()
293 const struct iio_chan_spec *chan, int *val, int *val2, long info) ad5755_read_raw()
302 ad5755_get_min_max(st, chan, &min, &max); ad5755_read_raw()
304 *val2 = chan->scan_type.realbits; ad5755_read_raw()
307 *val = ad5755_get_offset(st, chan); ad5755_read_raw()
310 ret = ad5755_chan_reg_info(st, chan, info, false, ad5755_read_raw()
328 const struct iio_chan_spec *chan, int val, int val2, long info) ad5755_write_raw()
334 ret = ad5755_chan_reg_info(st, chan, info, true, ad5755_write_raw()
349 const struct iio_chan_spec *chan, char *buf) ad5755_read_powerdown()
354 (bool)(st->pwr_down & (1 << chan->channel))); ad5755_read_powerdown()
358 struct iio_chan_spec const *chan, const char *buf, size_t len) ad5755_write_powerdown()
367 ret = ad5755_set_channel_pwr_down(indio_dev, chan->channel, pwr_down); ad5755_write_powerdown()
239 ad5755_get_min_max(struct ad5755_state *st, struct iio_chan_spec const *chan, int *min, int *max) ad5755_get_min_max() argument
247 ad5755_get_offset(struct ad5755_state *st, struct iio_chan_spec const *chan) ad5755_get_offset() argument
256 ad5755_chan_reg_info(struct ad5755_state *st, struct iio_chan_spec const *chan, long info, bool write, unsigned int *reg, unsigned int *shift, unsigned int *offset) ad5755_chan_reg_info() argument
292 ad5755_read_raw(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, int *val, int *val2, long info) ad5755_read_raw() argument
327 ad5755_write_raw(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, int val, int val2, long info) ad5755_write_raw() argument
348 ad5755_read_powerdown(struct iio_dev *indio_dev, uintptr_t priv, const struct iio_chan_spec *chan, char *buf) ad5755_read_powerdown() argument
357 ad5755_write_powerdown(struct iio_dev *indio_dev, uintptr_t priv, struct iio_chan_spec const *chan, const char *buf, size_t len) ad5755_write_powerdown() argument
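
The AD5755_*_REG_*() macros above build SPI register addresses by keeping the channel number in the low bits and OR-ing in a fixed per-register-class offset, so one macro family addresses every channel. A standalone sketch reproducing the write-side values:

    #include <stdio.h>

    /* same packing as the quoted AD5755 write-side macros */
    #define WRITE_REG_DATA(chan)   (chan)
    #define WRITE_REG_GAIN(chan)   (0x08 | (chan))
    #define WRITE_REG_OFFSET(chan) (0x10 | (chan))

    int main(void)
    {
        printf("data ch2:   0x%02x\n", WRITE_REG_DATA(2));   /* 0x02 */
        printf("gain ch2:   0x%02x\n", WRITE_REG_GAIN(2));   /* 0x0a */
        printf("offset ch2: 0x%02x\n", WRITE_REG_OFFSET(2)); /* 0x12 */
        return 0;
    }
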
H A Dad5686.c27 #define AD5686_ADDR_DAC(chan) (0x1 << (chan))
140 const struct iio_chan_spec *chan) ad5686_get_powerdown_mode()
144 return ((st->pwr_down_mode >> (chan->channel * 2)) & 0x3) - 1; ad5686_get_powerdown_mode()
148 const struct iio_chan_spec *chan, unsigned int mode) ad5686_set_powerdown_mode()
152 st->pwr_down_mode &= ~(0x3 << (chan->channel * 2)); ad5686_set_powerdown_mode()
153 st->pwr_down_mode |= ((mode + 1) << (chan->channel * 2)); ad5686_set_powerdown_mode()
166 uintptr_t private, const struct iio_chan_spec *chan, char *buf) ad5686_read_dac_powerdown()
171 (0x3 << (chan->channel * 2)))); ad5686_read_dac_powerdown()
175 uintptr_t private, const struct iio_chan_spec *chan, const char *buf, ad5686_write_dac_powerdown()
187 st->pwr_down_mask |= (0x3 << (chan->channel * 2)); ad5686_write_dac_powerdown()
189 st->pwr_down_mask &= ~(0x3 << (chan->channel * 2)); ad5686_write_dac_powerdown()
198 struct iio_chan_spec const *chan, ad5686_read_raw()
209 ret = ad5686_spi_read(st, chan->address); ad5686_read_raw()
217 *val2 = chan->scan_type.realbits; ad5686_read_raw()
224 struct iio_chan_spec const *chan, ad5686_write_raw()
234 if (val > (1 << chan->scan_type.realbits) || val < 0) ad5686_write_raw()
240 chan->address, ad5686_write_raw()
242 chan->scan_type.shift); ad5686_write_raw()
270 #define AD5868_CHANNEL(chan, bits, _shift) { \
274 .channel = chan, \
277 .address = AD5686_ADDR_DAC(chan), \
139 ad5686_get_powerdown_mode(struct iio_dev *indio_dev, const struct iio_chan_spec *chan) ad5686_get_powerdown_mode() argument
147 ad5686_set_powerdown_mode(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, unsigned int mode) ad5686_set_powerdown_mode() argument
165 ad5686_read_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, char *buf) ad5686_read_dac_powerdown() argument
174 ad5686_write_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, const char *buf, size_t len) ad5686_write_dac_powerdown() argument
197 ad5686_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long m) ad5686_read_raw() argument
223 ad5686_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long mask) ad5686_write_raw() argument
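
ad5686_get_powerdown_mode() and ad5686_set_powerdown_mode() above pack one 2-bit power-down field per channel into a single shadow word: channel N owns bits 2N+1..2N, get is a shift-and-mask, set is clear-then-OR (the quoted driver additionally stores mode + 1 so that 0 can stand for normal operation). A standalone sketch of the field handling:

    #include <stdio.h>

    static unsigned int get_mode(unsigned int shadow, int chan)
    {
        return (shadow >> (chan * 2)) & 0x3;
    }

    static unsigned int set_mode(unsigned int shadow, int chan, unsigned int mode)
    {
        shadow &= ~(0x3u << (chan * 2));      /* clear the channel's field */
        shadow |= (mode & 0x3) << (chan * 2); /* write the new value */
        return shadow;
    }

    int main(void)
    {
        unsigned int s = 0;

        s = set_mode(s, 0, 1);
        s = set_mode(s, 3, 2);
        printf("shadow 0x%02x, ch0 %u, ch3 %u\n",
               s, get_mode(s, 0), get_mode(s, 3)); /* 0x81, 1, 2 */
        return 0;
    }
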
H A Dad7303.c52 static int ad7303_write(struct ad7303_state *st, unsigned int chan, ad7303_write() argument
56 (chan << AD7303_CFG_ADDR_OFFSET) | ad7303_write()
63 uintptr_t private, const struct iio_chan_spec *chan, char *buf) ad7303_read_dac_powerdown()
68 AD7303_CFG_POWER_DOWN(chan->channel))); ad7303_read_dac_powerdown()
72 uintptr_t private, const struct iio_chan_spec *chan, const char *buf, ad7303_write_dac_powerdown()
86 st->config |= AD7303_CFG_POWER_DOWN(chan->channel); ad7303_write_dac_powerdown()
88 st->config &= ~AD7303_CFG_POWER_DOWN(chan->channel); ad7303_write_dac_powerdown()
92 ad7303_write(st, chan->channel, st->dac_cache[chan->channel]); ad7303_write_dac_powerdown()
99 struct iio_chan_spec const *chan) ad7303_get_vref()
113 struct iio_chan_spec const *chan, int *val, int *val2, long info) ad7303_read_raw()
120 *val = st->dac_cache[chan->channel]; ad7303_read_raw()
123 vref_uv = ad7303_get_vref(st, chan); ad7303_read_raw()
128 *val2 = chan->scan_type.realbits; ad7303_read_raw()
138 struct iio_chan_spec const *chan, int val, int val2, long mask) ad7303_write_raw()
145 if (val >= (1 << chan->scan_type.realbits) || val < 0) ad7303_write_raw()
149 ret = ad7303_write(st, chan->address, val); ad7303_write_raw()
151 st->dac_cache[chan->channel] = val; ad7303_write_raw()
177 #define AD7303_CHANNEL(chan) { \
181 .channel = (chan), \
184 .address = (chan), \
62 ad7303_read_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, char *buf) ad7303_read_dac_powerdown() argument
71 ad7303_write_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, const char *buf, size_t len) ad7303_write_dac_powerdown() argument
98 ad7303_get_vref(struct ad7303_state *st, struct iio_chan_spec const *chan) ad7303_get_vref() argument
112 ad7303_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long info) ad7303_read_raw() argument
137 ad7303_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long mask) ad7303_write_raw() argument
H A Dmax517.c80 struct iio_chan_spec const *chan, max517_read_raw()
90 *val = data->vref_mv[chan->channel]; max517_read_raw()
100 struct iio_chan_spec const *chan, int val, int val2, long mask) max517_write_raw()
106 ret = max517_set_value(indio_dev, val, chan->channel); max517_write_raw()
143 #define MAX517_CHANNEL(chan) { \
147 .channel = (chan), \
169 int chan; max517_probe() local
204 for (chan = 0; chan < indio_dev->num_channels; chan++) { max517_probe()
206 data->vref_mv[chan] = 5000; /* mV */ max517_probe()
208 data->vref_mv[chan] = platform_data->vref_mv[chan]; max517_probe()
79 max517_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long m) max517_read_raw() argument
99 max517_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long mask) max517_write_raw() argument
H A Dmax5821.c58 const struct iio_chan_spec *chan) max5821_get_powerdown_mode()
62 return st->powerdown_mode[chan->channel]; max5821_get_powerdown_mode()
66 const struct iio_chan_spec *chan, max5821_set_powerdown_mode()
71 st->powerdown_mode[chan->channel] = mode; max5821_set_powerdown_mode()
85 const struct iio_chan_spec *chan, max5821_read_dac_powerdown()
90 return sprintf(buf, "%d\n", st->powerdown[chan->channel]); max5821_read_dac_powerdown()
94 const struct iio_chan_spec *chan) max5821_sync_powerdown_mode()
100 if (chan->channel == 0) max5821_sync_powerdown_mode()
105 if (data->powerdown[chan->channel]) max5821_sync_powerdown_mode()
106 outbuf[1] |= data->powerdown_mode[chan->channel] + 1; max5821_sync_powerdown_mode()
115 const struct iio_chan_spec *chan, max5821_write_dac_powerdown()
126 data->powerdown[chan->channel] = powerdown; max5821_write_dac_powerdown()
128 ret = max5821_sync_powerdown_mode(data, chan); max5821_write_dac_powerdown()
147 #define MAX5821_CHANNEL(chan) { \
151 .channel = (chan), \
241 struct iio_chan_spec const *chan, max5821_read_raw()
248 return max5821_get_value(indio_dev, val, chan->channel); max5821_read_raw()
259 struct iio_chan_spec const *chan, max5821_write_raw()
267 return max5821_set_value(indio_dev, val, chan->channel); max5821_write_raw()
57 max5821_get_powerdown_mode(struct iio_dev *indio_dev, const struct iio_chan_spec *chan) max5821_get_powerdown_mode() argument
65 max5821_set_powerdown_mode(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, unsigned int mode) max5821_set_powerdown_mode() argument
83 max5821_read_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, char *buf) max5821_read_dac_powerdown() argument
93 max5821_sync_powerdown_mode(struct max5821_data *data, const struct iio_chan_spec *chan) max5821_sync_powerdown_mode() argument
113 max5821_write_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, const char *buf, size_t len) max5821_write_dac_powerdown() argument
240 max5821_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long mask) max5821_read_raw() argument
258 max5821_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long mask) max5821_write_raw() argument
H A Dad5380.c83 uintptr_t private, const struct iio_chan_spec *chan, char *buf) ad5380_read_dac_powerdown()
91 uintptr_t private, const struct iio_chan_spec *chan, const char *buf, ad5380_write_dac_powerdown()
122 const struct iio_chan_spec *chan) ad5380_get_powerdown_mode()
138 const struct iio_chan_spec *chan, unsigned int mode) ad5380_set_powerdown_mode()
157 static unsigned int ad5380_info_to_reg(struct iio_chan_spec const *chan, ad5380_info_to_reg() argument
162 return AD5380_REG_DATA(chan->address); ad5380_info_to_reg()
164 return AD5380_REG_OFFSET(chan->address); ad5380_info_to_reg()
166 return AD5380_REG_GAIN(chan->address); ad5380_info_to_reg()
175 struct iio_chan_spec const *chan, int val, int val2, long info) ad5380_write_raw()
177 const unsigned int max_val = (1 << chan->scan_type.realbits); ad5380_write_raw()
187 ad5380_info_to_reg(chan, info), ad5380_write_raw()
188 val << chan->scan_type.shift); ad5380_write_raw()
190 val += (1 << chan->scan_type.realbits) / 2; ad5380_write_raw()
195 AD5380_REG_OFFSET(chan->address), ad5380_write_raw()
196 val << chan->scan_type.shift); ad5380_write_raw()
204 struct iio_chan_spec const *chan, int *val, int *val2, long info) ad5380_read_raw()
212 ret = regmap_read(st->regmap, ad5380_info_to_reg(chan, info), ad5380_read_raw()
216 *val >>= chan->scan_type.shift; ad5380_read_raw()
219 ret = regmap_read(st->regmap, AD5380_REG_OFFSET(chan->address), ad5380_read_raw()
223 *val >>= chan->scan_type.shift; ad5380_read_raw()
224 val -= (1 << chan->scan_type.realbits) / 2; ad5380_read_raw()
228 *val2 = chan->scan_type.realbits; ad5380_read_raw()
82 ad5380_read_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, char *buf) ad5380_read_dac_powerdown() argument
90 ad5380_write_dac_powerdown(struct iio_dev *indio_dev, uintptr_t private, const struct iio_chan_spec *chan, const char *buf, size_t len) ad5380_write_dac_powerdown() argument
121 ad5380_get_powerdown_mode(struct iio_dev *indio_dev, const struct iio_chan_spec *chan) ad5380_get_powerdown_mode() argument
137 ad5380_set_powerdown_mode(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, unsigned int mode) ad5380_set_powerdown_mode() argument
174 ad5380_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long info) ad5380_write_raw() argument
203 ad5380_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long info) ad5380_read_raw() argument
H A Dad5360.c28 #define AD5360_CHAN_ADDR(chan) ((chan) + 0x8)
312 struct iio_chan_spec const *chan, ad5360_write_raw()
318 int max_val = (1 << chan->scan_type.realbits); ad5360_write_raw()
327 chan->address, val, chan->scan_type.shift); ad5360_write_raw()
334 chan->address, val, chan->scan_type.shift); ad5360_write_raw()
341 chan->address, val, chan->scan_type.shift); ad5360_write_raw()
352 val >>= (chan->scan_type.realbits - 14); ad5360_write_raw()
357 ofs_index = ad5360_get_channel_vref_index(st, chan->channel); ad5360_write_raw()
368 struct iio_chan_spec const *chan, ad5360_read_raw()
381 chan->address); ad5360_read_raw()
384 *val = ret >> chan->scan_type.shift; ad5360_read_raw()
387 scale_uv = ad5360_get_channel_vref(st, chan->channel); ad5360_read_raw()
393 *val2 = chan->scan_type.realbits; ad5360_read_raw()
397 chan->address); ad5360_read_raw()
404 chan->address); ad5360_read_raw()
410 ofs_index = ad5360_get_channel_vref_index(st, chan->channel); ad5360_read_raw()
416 ret <<= (chan->scan_type.realbits - 14); ad5360_read_raw()
311 ad5360_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long mask) ad5360_write_raw() argument
367 ad5360_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long m) ad5360_read_raw() argument
H A Dad5764.c168 static int ad5764_chan_info_to_reg(struct iio_chan_spec const *chan, long info) ad5764_chan_info_to_reg() argument
172 return AD5764_REG_DATA(chan->address); ad5764_chan_info_to_reg()
174 return AD5764_REG_OFFSET(chan->address); ad5764_chan_info_to_reg()
176 return AD5764_REG_FINE_GAIN(chan->address); ad5764_chan_info_to_reg()
185 struct iio_chan_spec const *chan, int val, int val2, long info) ad5764_write_raw()
187 const int max_val = (1 << chan->scan_type.realbits); ad5764_write_raw()
194 val <<= chan->scan_type.shift; ad5764_write_raw()
208 reg = ad5764_chan_info_to_reg(chan, info); ad5764_write_raw()
222 struct iio_chan_spec const *chan, int *val, int *val2, long info) ad5764_read_raw()
231 reg = AD5764_REG_DATA(chan->address); ad5764_read_raw()
235 *val >>= chan->scan_type.shift; ad5764_read_raw()
238 reg = AD5764_REG_OFFSET(chan->address); ad5764_read_raw()
245 reg = AD5764_REG_FINE_GAIN(chan->address); ad5764_read_raw()
253 vref = ad5764_get_channel_vref(st, chan->channel); ad5764_read_raw()
258 *val2 = chan->scan_type.realbits; ad5764_read_raw()
261 *val = -(1 << chan->scan_type.realbits) / 2; ad5764_read_raw()
184 ad5764_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long info) ad5764_write_raw() argument
221 ad5764_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long info) ad5764_read_raw() argument
H A Dmcp4922.c45 #define MCP4922_CHAN(chan, bits) { \
49 .channel = chan, \
70 struct iio_chan_spec const *chan, mcp4922_read_raw()
79 *val = state->value[chan->channel]; mcp4922_read_raw()
83 *val2 = chan->scan_type.realbits; mcp4922_read_raw()
91 struct iio_chan_spec const *chan, mcp4922_write_raw()
103 if (val > GENMASK(chan->scan_type.realbits-1, 0)) mcp4922_write_raw()
105 val <<= chan->scan_type.shift; mcp4922_write_raw()
106 state->value[chan->channel] = val; mcp4922_write_raw()
107 return mcp4922_spi_write(state, chan->channel, val); mcp4922_write_raw()
69 mcp4922_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long mask) mcp4922_read_raw() argument
90 mcp4922_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long mask) mcp4922_write_raw() argument
/linux-4.1.27/drivers/staging/comedi/
H A Drange.c61 int subd, chan; do_rangeinfo_ioctl() local
68 chan = (it.range_type >> 16) & 0xff; do_rangeinfo_ioctl()
78 if (chan >= s->n_chan) do_rangeinfo_ioctl()
80 lr = s->range_table_list[chan]; do_rangeinfo_ioctl()
111 int chan, range_len, i; comedi_check_chanlist() local
115 chan = CR_CHAN(chanspec); comedi_check_chanlist()
118 else if (s->range_table_list && chan < s->n_chan) comedi_check_chanlist()
119 range_len = s->range_table_list[chan]->length; comedi_check_chanlist()
122 if (chan >= s->n_chan || comedi_check_chanlist()
125 "bad chanlist[%d]=0x%08x chan=%d range length=%d\n", comedi_check_chanlist()
126 i, chanspec, chan, range_len); comedi_check_chanlist()
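
The CR_CHAN() calls throughout these comedi matches, and the (it.range_type >> 16) & 0xff unpack in do_rangeinfo_ioctl(), reflect comedi's packed chanspec: channel number in the low bits, range index around bit 16, and analog reference above that. A standalone sketch of that packing; the exact field widths beyond the quoted 8-bit range field are an assumption here:

    #include <stdio.h>

    #define PACK(chan, rng, aref) \
        ((((aref) & 0x3) << 24) | (((rng) & 0xff) << 16) | ((chan) & 0xffff))
    #define CHAN(spec)  ((spec) & 0xffff)
    #define RANGE(spec) (((spec) >> 16) & 0xff)
    #define AREF(spec)  (((spec) >> 24) & 0x3)

    int main(void)
    {
        unsigned int spec = PACK(5, 2, 1);

        printf("chan %u range %u aref %u\n",
               CHAN(spec), RANGE(spec), AREF(spec)); /* 5 2 1 */
        return 0;
    }
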
/linux-4.1.27/sound/soc/
H A Dsoc-generic-dmaengine-pcm.c28 struct dma_chan *chan[SNDRV_PCM_STREAM_LAST + 1]; member in struct:dmaengine_pcm
42 if (!pcm->chan[substream->stream]) dmaengine_dma_dev()
45 return pcm->chan[substream->stream]->device->dev; dmaengine_dma_dev()
86 struct dma_chan *chan = snd_dmaengine_pcm_get_chan(substream); dmaengine_pcm_hw_params() local
105 ret = dmaengine_slave_config(chan, &slave_config); dmaengine_pcm_hw_params()
118 struct dma_chan *chan = pcm->chan[substream->stream]; dmaengine_pcm_set_runtime_hwparams() local
146 ret = dma_get_slave_caps(chan, &dma_caps); dmaengine_pcm_set_runtime_hwparams()
193 struct dma_chan *chan = pcm->chan[substream->stream]; dmaengine_pcm_open() local
200 return snd_dmaengine_pcm_open(substream, chan); dmaengine_pcm_open()
213 if ((pcm->flags & SND_DMAENGINE_PCM_FLAG_HALF_DUPLEX) && pcm->chan[0]) dmaengine_pcm_compat_request_channel()
214 return pcm->chan[0]; dmaengine_pcm_compat_request_channel()
225 static bool dmaengine_pcm_can_report_residue(struct dma_chan *chan) dmaengine_pcm_can_report_residue() argument
230 ret = dma_get_slave_caps(chan, &dma_caps); dmaengine_pcm_can_report_residue()
268 if (!pcm->chan[i] && dmaengine_pcm_new()
270 pcm->chan[i] = dma_request_slave_channel(dev, dmaengine_pcm_new()
273 if (!pcm->chan[i] && (pcm->flags & SND_DMAENGINE_PCM_FLAG_COMPAT)) { dmaengine_pcm_new()
274 pcm->chan[i] = dmaengine_pcm_compat_request_channel(rtd, dmaengine_pcm_new()
278 if (!pcm->chan[i]) { dmaengine_pcm_new()
299 if (!dmaengine_pcm_can_report_residue(pcm->chan[i])) dmaengine_pcm_new()
346 struct dma_chan *chan; dmaengine_pcm_request_chan_of() local
373 chan = dma_request_slave_channel_reason(dev, name); dmaengine_pcm_request_chan_of()
374 if (IS_ERR(chan)) { dmaengine_pcm_request_chan_of()
375 if (PTR_ERR(chan) == -EPROBE_DEFER) dmaengine_pcm_request_chan_of()
377 pcm->chan[i] = NULL; dmaengine_pcm_request_chan_of()
379 pcm->chan[i] = chan; dmaengine_pcm_request_chan_of()
386 pcm->chan[1] = pcm->chan[0]; dmaengine_pcm_request_chan_of()
397 if (!pcm->chan[i]) dmaengine_pcm_release_chan()
399 dma_release_channel(pcm->chan[i]); dmaengine_pcm_release_chan()
/linux-4.1.27/include/net/bluetooth/
H A Dl2cap.h596 struct l2cap_chan *(*new_connection) (struct l2cap_chan *chan);
597 int (*recv) (struct l2cap_chan * chan,
599 void (*teardown) (struct l2cap_chan *chan, int err);
600 void (*close) (struct l2cap_chan *chan);
601 void (*state_change) (struct l2cap_chan *chan,
603 void (*ready) (struct l2cap_chan *chan);
604 void (*defer) (struct l2cap_chan *chan);
605 void (*resume) (struct l2cap_chan *chan);
606 void (*suspend) (struct l2cap_chan *chan);
607 void (*set_shutdown) (struct l2cap_chan *chan);
608 long (*get_sndtimeo) (struct l2cap_chan *chan);
609 struct sk_buff *(*alloc_skb) (struct l2cap_chan *chan,
669 struct l2cap_chan *chan; member in struct:l2cap_pinfo
791 static inline void l2cap_chan_lock(struct l2cap_chan *chan) l2cap_chan_lock() argument
793 mutex_lock_nested(&chan->lock, atomic_read(&chan->nesting)); l2cap_chan_lock()
796 static inline void l2cap_chan_unlock(struct l2cap_chan *chan) l2cap_chan_unlock() argument
798 mutex_unlock(&chan->lock); l2cap_chan_unlock()
801 static inline void l2cap_set_timer(struct l2cap_chan *chan, l2cap_set_timer() argument
804 BT_DBG("chan %p state %s timeout %ld", chan, l2cap_set_timer()
805 state_to_string(chan->state), timeout); l2cap_set_timer()
807 /* If delayed work cancelled do not hold(chan) l2cap_set_timer()
810 l2cap_chan_hold(chan); l2cap_set_timer()
815 static inline bool l2cap_clear_timer(struct l2cap_chan *chan, l2cap_clear_timer() argument
820 /* put(chan) if delayed work cancelled otherwise it l2cap_clear_timer()
824 l2cap_chan_put(chan); l2cap_clear_timer()
833 #define __set_ack_timer(c) l2cap_set_timer(c, &chan->ack_timer, \
837 static inline int __seq_offset(struct l2cap_chan *chan, __u16 seq1, __u16 seq2) __seq_offset() argument
842 return chan->tx_win_max + 1 - seq2 + seq1; __seq_offset()
845 static inline __u16 __next_seq(struct l2cap_chan *chan, __u16 seq) __next_seq() argument
847 return (seq + 1) % (chan->tx_win_max + 1); __next_seq()
850 static inline struct l2cap_chan *l2cap_chan_no_new_connection(struct l2cap_chan *chan) l2cap_chan_no_new_connection() argument
855 static inline int l2cap_chan_no_recv(struct l2cap_chan *chan, struct sk_buff *skb) l2cap_chan_no_recv() argument
860 static inline struct sk_buff *l2cap_chan_no_alloc_skb(struct l2cap_chan *chan, l2cap_chan_no_alloc_skb() argument
867 static inline void l2cap_chan_no_teardown(struct l2cap_chan *chan, int err) l2cap_chan_no_teardown() argument
871 static inline void l2cap_chan_no_close(struct l2cap_chan *chan) l2cap_chan_no_close() argument
875 static inline void l2cap_chan_no_ready(struct l2cap_chan *chan) l2cap_chan_no_ready() argument
879 static inline void l2cap_chan_no_state_change(struct l2cap_chan *chan, l2cap_chan_no_state_change() argument
884 static inline void l2cap_chan_no_defer(struct l2cap_chan *chan) l2cap_chan_no_defer() argument
888 static inline void l2cap_chan_no_suspend(struct l2cap_chan *chan) l2cap_chan_no_suspend() argument
892 static inline void l2cap_chan_no_resume(struct l2cap_chan *chan) l2cap_chan_no_resume() argument
896 static inline void l2cap_chan_no_set_shutdown(struct l2cap_chan *chan) l2cap_chan_no_set_shutdown() argument
900 static inline long l2cap_chan_no_get_sndtimeo(struct l2cap_chan *chan) l2cap_chan_no_get_sndtimeo() argument
911 void __l2cap_le_connect_rsp_defer(struct l2cap_chan *chan);
912 void __l2cap_connect_rsp_defer(struct l2cap_chan *chan);
914 int l2cap_add_psm(struct l2cap_chan *chan, bdaddr_t *src, __le16 psm);
915 int l2cap_add_scid(struct l2cap_chan *chan, __u16 scid);
918 void l2cap_chan_close(struct l2cap_chan *chan, int reason);
919 int l2cap_chan_connect(struct l2cap_chan *chan, __le16 psm, u16 cid,
921 int l2cap_chan_send(struct l2cap_chan *chan, struct msghdr *msg, size_t len);
922 void l2cap_chan_busy(struct l2cap_chan *chan, int busy);
923 int l2cap_chan_check_security(struct l2cap_chan *chan, bool initiator);
924 void l2cap_chan_set_defaults(struct l2cap_chan *chan);
925 int l2cap_ertm_init(struct l2cap_chan *chan);
926 void l2cap_chan_add(struct l2cap_conn *conn, struct l2cap_chan *chan);
927 void __l2cap_chan_add(struct l2cap_conn *conn, struct l2cap_chan *chan);
928 void l2cap_chan_del(struct l2cap_chan *chan, int err);
929 void l2cap_send_conn_req(struct l2cap_chan *chan);
930 void l2cap_move_start(struct l2cap_chan *chan);
931 void l2cap_logical_cfm(struct l2cap_chan *chan, struct hci_chan *hchan,
933 void __l2cap_physical_cfm(struct l2cap_chan *chan, int result);
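
__seq_offset() and __next_seq() above do ERTM sequence arithmetic modulo tx_win_max + 1; the kernel version branches on seq1 >= seq2, which is equivalent to adding the modulus before subtracting. A standalone sketch of the single-expression form with a wrapped example:

    #include <stdio.h>

    /* distance from seq2 forward to seq1, modulo win_max + 1 */
    static unsigned int seq_offset(unsigned int win_max,
                                   unsigned int seq1, unsigned int seq2)
    {
        unsigned int mod = win_max + 1;
        return (seq1 + mod - seq2) % mod;
    }

    static unsigned int next_seq(unsigned int win_max, unsigned int seq)
    {
        return (seq + 1) % (win_max + 1);
    }

    int main(void)
    {
        /* 6-bit sequence space: win_max = 63 */
        printf("%u\n", seq_offset(63, 2, 60)); /* 6: wrapped distance */
        printf("%u\n", next_seq(63, 63));      /* 0: wraps to start */
        return 0;
    }
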
/linux-4.1.27/drivers/net/wireless/ath/ath9k/
H A Dhw-ops.h45 struct ath9k_channel *chan, ath9k_hw_calibrate()
48 return ath9k_hw_ops(ah)->calibrate(ah, chan, rxchainmask, longcal); ath9k_hw_calibrate()
141 struct ath9k_channel *chan) ath9k_hw_rf_set_freq()
143 return ath9k_hw_private_ops(ah)->rf_set_freq(ah, chan); ath9k_hw_rf_set_freq()
147 struct ath9k_channel *chan) ath9k_hw_spur_mitigate_freq()
149 ath9k_hw_private_ops(ah)->spur_mitigate_freq(ah, chan); ath9k_hw_spur_mitigate_freq()
153 struct ath9k_channel *chan, ath9k_hw_set_rf_regs()
159 return ath9k_hw_private_ops(ah)->set_rf_regs(ah, chan, modesIndex); ath9k_hw_set_rf_regs()
163 struct ath9k_channel *chan) ath9k_hw_init_bb()
165 return ath9k_hw_private_ops(ah)->init_bb(ah, chan); ath9k_hw_init_bb()
169 struct ath9k_channel *chan) ath9k_hw_set_channel_regs()
171 return ath9k_hw_private_ops(ah)->set_channel_regs(ah, chan); ath9k_hw_set_channel_regs()
175 struct ath9k_channel *chan) ath9k_hw_process_ini()
177 return ath9k_hw_private_ops(ah)->process_ini(ah, chan); ath9k_hw_process_ini()
189 struct ath9k_channel *chan) ath9k_hw_set_rfmode()
191 return ath9k_hw_private_ops(ah)->set_rfmode(ah, chan); ath9k_hw_set_rfmode()
200 struct ath9k_channel *chan) ath9k_hw_set_delta_slope()
202 return ath9k_hw_private_ops(ah)->set_delta_slope(ah, chan); ath9k_hw_set_delta_slope()
236 struct ath9k_channel *chan) ath9k_hw_init_cal()
238 return ath9k_hw_private_ops(ah)->init_cal(ah, chan); ath9k_hw_init_cal()
248 struct ath9k_channel *chan, ath9k_hw_fast_chan_change()
251 return ath9k_hw_private_ops(ah)->fast_chan_change(ah, chan, ath9k_hw_fast_chan_change()
269 struct ath9k_channel *chan) ath9k_hw_compute_pll_control()
271 return ath9k_hw_private_ops(ah)->compute_pll_control(ah, chan); ath9k_hw_compute_pll_control()
44 ath9k_hw_calibrate(struct ath_hw *ah, struct ath9k_channel *chan, u8 rxchainmask, bool longcal) ath9k_hw_calibrate() argument
140 ath9k_hw_rf_set_freq(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_rf_set_freq() argument
146 ath9k_hw_spur_mitigate_freq(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_spur_mitigate_freq() argument
152 ath9k_hw_set_rf_regs(struct ath_hw *ah, struct ath9k_channel *chan, u16 modesIndex) ath9k_hw_set_rf_regs() argument
162 ath9k_hw_init_bb(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_init_bb() argument
168 ath9k_hw_set_channel_regs(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_set_channel_regs() argument
174 ath9k_hw_process_ini(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_process_ini() argument
188 ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_set_rfmode() argument
199 ath9k_hw_set_delta_slope(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_set_delta_slope() argument
235 ath9k_hw_init_cal(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_init_cal() argument
247 ath9k_hw_fast_chan_change(struct ath_hw *ah, struct ath9k_channel *chan, u8 *ini_reloaded) ath9k_hw_fast_chan_change() argument
268 ath9k_hw_compute_pll_control(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_compute_pll_control() argument
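
Every ath9k_hw_*() helper in hw-ops.h above is a thin inline that dispatches through a per-chip function-pointer table (the ath9k_hw_private_ops()/ath9k_hw_ops() accessors), which is how the AR5008 and AR9003 families plug different implementations behind one call site. A standalone sketch of that indirection, with all names invented here:

    #include <stdio.h>

    struct hw;

    struct hw_private_ops {
        int (*set_channel)(struct hw *hw, int freq);
    };

    struct hw {
        struct hw_private_ops priv_ops; /* filled in per chip family */
    };

    static int gen1_set_channel(struct hw *hw, int freq)
    {
        printf("gen1 tune to %d MHz\n", freq);
        return 0;
    }

    /* the wrapper layer: one call site, any implementation */
    static int hw_set_channel(struct hw *hw, int freq)
    {
        return hw->priv_ops.set_channel(hw, freq);
    }

    int main(void)
    {
        struct hw hw = { .priv_ops = { .set_channel = gen1_set_channel } };
        return hw_set_channel(&hw, 2412);
    }
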
H A Dar9003_phy.c126 * @chan:
148 static int ar9003_hw_set_channel(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_set_channel() argument
155 ath9k_hw_get_channel_centers(ah, chan, &centers); ar9003_hw_set_channel()
240 ah->curchan = chan; ar9003_hw_set_channel()
248 * @chan:
256 struct ath9k_channel *chan) ar9003_hw_spur_mitigate_mrc_cck()
262 u8 *spur_fbin_ptr = ar9003_get_spur_chan_ptr(ah, IS_CHAN_2GHZ(chan)); ar9003_hw_spur_mitigate_mrc_cck()
274 if (IS_CHAN_HT40(chan)) { ar9003_hw_spur_mitigate_mrc_cck()
278 synth_freq = chan->channel + 10; ar9003_hw_spur_mitigate_mrc_cck()
280 synth_freq = chan->channel - 10; ar9003_hw_spur_mitigate_mrc_cck()
283 synth_freq = chan->channel; ar9003_hw_spur_mitigate_mrc_cck()
288 synth_freq = chan->channel; ar9003_hw_spur_mitigate_mrc_cck()
299 IS_CHAN_2GHZ(chan)); ar9003_hw_spur_mitigate_mrc_cck()
487 struct ath9k_channel *chan, ar9003_hw_spur_ofdm_work()
496 if (IS_CHAN_HT40(chan)) { ar9003_hw_spur_ofdm_work()
538 struct ath9k_channel *chan) ar9003_hw_spur_mitigate_ofdm()
548 if (IS_CHAN_5GHZ(chan)) { ar9003_hw_spur_mitigate_ofdm()
560 if (IS_CHAN_HT40(chan)) { ar9003_hw_spur_mitigate_ofdm()
564 synth_freq = chan->channel - 10; ar9003_hw_spur_mitigate_ofdm()
566 synth_freq = chan->channel + 10; ar9003_hw_spur_mitigate_ofdm()
569 synth_freq = chan->channel; ar9003_hw_spur_mitigate_ofdm()
578 ar9003_hw_spur_ofdm_work(ah, chan, freq_offset, ar9003_hw_spur_mitigate_ofdm()
595 struct ath9k_channel *chan) ar9003_hw_spur_mitigate()
598 ar9003_hw_spur_mitigate_mrc_cck(ah, chan); ar9003_hw_spur_mitigate()
599 ar9003_hw_spur_mitigate_ofdm(ah, chan); ar9003_hw_spur_mitigate()
603 struct ath9k_channel *chan) ar9003_hw_compute_pll_control_soc()
609 if (chan && IS_CHAN_HALF_RATE(chan)) ar9003_hw_compute_pll_control_soc()
611 else if (chan && IS_CHAN_QUARTER_RATE(chan)) ar9003_hw_compute_pll_control_soc()
620 struct ath9k_channel *chan) ar9003_hw_compute_pll_control()
626 if (chan && IS_CHAN_HALF_RATE(chan)) ar9003_hw_compute_pll_control()
628 else if (chan && IS_CHAN_QUARTER_RATE(chan)) ar9003_hw_compute_pll_control()
637 struct ath9k_channel *chan) ar9003_hw_set_channel_regs()
652 if (IS_CHAN_HT40(chan)) { ar9003_hw_set_channel_regs()
655 if (IS_CHAN_HT40PLUS(chan)) ar9003_hw_set_channel_regs()
668 ath9k_hw_set11nmac2040(ah, chan); ar9003_hw_set_channel_regs()
677 struct ath9k_channel *chan) ar9003_hw_init_bb()
690 ath9k_hw_synth_delay(ah, chan, synthDelay); ar9003_hw_init_bb()
796 struct ath9k_channel *chan) ar9550_hw_get_modes_txgain_index()
800 if (IS_CHAN_2GHZ(chan)) { ar9550_hw_get_modes_txgain_index()
801 if (IS_CHAN_HT40(chan)) ar9550_hw_get_modes_txgain_index()
807 if (chan->channel <= 5350) ar9550_hw_get_modes_txgain_index()
809 else if ((chan->channel > 5350) && (chan->channel <= 5600)) ar9550_hw_get_modes_txgain_index()
814 if (IS_CHAN_HT40(chan)) ar9550_hw_get_modes_txgain_index()
821 struct ath9k_channel *chan) ar9561_hw_get_modes_txgain_index()
823 if (IS_CHAN_2GHZ(chan)) { ar9561_hw_get_modes_txgain_index()
824 if (IS_CHAN_HT40(chan)) ar9561_hw_get_modes_txgain_index()
882 struct ath9k_channel *chan) ar9003_hw_process_ini()
887 if (IS_CHAN_5GHZ(chan)) ar9003_hw_process_ini()
888 modesIndex = IS_CHAN_HT40(chan) ? 2 : 1; ar9003_hw_process_ini()
890 modesIndex = IS_CHAN_HT40(chan) ? 3 : 4; ar9003_hw_process_ini()
949 modes_txgain_index = ar9550_hw_get_modes_txgain_index(ah, chan); ar9003_hw_process_ini()
953 ar9561_hw_get_modes_txgain_index(ah, chan); ar9003_hw_process_ini()
968 if (IS_CHAN_A_FAST_CLOCK(ah, chan)) ar9003_hw_process_ini()
980 if (chan->channel == 2484) ar9003_hw_process_ini()
985 ar9003_hw_set_channel_regs(ah, chan); ar9003_hw_process_ini()
987 ath9k_hw_apply_txpower(ah, chan, false); ar9003_hw_process_ini()
993 struct ath9k_channel *chan) ar9003_hw_set_rfmode()
997 if (chan == NULL) ar9003_hw_set_rfmode()
1000 if (IS_CHAN_2GHZ(chan)) ar9003_hw_set_rfmode()
1005 if (IS_CHAN_A_FAST_CLOCK(ah, chan)) ar9003_hw_set_rfmode()
1021 struct ath9k_channel *chan) ar9003_hw_set_delta_slope()
1031 if (IS_CHAN_HALF_RATE(chan)) ar9003_hw_set_delta_slope()
1033 else if (IS_CHAN_QUARTER_RATE(chan)) ar9003_hw_set_delta_slope()
1040 ath9k_hw_get_channel_centers(ah, chan, &centers); ar9003_hw_set_delta_slope()
1091 struct ath9k_channel *chan = ah->curchan; ar9003_hw_ani_control() local
1176 chan->channel, ar9003_hw_ani_control()
1231 chan->channel, ar9003_hw_ani_control()
1239 chan->channel, ar9003_hw_ani_control()
1295 chan->channel, ar9003_hw_ani_control()
1303 chan->channel, ar9003_hw_ani_control()
1333 chan->channel, ar9003_hw_ani_control()
1418 struct ath9k_channel *chan = ah->curchan; ar9003_hw_ani_cache_ini_regs() local
1425 ath_dbg(common, ANI, "ver %d.%d opmode %u chan %d Mhz\n", ar9003_hw_ani_cache_ini_regs()
1429 chan->channel); ar9003_hw_ani_cache_ini_regs()
1690 struct ath9k_channel *chan, ar9003_hw_fast_chan_change()
1696 if (IS_CHAN_5GHZ(chan)) ar9003_hw_fast_chan_change()
1697 modesIndex = IS_CHAN_HT40(chan) ? 2 : 1; ar9003_hw_fast_chan_change()
1699 modesIndex = IS_CHAN_HT40(chan) ? 3 : 4; ar9003_hw_fast_chan_change()
1735 if (IS_CHAN_A_FAST_CLOCK(ah, chan)) ar9003_hw_fast_chan_change()
1744 if (chan->channel == 2484) ar9003_hw_fast_chan_change()
1751 ar9003_hw_set_rfmode(ah, chan); ar9003_hw_fast_chan_change()
1968 struct ath9k_channel *chan) ar9003_hw_init_rate_txpower()
1970 if (IS_CHAN_5GHZ(chan)) { ar9003_hw_init_rate_txpower()
1973 if (IS_CHAN_HT20(chan) || IS_CHAN_HT40(chan)) { ar9003_hw_init_rate_txpower()
1978 IS_CHAN_HT40(chan)); ar9003_hw_init_rate_txpower()
1988 if (IS_CHAN_HT20(chan) || IS_CHAN_HT40(chan)) { ar9003_hw_init_rate_txpower()
1993 IS_CHAN_HT40(chan)); ar9003_hw_init_rate_txpower()
255 ar9003_hw_spur_mitigate_mrc_cck(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_spur_mitigate_mrc_cck() argument
486 ar9003_hw_spur_ofdm_work(struct ath_hw *ah, struct ath9k_channel *chan, int freq_offset, int range, int synth_freq) ar9003_hw_spur_ofdm_work() argument
537 ar9003_hw_spur_mitigate_ofdm(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_spur_mitigate_ofdm() argument
594 ar9003_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_spur_mitigate() argument
602 ar9003_hw_compute_pll_control_soc(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_compute_pll_control_soc() argument
619 ar9003_hw_compute_pll_control(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_compute_pll_control() argument
636 ar9003_hw_set_channel_regs(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_set_channel_regs() argument
676 ar9003_hw_init_bb(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_init_bb() argument
795 ar9550_hw_get_modes_txgain_index(struct ath_hw *ah, struct ath9k_channel *chan) ar9550_hw_get_modes_txgain_index() argument
820 ar9561_hw_get_modes_txgain_index(struct ath_hw *ah, struct ath9k_channel *chan) ar9561_hw_get_modes_txgain_index() argument
881 ar9003_hw_process_ini(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_process_ini() argument
992 ar9003_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_set_rfmode() argument
1020 ar9003_hw_set_delta_slope(struct ath_hw *ah, struct ath9k_channel *chan) ar9003_hw_set_delta_slope() argument
1689 ar9003_hw_fast_chan_change(struct ath_hw *ah, struct ath9k_channel *chan, u8 *ini_reloaded) ar9003_hw_fast_chan_change() argument
1967 ar9003_hw_init_rate_txpower(struct ath_hw *ah, u8 *rate_array, struct ath9k_channel *chan) ar9003_hw_init_rate_txpower() argument
H A Dcalib.c48 struct ath9k_channel *chan) ath9k_hw_get_nf_limits()
52 if (!chan || IS_CHAN_2GHZ(chan)) ath9k_hw_get_nf_limits()
61 struct ath9k_channel *chan) ath9k_hw_get_default_nf()
63 return ath9k_hw_get_nf_limits(ah, chan)->nominal; ath9k_hw_get_default_nf()
66 s16 ath9k_hw_getchan_noise(struct ath_hw *ah, struct ath9k_channel *chan, ath9k_hw_getchan_noise() argument
73 ath9k_hw_get_default_nf(ah, chan); ath9k_hw_getchan_noise()
210 currCal->calData->calType, ah->curchan->chan->center_freq); ath9k_hw_reset_calvalid()
237 int ath9k_hw_loadnf(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_loadnf() argument
243 s16 default_nf = ath9k_hw_get_default_nf(ah, chan); ath9k_hw_loadnf()
253 if ((i >= AR5416_MAX_CHAINS) && !IS_CHAN_HT40(chan)) ath9k_hw_loadnf()
314 if ((i >= AR5416_MAX_CHAINS) && !IS_CHAN_HT40(chan)) ath9k_hw_loadnf()
360 bool ath9k_hw_getnf(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_getnf() argument
366 struct ieee80211_channel *c = chan->chan; ath9k_hw_getnf()
386 chan->noisefloor = nf; ath9k_hw_getnf()
393 chan->noisefloor = h[0].privNF; ath9k_hw_getnf()
394 ah->noise = ath9k_hw_getchan_noise(ah, chan, chan->noisefloor); ath9k_hw_getnf()
400 struct ath9k_channel *chan) ath9k_init_nfcal_hist_buffer()
406 ah->caldata->channel = chan->channel; ath9k_init_nfcal_hist_buffer()
407 ah->caldata->channelFlags = chan->channelFlags; ath9k_init_nfcal_hist_buffer()
409 default_nf = ath9k_hw_get_default_nf(ah, chan); ath9k_init_nfcal_hist_buffer()
47 ath9k_hw_get_nf_limits(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_get_nf_limits() argument
60 ath9k_hw_get_default_nf(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_hw_get_default_nf() argument
399 ath9k_init_nfcal_hist_buffer(struct ath_hw *ah, struct ath9k_channel *chan) ath9k_init_nfcal_hist_buffer() argument
H A Dar5008_phy.c179 * @chan:
185 static int ar5008_hw_set_channel(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_set_channel() argument
195 ath9k_hw_get_channel_centers(ah, chan, &centers); ar5008_hw_set_channel()
252 ah->curchan = chan; ar5008_hw_set_channel()
260 * @chan:
266 struct ath9k_channel *chan) ar5008_hw_spur_mitigate()
291 bool is2GHz = IS_CHAN_2GHZ(chan); ar5008_hw_spur_mitigate()
300 cur_bb_spur = cur_bb_spur - (chan->channel * 10); ar5008_hw_spur_mitigate()
330 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400; ar5008_hw_spur_mitigate()
493 * @chan:
503 struct ath9k_channel *chan, ar5008_hw_set_rf_regs()
528 if (IS_CHAN_2GHZ(chan)) { ar5008_hw_set_rf_regs()
557 struct ath9k_channel *chan) ar5008_hw_init_bb()
565 ath9k_hw_synth_delay(ah, chan, synthDelay); ar5008_hw_init_bb()
612 struct ath9k_channel *chan) ar5008_hw_override_ini()
665 struct ath9k_channel *chan) ar5008_hw_set_channel_regs()
677 if (IS_CHAN_HT40(chan)) { ar5008_hw_set_channel_regs()
680 if (IS_CHAN_HT40PLUS(chan)) ar5008_hw_set_channel_regs()
689 ath9k_hw_set11nmac2040(ah, chan); ar5008_hw_set_channel_regs()
699 struct ath9k_channel *chan) ar5008_hw_process_ini()
705 if (IS_CHAN_5GHZ(chan)) { ar5008_hw_process_ini()
707 modesIndex = IS_CHAN_HT40(chan) ? 2 : 1; ar5008_hw_process_ini()
710 modesIndex = IS_CHAN_HT40(chan) ? 3 : 4; ar5008_hw_process_ini()
722 ah->eep_ops->set_addac(ah, chan); ar5008_hw_process_ini()
783 if (IS_CHAN_A_FAST_CLOCK(ah, chan)) ar5008_hw_process_ini()
787 ar5008_hw_override_ini(ah, chan); ar5008_hw_process_ini()
788 ar5008_hw_set_channel_regs(ah, chan); ar5008_hw_process_ini()
791 ath9k_hw_apply_txpower(ah, chan, false); ar5008_hw_process_ini()
794 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) { ar5008_hw_process_ini()
802 static void ar5008_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_set_rfmode() argument
806 if (chan == NULL) ar5008_hw_set_rfmode()
809 if (IS_CHAN_2GHZ(chan)) ar5008_hw_set_rfmode()
815 rfMode |= (IS_CHAN_5GHZ(chan)) ? ar5008_hw_set_rfmode()
818 if (IS_CHAN_A_FAST_CLOCK(ah, chan)) ar5008_hw_set_rfmode()
830 struct ath9k_channel *chan) ar5008_hw_set_delta_slope()
836 if (IS_CHAN_HALF_RATE(chan)) ar5008_hw_set_delta_slope()
838 else if (IS_CHAN_QUARTER_RATE(chan)) ar5008_hw_set_delta_slope()
841 ath9k_hw_get_channel_centers(ah, chan, &centers); ar5008_hw_set_delta_slope()
890 struct ath9k_channel *chan) ar9160_hw_compute_pll_control()
896 if (chan && IS_CHAN_HALF_RATE(chan)) ar9160_hw_compute_pll_control()
898 else if (chan && IS_CHAN_QUARTER_RATE(chan)) ar9160_hw_compute_pll_control()
901 if (chan && IS_CHAN_5GHZ(chan)) ar9160_hw_compute_pll_control()
910 struct ath9k_channel *chan) ar5008_hw_compute_pll_control()
916 if (chan && IS_CHAN_HALF_RATE(chan)) ar5008_hw_compute_pll_control()
918 else if (chan && IS_CHAN_QUARTER_RATE(chan)) ar5008_hw_compute_pll_control()
921 if (chan && IS_CHAN_5GHZ(chan)) ar5008_hw_compute_pll_control()
934 struct ath9k_channel *chan = ah->curchan; ar5008_hw_ani_control_new() local
1008 chan->channel, ar5008_hw_ani_control_new()
1032 chan->channel, ar5008_hw_ani_control_new()
1040 chan->channel, ar5008_hw_ani_control_new()
1067 chan->channel, ar5008_hw_ani_control_new()
1075 chan->channel, ar5008_hw_ani_control_new()
1148 struct ath9k_channel *chan = ah->curchan; ar5008_hw_ani_cache_ini_regs() local
1155 ath_dbg(common, ANI, "ver %d.%d opmode %u chan %d Mhz\n", ar5008_hw_ani_cache_ini_regs()
1159 chan->channel); ar5008_hw_ani_cache_ini_regs()
1293 struct ath9k_channel *chan, int ht40_delta) ar5008_hw_init_rate_txpower()
1295 if (IS_CHAN_5GHZ(chan)) { ar5008_hw_init_rate_txpower()
1298 if (IS_CHAN_HT20(chan) || IS_CHAN_HT40(chan)) { ar5008_hw_init_rate_txpower()
1302 IS_CHAN_HT40(chan), ar5008_hw_init_rate_txpower()
1309 if (IS_CHAN_HT20(chan) || IS_CHAN_HT40(chan)) { ar5008_hw_init_rate_txpower()
1313 IS_CHAN_HT40(chan), ar5008_hw_init_rate_txpower()
265 ar5008_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_spur_mitigate() argument
502 ar5008_hw_set_rf_regs(struct ath_hw *ah, struct ath9k_channel *chan, u16 modesIndex) ar5008_hw_set_rf_regs() argument
556 ar5008_hw_init_bb(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_init_bb() argument
611 ar5008_hw_override_ini(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_override_ini() argument
664 ar5008_hw_set_channel_regs(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_set_channel_regs() argument
698 ar5008_hw_process_ini(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_process_ini() argument
829 ar5008_hw_set_delta_slope(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_set_delta_slope() argument
889 ar9160_hw_compute_pll_control(struct ath_hw *ah, struct ath9k_channel *chan) ar9160_hw_compute_pll_control() argument
909 ar5008_hw_compute_pll_control(struct ath_hw *ah, struct ath9k_channel *chan) ar5008_hw_compute_pll_control() argument
1292 ar5008_hw_init_rate_txpower(struct ath_hw *ah, int16_t *rate_array, struct ath9k_channel *chan, int ht40_delta) ar5008_hw_init_rate_txpower() argument
H A Deeprom.c169 struct ath9k_channel *chan, ath9k_hw_get_legacy_target_powers()
181 ath9k_hw_get_channel_centers(ah, chan, &centers); ath9k_hw_get_legacy_target_powers()
185 IS_CHAN_2GHZ(chan))) { ath9k_hw_get_legacy_target_powers()
191 IS_CHAN_2GHZ(chan))) { ath9k_hw_get_legacy_target_powers()
195 IS_CHAN_2GHZ(chan)) && i > 0 && ath9k_hw_get_legacy_target_powers()
197 IS_CHAN_2GHZ(chan))) { ath9k_hw_get_legacy_target_powers()
210 IS_CHAN_2GHZ(chan)); ath9k_hw_get_legacy_target_powers()
212 IS_CHAN_2GHZ(chan)); ath9k_hw_get_legacy_target_powers()
224 struct ath9k_channel *chan, ath9k_hw_get_target_powers()
236 ath9k_hw_get_channel_centers(ah, chan, &centers); ath9k_hw_get_target_powers()
239 if (freq <= ath9k_hw_fbin2freq(powInfo[0].bChannel, IS_CHAN_2GHZ(chan))) { ath9k_hw_get_target_powers()
245 IS_CHAN_2GHZ(chan))) { ath9k_hw_get_target_powers()
250 IS_CHAN_2GHZ(chan)) && i > 0 && ath9k_hw_get_target_powers()
252 IS_CHAN_2GHZ(chan))) { ath9k_hw_get_target_powers()
265 IS_CHAN_2GHZ(chan)); ath9k_hw_get_target_powers()
267 IS_CHAN_2GHZ(chan)); ath9k_hw_get_target_powers()
354 struct ath9k_channel *chan, ath9k_hw_get_gain_boundaries_pdadcs()
393 ath9k_hw_get_channel_centers(ah, chan, &centers); ath9k_hw_get_gain_boundaries_pdadcs()
401 IS_CHAN_2GHZ(chan)), ath9k_hw_get_gain_boundaries_pdadcs()
478 (chan)), ath9k_hw_get_gain_boundaries_pdadcs()
168 ath9k_hw_get_legacy_target_powers(struct ath_hw *ah, struct ath9k_channel *chan, struct cal_target_power_leg *powInfo, u16 numChannels, struct cal_target_power_leg *pNewPower, u16 numRates, bool isExtTarget) ath9k_hw_get_legacy_target_powers() argument
223 ath9k_hw_get_target_powers(struct ath_hw *ah, struct ath9k_channel *chan, struct cal_target_power_ht *powInfo, u16 numChannels, struct cal_target_power_ht *pNewPower, u16 numRates, bool isHt40Target) ath9k_hw_get_target_powers() argument
353 ath9k_hw_get_gain_boundaries_pdadcs(struct ath_hw *ah, struct ath9k_channel *chan, void *pRawDataSet, u8 *bChans, u16 availPiers, u16 tPdGainOverlap, u16 *pPdGainBoundaries, u8 *pPDADCValues, u16 numXpdGains) ath9k_hw_get_gain_boundaries_pdadcs() argument
/linux-4.1.27/arch/arm/include/asm/
H A Ddma.h53 #define clear_dma_ff(chan)
60 extern void set_dma_page(unsigned int chan, char pagenr);
66 extern int request_dma(unsigned int chan, const char * device_id);
72 extern void free_dma(unsigned int chan);
79 extern void enable_dma(unsigned int chan);
86 extern void disable_dma(unsigned int chan);
90 extern int dma_channel_active(unsigned int chan);
98 extern void set_dma_sg(unsigned int chan, struct scatterlist *sg, int nr_sg);
106 extern void __set_dma_addr(unsigned int chan, void *addr);
107 #define set_dma_addr(chan, addr) \
108 __set_dma_addr(chan, (void *)__bus_to_virt(addr))
116 extern void set_dma_count(unsigned int chan, unsigned long count);
125 extern void set_dma_mode(unsigned int chan, unsigned int mode);
129 extern void set_dma_speed(unsigned int chan, int cycle_ns);
137 extern int get_dma_residue(unsigned int chan);
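
The legacy ARM DMA API declared above is programmed in a fixed order: claim the channel, configure address, count and mode while it is quiescent, then enable; teardown reverses it. A standalone sketch of that call ordering; the stub bodies only log, and the quiescent-while-programming rule is the conventional reading of this API rather than something stated in the quoted lines:

    #include <stdio.h>

    static int request_dma(unsigned int chan, const char *id)
    {
        printf("claim channel %u for %s\n", chan, id);
        return 0;
    }
    static void set_dma_mode(unsigned int chan, unsigned int mode)
    {
        printf("channel %u: mode %u\n", chan, mode);
    }
    static void set_dma_count(unsigned int chan, unsigned long n)
    {
        printf("channel %u: count %lu\n", chan, n);
    }
    static void enable_dma(unsigned int chan)  { printf("channel %u: go\n", chan); }
    static void disable_dma(unsigned int chan) { printf("channel %u: stop\n", chan); }
    static void free_dma(unsigned int chan)    { printf("channel %u: release\n", chan); }

    int main(void)
    {
        unsigned int chan = 3;

        if (request_dma(chan, "demo") != 0)
            return 1;
        set_dma_mode(chan, 0);     /* direction/mode first... */
        set_dma_count(chan, 4096); /* ...then the transfer size... */
        enable_dma(chan);          /* ...and only then start */
        disable_dma(chan);
        free_dma(chan);
        return 0;
    }
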
/linux-4.1.27/drivers/mmc/host/
H A Dtmio_mmc_dma.c51 struct dma_chan *chan = host->chan_rx; tmio_mmc_start_dma_rx() local
86 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE); tmio_mmc_start_dma_rx()
88 desc = dmaengine_prep_slave_sg(chan, sg, ret, tmio_mmc_start_dma_rx()
108 dma_release_channel(chan); tmio_mmc_start_dma_rx()
110 chan = host->chan_tx; tmio_mmc_start_dma_rx()
111 if (chan) { tmio_mmc_start_dma_rx()
113 dma_release_channel(chan); tmio_mmc_start_dma_rx()
127 struct dma_chan *chan = host->chan_tx; tmio_mmc_start_dma_tx() local
166 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE); tmio_mmc_start_dma_tx()
168 desc = dmaengine_prep_slave_sg(chan, sg, ret, tmio_mmc_start_dma_tx()
188 dma_release_channel(chan); tmio_mmc_start_dma_tx()
190 chan = host->chan_rx; tmio_mmc_start_dma_tx()
191 if (chan) { tmio_mmc_start_dma_tx()
193 dma_release_channel(chan); tmio_mmc_start_dma_tx()
218 struct dma_chan *chan = NULL; tmio_mmc_issue_tasklet_fn() local
224 chan = host->chan_rx; tmio_mmc_issue_tasklet_fn()
226 chan = host->chan_tx; tmio_mmc_issue_tasklet_fn()
233 if (chan) tmio_mmc_issue_tasklet_fn()
234 dma_async_issue_pending(chan); tmio_mmc_issue_tasklet_fn()
343 struct dma_chan *chan = host->chan_tx; tmio_mmc_release_dma() local
345 dma_release_channel(chan); tmio_mmc_release_dma()
348 struct dma_chan *chan = host->chan_rx; tmio_mmc_release_dma() local
350 dma_release_channel(chan); tmio_mmc_release_dma()
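The tmio_mmc hits above follow the standard dmaengine slave pattern: map the scatterlist, prepare a slave descriptor, submit it, then issue pending work. A hedged, self-contained sketch of that pattern — the channel, scatterlist and callback are assumptions (a real driver would also call dmaengine_slave_config() first):

	/* Sketch of the generic dmaengine slave flow the tmio hits above
	 * exercise; names and error codes are illustrative. */
	#include <linux/dmaengine.h>
	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int example_rx(struct dma_chan *chan, struct scatterlist *sg,
			      unsigned int sg_len, dma_async_tx_callback done)
	{
		struct dma_async_tx_descriptor *desc;
		int nents;

		nents = dma_map_sg(chan->device->dev, sg, sg_len, DMA_FROM_DEVICE);
		if (!nents)
			return -ENOMEM;

		desc = dmaengine_prep_slave_sg(chan, sg, nents, DMA_DEV_TO_MEM,
					       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc) {
			dma_unmap_sg(chan->device->dev, sg, sg_len,
				     DMA_FROM_DEVICE);
			return -EIO;
		}

		desc->callback = done;		/* completion handler */
		dmaengine_submit(desc);		/* queue the descriptor */
		dma_async_issue_pending(chan);	/* kick the engine */
		return 0;
	}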
/linux-4.1.27/sound/synth/emux/
H A Demux_effect.c100 effect_set_byte(unsigned char *valp, struct snd_midi_channel *chan, int type) effect_set_byte() argument
103 struct snd_emux_effect_table *fx = chan->private; effect_set_byte()
121 effect_set_word(unsigned short *valp, struct snd_midi_channel *chan, int type) effect_set_word() argument
124 struct snd_emux_effect_table *fx = chan->private; effect_set_word()
138 effect_get_offset(struct snd_midi_channel *chan, int lo, int hi, int mode) effect_get_offset() argument
141 struct snd_emux_effect_table *fx = chan->private; effect_get_offset()
157 struct snd_midi_channel *chan, int type, int val) snd_emux_send_effect_oss()
169 snd_emux_send_effect(port, chan, type, val, mode); snd_emux_send_effect_oss()
177 snd_emux_send_effect(struct snd_emux_port *port, struct snd_midi_channel *chan, snd_emux_send_effect() argument
188 fx = chan->private; snd_emux_send_effect()
212 if (!STATE_IS_PLAYING(vp->state) || vp->chan != chan) snd_emux_send_effect()
218 effect_set_byte(srcp, chan, type); snd_emux_send_effect()
221 effect_set_word((unsigned short*)srcp, chan, type); snd_emux_send_effect()
227 snd_emux_update_channel(port, chan, parm_defs[type].update); snd_emux_send_effect()
235 struct snd_midi_channel *chan = vp->chan; snd_emux_setup_effect() local
240 if (! (fx = chan->private)) snd_emux_setup_effect()
257 effect_set_byte(srcp, chan, i); snd_emux_setup_effect()
259 effect_set_word((unsigned short*)srcp, chan, i); snd_emux_setup_effect()
263 vp->reg.start += effect_get_offset(chan, EMUX_FX_SAMPLE_START, snd_emux_setup_effect()
267 vp->reg.loopstart += effect_get_offset(chan, EMUX_FX_LOOP_START, snd_emux_setup_effect()
271 vp->reg.loopend += effect_get_offset(chan, EMUX_FX_LOOP_END, snd_emux_setup_effect()
156 snd_emux_send_effect_oss(struct snd_emux_port *port, struct snd_midi_channel *chan, int type, int val) snd_emux_send_effect_oss() argument
H A Demux_voice.h47 void snd_emux_note_on(void *p, int note, int vel, struct snd_midi_channel *chan);
48 void snd_emux_note_off(void *p, int note, int vel, struct snd_midi_channel *chan);
49 void snd_emux_key_press(void *p, int note, int vel, struct snd_midi_channel *chan);
50 void snd_emux_terminate_note(void *p, int note, struct snd_midi_channel *chan);
51 void snd_emux_control(void *p, int type, struct snd_midi_channel *chan);
55 struct snd_midi_channel *chan, int update);
67 struct snd_midi_channel *chan, int type, int val);
69 struct snd_midi_channel *chan, int type, int val, int mode);
76 struct snd_midi_channel *chan, int param);
77 void snd_emux_nrpn(void *private_data, struct snd_midi_channel *chan,
H A Demux_synth.c41 int *notep, int vel, struct snd_midi_channel *chan,
43 static int get_bank(struct snd_emux_port *port, struct snd_midi_channel *chan);
45 struct snd_midi_channel *chan, int free);
60 snd_emux_note_on(void *p, int note, int vel, struct snd_midi_channel *chan) snd_emux_note_on() argument
70 if (snd_BUG_ON(!port || !chan)) snd_emux_note_on()
78 nvoices = get_zone(emu, port, &note, vel, chan, table); snd_emux_note_on()
91 terminate_note1(emu, key, chan, 0); snd_emux_note_on()
110 vp->chan = chan; snd_emux_note_on()
135 vp->chan == chan) { snd_emux_note_on()
146 struct snd_emux_effect_table *fx = chan->private; snd_emux_note_on()
159 snd_emux_note_off(void *p, int note, int vel, struct snd_midi_channel *chan) snd_emux_note_off() argument
168 if (snd_BUG_ON(!port || !chan)) snd_emux_note_off()
179 vp->chan == chan && vp->key == note) { snd_emux_note_off()
236 snd_emux_key_press(void *p, int note, int vel, struct snd_midi_channel *chan) snd_emux_key_press() argument
245 if (snd_BUG_ON(!port || !chan)) snd_emux_key_press()
256 vp->chan == chan && vp->key == note) { snd_emux_key_press()
269 snd_emux_update_channel(struct snd_emux_port *port, struct snd_midi_channel *chan, int update) snd_emux_update_channel() argument
286 if (vp->chan == chan) snd_emux_update_channel()
325 snd_emux_control(void *p, int type, struct snd_midi_channel *chan) snd_emux_control() argument
330 if (snd_BUG_ON(!port || !chan)) snd_emux_control()
336 snd_emux_update_channel(port, chan, SNDRV_EMUX_UPDATE_VOLUME); snd_emux_control()
340 snd_emux_update_channel(port, chan, SNDRV_EMUX_UPDATE_PAN); snd_emux_control()
346 if (chan->control[type] >= 64) snd_emux_control()
347 snd_emux_send_effect(port, chan, EMUX_FX_CUTOFF, -160, snd_emux_control()
350 snd_emux_send_effect(port, chan, EMUX_FX_CUTOFF, 0, snd_emux_control()
356 snd_emux_update_channel(port, chan, SNDRV_EMUX_UPDATE_PITCH); snd_emux_control()
361 snd_emux_update_channel(port, chan, snd_emux_control()
369 snd_emux_xg_control(port, chan, type); snd_emux_control()
378 terminate_note1(struct snd_emux *emu, int note, struct snd_midi_channel *chan, int free) terminate_note1() argument
387 if (STATE_IS_PLAYING(vp->state) && vp->chan == chan && terminate_note1()
399 snd_emux_terminate_note(void *p, int note, struct snd_midi_channel *chan) snd_emux_terminate_note() argument
405 if (snd_BUG_ON(!port || !chan)) snd_emux_terminate_note()
412 terminate_note1(emu, note, chan, 1); snd_emux_terminate_note()
511 vp->chan = NULL; terminate_voice()
530 if (vp->chan == NULL || vp->port == NULL) update_voice()
655 struct snd_midi_channel *chan = vp->chan; calc_pan() local
662 pan = chan->control[MIDI_CTL_MSB_PAN] - 64; calc_pan()
758 struct snd_midi_channel *chan = vp->chan; calc_volume() local
761 expression_vol = chan->control[MIDI_CTL_MSB_EXPRESSION]; calc_volume()
766 main_vol = chan->control[MIDI_CTL_MSB_MAIN_VOLUME]; calc_volume()
776 main_vol = chan->control[MIDI_CTL_MSB_MAIN_VOLUME] * vp->reg.amplitude / 127; calc_volume()
791 if (chan->private) { calc_volume()
792 struct snd_emux_effect_table *fx = chan->private; calc_volume()
802 if (!SF_IS_DRUM_BANK(get_bank(port, chan)) calc_volume()
827 struct snd_midi_channel *chan = vp->chan; calc_pitch() local
838 if (chan->midi_pitchbend != 0) { calc_pitch()
840 offset += chan->midi_pitchbend * chan->gm_rpn_pitch_bend_range / 3072; calc_pitch()
848 offset += chan->gm_rpn_coarse_tuning * 4096 / (12 * 128); calc_pitch()
849 offset += chan->gm_rpn_fine_tuning / 24; calc_pitch()
853 if (chan->private) { calc_pitch()
854 struct snd_emux_effect_table *fx = chan->private; calc_pitch()
874 get_bank(struct snd_emux_port *port, struct snd_midi_channel *chan) get_bank() argument
880 val = chan->control[MIDI_CTL_MSB_BANK]; get_bank()
883 return chan->control[MIDI_CTL_LSB_BANK]; get_bank()
886 if (chan->drum_channel) get_bank()
889 return chan->control[MIDI_CTL_MSB_BANK]; get_bank()
892 if (chan->drum_channel) get_bank()
894 return chan->control[MIDI_CTL_MSB_BANK]; get_bank()
904 int *notep, int vel, struct snd_midi_channel *chan, get_zone()
909 bank = get_bank(port, chan); get_zone()
910 preset = chan->midi_program; get_zone()
939 vp->chan = NULL; snd_emux_init_voices()
903 get_zone(struct snd_emux *emu, struct snd_emux_port *port, int *notep, int vel, struct snd_midi_channel *chan, struct snd_sf_zone **table) get_zone() argument
H A Demux_nrpn.c53 struct snd_midi_channel *chan, send_converted_effect()
60 snd_emux_send_effect(port, chan, table[i].effect, send_converted_effect()
286 snd_emux_nrpn(void *p, struct snd_midi_channel *chan, snd_emux_nrpn() argument
292 if (snd_BUG_ON(!port || !chan)) snd_emux_nrpn()
295 if (chan->control[MIDI_CTL_NONREG_PARM_NUM_MSB] == 127 && snd_emux_nrpn()
296 chan->control[MIDI_CTL_NONREG_PARM_NUM_LSB] <= 26) { snd_emux_nrpn()
300 val = (chan->control[MIDI_CTL_MSB_DATA_ENTRY] << 7) | snd_emux_nrpn()
301 chan->control[MIDI_CTL_LSB_DATA_ENTRY]; snd_emux_nrpn()
305 port, chan, chan->control[MIDI_CTL_NONREG_PARM_NUM_LSB], snd_emux_nrpn()
311 chan->control[MIDI_CTL_NONREG_PARM_NUM_MSB] == 1) { snd_emux_nrpn()
315 val = chan->control[MIDI_CTL_MSB_DATA_ENTRY]; snd_emux_nrpn()
318 port, chan, chan->control[MIDI_CTL_NONREG_PARM_NUM_LSB], snd_emux_nrpn()
362 snd_emux_xg_control(struct snd_emux_port *port, struct snd_midi_channel *chan, snd_emux_xg_control() argument
366 port, chan, param, snd_emux_xg_control()
367 chan->control[param], snd_emux_xg_control()
51 send_converted_effect(struct nrpn_conv_table *table, int num_tables, struct snd_emux_port *port, struct snd_midi_channel *chan, int type, int val, int mode) send_converted_effect() argument
/linux-4.1.27/drivers/dma/dw/
H A Dcore.c73 static struct device *chan2dev(struct dma_chan *chan) chan2dev() argument
75 return &chan->dev->device; chan2dev()
98 dev_dbg(chan2dev(&dwc->chan), "desc %p not ACKed\n", desc); dwc_desc_get()
102 dev_vdbg(chan2dev(&dwc->chan), "scanned %u descriptors on freelist\n", i); dwc_desc_get()
120 dev_vdbg(chan2dev(&dwc->chan), dwc_desc_put()
124 dev_vdbg(chan2dev(&dwc->chan), "moving desc %p to freelist\n", desc); dwc_desc_put()
132 struct dw_dma *dw = to_dw_dma(dwc->chan.device); dwc_initialize()
171 dev_err(chan2dev(&dwc->chan), dwc_dump_chan_regs()
193 struct dw_dma *dw = to_dw_dma(dwc->chan.device); dwc_do_single_block()
215 struct dw_dma *dw = to_dw_dma(dwc->chan.device); dwc_dostart()
220 dev_err(chan2dev(&dwc->chan), dwc_dostart()
233 dev_err(chan2dev(&dwc->chan), dwc_dostart()
267 dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie); dwc_dostart_first_queued()
283 dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie); dwc_descriptor_complete()
315 dev_err(chan2dev(&dwc->chan), dwc_complete_all()
405 dev_vdbg(chan2dev(&dwc->chan), "%s: soft LLP mode\n", __func__); dwc_scan_descriptors()
410 dev_vdbg(chan2dev(&dwc->chan), "%s: llp=%pad\n", __func__, &llp); dwc_scan_descriptors()
450 dev_err(chan2dev(&dwc->chan), dwc_scan_descriptors()
462 dev_crit(chan2dev(&dwc->chan), " desc: s0x%x d0x%x l0x%x c0x%x:%x\n", dwc_dump_lli()
497 dev_WARN(chan2dev(&dwc->chan), "Bad descriptor submitted for DMA!\n" dwc_handle_error()
511 dma_addr_t dw_dma_get_src_addr(struct dma_chan *chan) dw_dma_get_src_addr() argument
513 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dw_dma_get_src_addr()
518 dma_addr_t dw_dma_get_dst_addr(struct dma_chan *chan) dw_dma_get_dst_addr() argument
520 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dw_dma_get_dst_addr()
535 dev_vdbg(chan2dev(&dwc->chan), "new cyclic period llp 0x%08x\n", dwc_handle_cyclic()
554 dev_err(chan2dev(&dwc->chan), dwc_handle_cyclic()
601 dwc = &dw->chan[i]; dw_dma_tasklet()
659 struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan); dwc_tx_submit()
672 dev_vdbg(chan2dev(tx->chan), "%s: queued %u\n", __func__, desc->txd.cookie); dwc_tx_submit()
681 dwc_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src, dwc_prep_dma_memcpy() argument
684 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_prep_dma_memcpy()
685 struct dw_dma *dw = to_dw_dma(chan->device); dwc_prep_dma_memcpy()
696 dev_vdbg(chan2dev(chan), dwc_prep_dma_memcpy()
701 dev_dbg(chan2dev(chan), "%s: length is zero!\n", __func__); dwc_prep_dma_memcpy()
713 ctllo = DWC_DEFAULT_CTLLO(chan) dwc_prep_dma_memcpy()
761 dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, dwc_prep_slave_sg() argument
765 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_prep_slave_sg()
766 struct dw_dma *dw = to_dw_dma(chan->device); dwc_prep_slave_sg()
779 dev_vdbg(chan2dev(chan), "%s\n", __func__); dwc_prep_slave_sg()
792 ctllo = (DWC_DEFAULT_CTLLO(chan) dwc_prep_slave_sg()
849 ctllo = (DWC_DEFAULT_CTLLO(chan)
916 dev_err(chan2dev(chan),
922 bool dw_dma_filter(struct dma_chan *chan, void *param) dw_dma_filter() argument
924 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dw_dma_filter()
927 if (dws->dma_dev != chan->device->dev) dw_dma_filter()
958 static int dwc_config(struct dma_chan *chan, struct dma_slave_config *sconfig) dwc_config() argument
960 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_config()
962 /* Check if chan will be configured for slave transfers */ dwc_config()
975 static int dwc_pause(struct dma_chan *chan) dwc_pause() argument
977 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_pause()
1005 static int dwc_resume(struct dma_chan *chan) dwc_resume() argument
1007 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_resume()
1022 static int dwc_terminate_all(struct dma_chan *chan) dwc_terminate_all() argument
1024 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_terminate_all()
1025 struct dw_dma *dw = to_dw_dma(chan->device); dwc_terminate_all()
1067 dwc_tx_status(struct dma_chan *chan, dwc_tx_status() argument
1071 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_tx_status()
1074 ret = dma_cookie_status(chan, cookie, txstate); dwc_tx_status()
1078 dwc_scan_descriptors(to_dw_dma(chan->device), dwc); dwc_tx_status()
1080 ret = dma_cookie_status(chan, cookie, txstate); dwc_tx_status()
1090 static void dwc_issue_pending(struct dma_chan *chan) dwc_issue_pending() argument
1092 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_issue_pending()
1119 dw->chan[i].initialized = false; dw_dma_off()
1127 static int dwc_alloc_chan_resources(struct dma_chan *chan) dwc_alloc_chan_resources() argument
1129 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_alloc_chan_resources()
1130 struct dw_dma *dw = to_dw_dma(chan->device); dwc_alloc_chan_resources()
1135 dev_vdbg(chan2dev(chan), "%s\n", __func__); dwc_alloc_chan_resources()
1139 dev_dbg(chan2dev(chan), "DMA channel not idle?\n"); dwc_alloc_chan_resources()
1143 dma_cookie_init(chan); dwc_alloc_chan_resources()
1154 if (chan->private && !dw_dma_filter(chan, chan->private)) { dwc_alloc_chan_resources()
1155 dev_warn(chan2dev(chan), "Wrong controller-specific data\n"); dwc_alloc_chan_resources()
1178 dma_async_tx_descriptor_init(&desc->txd, chan); dwc_alloc_chan_resources()
1191 dev_dbg(chan2dev(chan), "%s: allocated %d descriptors\n", __func__, i); dwc_alloc_chan_resources()
1196 dev_info(chan2dev(chan), "only allocated %d descriptors\n", i); dwc_alloc_chan_resources()
1201 static void dwc_free_chan_resources(struct dma_chan *chan) dwc_free_chan_resources() argument
1203 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dwc_free_chan_resources()
1204 struct dw_dma *dw = to_dw_dma(chan->device); dwc_free_chan_resources()
1209 dev_dbg(chan2dev(chan), "%s: descs allocated=%u\n", __func__, dwc_free_chan_resources()
1215 BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask); dwc_free_chan_resources()
1243 dev_vdbg(chan2dev(chan), " freeing descriptor %p\n", desc); dwc_free_chan_resources()
1247 dev_vdbg(chan2dev(chan), "%s: done\n", __func__); dwc_free_chan_resources()
1254 * @chan: the DMA channel to start
1259 int dw_dma_cyclic_start(struct dma_chan *chan) dw_dma_cyclic_start() argument
1261 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dw_dma_cyclic_start()
1262 struct dw_dma *dw = to_dw_dma(chan->device); dw_dma_cyclic_start()
1266 dev_err(chan2dev(&dwc->chan), "missing prep for cyclic DMA\n"); dw_dma_cyclic_start()
1285 * @chan: the DMA channel to stop
1289 void dw_dma_cyclic_stop(struct dma_chan *chan) dw_dma_cyclic_stop() argument
1291 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dw_dma_cyclic_stop()
1292 struct dw_dma *dw = to_dw_dma(dwc->chan.device); dw_dma_cyclic_stop()
1305 * @chan: the DMA channel to prepare
1314 struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan, dw_dma_cyclic_prep() argument
1318 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dw_dma_cyclic_prep()
1333 dev_dbg(chan2dev(&dwc->chan), dw_dma_cyclic_prep()
1340 dev_dbg(chan2dev(&dwc->chan), dw_dma_cyclic_prep()
1348 dev_dbg(chan2dev(&dwc->chan), dw_dma_cyclic_prep()
1397 desc->lli.ctllo = (DWC_DEFAULT_CTLLO(chan) dw_dma_cyclic_prep()
1412 desc->lli.ctllo = (DWC_DEFAULT_CTLLO(chan) dw_dma_cyclic_prep()
1440 dev_dbg(chan2dev(&dwc->chan), dw_dma_cyclic_prep()
1462 * @chan: the DMA channel to free
1464 void dw_dma_cyclic_free(struct dma_chan *chan) dw_dma_cyclic_free() argument
1466 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); dw_dma_cyclic_free()
1467 struct dw_dma *dw = to_dw_dma(dwc->chan.device); dw_dma_cyclic_free()
1472 dev_dbg(chan2dev(&dwc->chan), "%s\n", __func__); dw_dma_cyclic_free()
1544 dw->chan = devm_kcalloc(chip->dev, nr_channels, sizeof(*dw->chan), dw_dma_probe()
1546 if (!dw->chan) { dw_dma_probe()
1590 struct dw_dma_chan *dwc = &dw->chan[i]; dw_dma_probe()
1592 dwc->chan.device = &dw->dma; dw_dma_probe()
1593 dma_cookie_init(&dwc->chan); dw_dma_probe()
1595 list_add_tail(&dwc->chan.device_node, dw_dma_probe()
1598 list_add(&dwc->chan.device_node, &dw->dma.channels); dw_dma_probe()
1715 chan.device_node) { dw_dma_remove()
1716 list_del(&dwc->chan.device_node); dw_dma_remove()
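dw_dma_cyclic_prep/start/stop/free above are the dw_dmac driver's private cyclic API (declared in include/linux/dma/dw.h in this era), used e.g. by audio clients. A hedged sketch of a client; the buffer layout, callback and error handling are assumptions:

	/* Illustrative only: loops DMA over buf_dma, calling back once per
	 * period until stopped. */
	#include <linux/dma/dw.h>
	#include <linux/err.h>

	static void example_period_done(void *param)
	{
		/* one period of the ring buffer has been transferred */
	}

	static int example_cyclic(struct dma_chan *chan, dma_addr_t buf_dma,
				  size_t buf_len, size_t period_len)
	{
		struct dw_cyclic_desc *cdesc;

		cdesc = dw_dma_cyclic_prep(chan, buf_dma, buf_len, period_len,
					   DMA_MEM_TO_DEV);
		if (IS_ERR(cdesc))
			return PTR_ERR(cdesc);

		cdesc->period_callback = example_period_done;
		cdesc->period_callback_param = NULL;

		return dw_dma_cyclic_start(chan);
		/* teardown: dw_dma_cyclic_stop(chan); dw_dma_cyclic_free(chan); */
	}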
/linux-4.1.27/crypto/async_tx/
H A Dasync_tx.c63 dma_has_cap(tx_type, depend_tx->chan->device->cap_mask)) __async_tx_find_channel()
64 return depend_tx->chan; __async_tx_find_channel()
81 struct dma_chan *chan = depend_tx->chan; async_tx_channel_switch() local
82 struct dma_device *device = chan->device; async_tx_channel_switch()
87 if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) { async_tx_channel_switch()
95 device->device_issue_pending(chan); async_tx_channel_switch()
103 intr_tx = device->device_prep_dma_interrupt(chan, 0); async_tx_channel_switch()
129 device->device_issue_pending(chan); async_tx_channel_switch()
156 async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx, async_tx_submit() argument
185 if (depend_tx->chan == chan) { async_tx_submit()
194 if (depend_tx->chan == chan) async_tx_submit()
236 struct dma_chan *chan; async_trigger_callback() local
242 chan = depend_tx->chan; async_trigger_callback()
243 device = chan->device; async_trigger_callback()
251 tx = device ? device->device_prep_dma_interrupt(chan, 0) : NULL; async_trigger_callback()
258 async_tx_submit(chan, tx, submit); async_trigger_callback()
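async_tx_submit() and async_tx_channel_switch() above service dependency chains built by consumers such as md/raid5. A hedged sketch of the consumer side; the pages, length and callback are assumptions:

	/* Illustrative chain: a memcpy followed by a callback that is
	 * guaranteed to run only after the copy completes, even if the two
	 * operations land on different channels. */
	#include <linux/async_tx.h>

	static void all_done(void *param)
	{
		/* runs after the dependent operation has completed */
	}

	static void example_chain(struct page *dst, struct page *src, size_t len)
	{
		struct async_submit_ctl submit;
		struct dma_async_tx_descriptor *tx;

		init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, NULL);
		tx = async_memcpy(dst, src, 0, 0, len, &submit);

		/* second op depends on tx; may trigger a channel switch */
		init_async_submit(&submit, ASYNC_TX_ACK, tx, all_done, NULL, NULL);
		async_trigger_callback(&submit);
	}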
/linux-4.1.27/drivers/staging/iio/cdc/
H A Dad7150.c98 struct iio_chan_spec const *chan, ad7150_read_raw()
109 ad7150_addresses[chan->channel][0]); ad7150_read_raw()
116 ad7150_addresses[chan->channel][1]); ad7150_read_raw()
127 const struct iio_chan_spec *chan, enum iio_event_type type, ad7150_read_event_config()
163 unsigned int chan, enum iio_event_type type, ad7150_write_event_params()
173 event_code = IIO_UNMOD_EVENT_CODE(IIO_CAPACITANCE, chan, type, dir); ad7150_write_event_params()
181 value = chip->threshold[rising][chan]; ad7150_write_event_params()
183 ad7150_addresses[chan][3], ad7150_write_event_params()
189 sens = chip->mag_sensitivity[rising][chan]; ad7150_write_event_params()
190 timeout = chip->mag_timeout[rising][chan]; ad7150_write_event_params()
193 sens = chip->thresh_sensitivity[rising][chan]; ad7150_write_event_params()
194 timeout = chip->thresh_timeout[rising][chan]; ad7150_write_event_params()
200 ad7150_addresses[chan][4], ad7150_write_event_params()
206 ad7150_addresses[chan][5], ad7150_write_event_params()
215 const struct iio_chan_spec *chan, enum iio_event_type type, ad7150_write_event_config()
228 event_code = IIO_UNMOD_EVENT_CODE(chan->type, chan->channel, type, dir); ad7150_write_event_config()
274 ret = ad7150_write_event_params(indio_dev, chan->channel, type, dir); ad7150_write_event_config()
282 const struct iio_chan_spec *chan, ad7150_read_event_value()
294 *val = chip->mag_sensitivity[rising][chan->channel]; ad7150_read_event_value()
297 *val = chip->thresh_sensitivity[rising][chan->channel]; ad7150_read_event_value()
300 *val = chip->threshold[rising][chan->channel]; ad7150_read_event_value()
308 const struct iio_chan_spec *chan, ad7150_write_event_value()
321 chip->mag_sensitivity[rising][chan->channel] = val; ad7150_write_event_value()
324 chip->thresh_sensitivity[rising][chan->channel] = val; ad7150_write_event_value()
327 chip->threshold[rising][chan->channel] = val; ad7150_write_event_value()
335 ret = ad7150_write_event_params(indio_dev, chan->channel, type, dir); ad7150_write_event_value()
352 int chan = IIO_EVENT_CODE_EXTRACT_CHAN(this_attr->address); ad7150_show_timeout() local
358 value = chip->mag_timeout[rising][chan]; ad7150_show_timeout()
361 value = chip->thresh_timeout[rising][chan]; ad7150_show_timeout()
378 int chan = IIO_EVENT_CODE_EXTRACT_CHAN(this_attr->address); ad7150_store_timeout() local
396 chip->mag_timeout[rising][chan] = data; ad7150_store_timeout()
399 chip->thresh_timeout[rising][chan] = data; ad7150_store_timeout()
406 ret = ad7150_write_event_params(indio_dev, chan, type, dir); ad7150_store_timeout()
416 #define AD7150_TIMEOUT(chan, type, dir, ev_type, ev_dir) \
417 IIO_DEVICE_ATTR(in_capacitance##chan##_##type##_##dir##_timeout, \
422 chan, \
97 ad7150_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long mask) ad7150_read_raw() argument
126 ad7150_read_event_config(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir) ad7150_read_event_config() argument
162 ad7150_write_event_params(struct iio_dev *indio_dev, unsigned int chan, enum iio_event_type type, enum iio_event_direction dir) ad7150_write_event_params() argument
214 ad7150_write_event_config(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir, int state) ad7150_write_event_config() argument
281 ad7150_read_event_value(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir, enum iio_event_info info, int *val, int *val2) ad7150_read_event_value() argument
307 ad7150_write_event_value(struct iio_dev *indio_dev, const struct iio_chan_spec *chan, enum iio_event_type type, enum iio_event_direction dir, enum iio_event_info info, int val, int val2) ad7150_write_event_value() argument
H A Dad7152.c251 struct iio_chan_spec const *chan, ad7152_write_raw()
271 ad7152_addresses[chan->channel][AD7152_GAIN], ad7152_write_raw()
285 ad7152_addresses[chan->channel][AD7152_OFFS], ad7152_write_raw()
301 chip->setup[chan->channel] &= ~AD7152_SETUP_RANGE_4pF; ad7152_write_raw()
302 chip->setup[chan->channel] |= AD7152_SETUP_RANGE(i); ad7152_write_raw()
305 ad7152_addresses[chan->channel][AD7152_SETUP], ad7152_write_raw()
306 chip->setup[chan->channel]); ad7152_write_raw()
321 struct iio_chan_spec const *chan, ad7152_read_raw()
335 regval = chip->setup[chan->channel]; ad7152_read_raw()
337 if (chan->differential) ad7152_read_raw()
338 chip->setup[chan->channel] |= AD7152_SETUP_CAPDIFF; ad7152_read_raw()
340 chip->setup[chan->channel] &= ~AD7152_SETUP_CAPDIFF; ad7152_read_raw()
342 if (regval != chip->setup[chan->channel]) { ad7152_read_raw()
344 ad7152_addresses[chan->channel][AD7152_SETUP], ad7152_read_raw()
345 chip->setup[chan->channel]); ad7152_read_raw()
350 if (chan->channel == 0) ad7152_read_raw()
365 ad7152_addresses[chan->channel][AD7152_DATA]); ad7152_read_raw()
370 if (chan->differential) ad7152_read_raw()
378 ad7152_addresses[chan->channel][AD7152_GAIN]); ad7152_read_raw()
389 ad7152_addresses[chan->channel][AD7152_OFFS]); ad7152_read_raw()
398 ad7152_addresses[chan->channel][AD7152_SETUP]); ad7152_read_raw()
415 struct iio_chan_spec const *chan, ad7152_write_raw_get_fmt()
250 ad7152_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int val, int val2, long mask) ad7152_write_raw() argument
320 ad7152_read_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, int *val, int *val2, long mask) ad7152_read_raw() argument
414 ad7152_write_raw_get_fmt(struct iio_dev *indio_dev, struct iio_chan_spec const *chan, long mask) ad7152_write_raw_get_fmt() argument
/linux-4.1.27/drivers/dma/ppc4xx/
H A Dadma.c79 struct dma_chan *chan; member in struct:ppc_dma_chan_ref
141 static void print_cb(struct ppc440spe_adma_chan *chan, void *block) print_cb() argument
147 switch (chan->device->id) { print_cb()
157 cdb, chan->device->id, print_cb()
171 cb, chan->device->id, print_cb()
185 static void print_cb_list(struct ppc440spe_adma_chan *chan, print_cb_list() argument
189 print_cb(chan, iter->hw_desc); print_cb_list()
244 struct ppc440spe_adma_chan *chan) ppc440spe_desc_init_interrupt()
248 switch (chan->device->id) { ppc440spe_desc_init_interrupt()
262 printk(KERN_ERR "Unsupported id %d in %s\n", chan->device->id, ppc440spe_desc_init_interrupt()
454 * and/or Q to chan->pdest and/or chan->qdest as we have ppc440spe_desc_init_dma01pqzero_sum()
535 struct ppc440spe_adma_chan *chan, ppc440spe_desc_set_src_addr()
543 switch (chan->device->id) { ppc440spe_desc_set_src_addr()
570 struct ppc440spe_adma_chan *chan, u32 mult_index, ppc440spe_desc_set_src_mult()
577 switch (chan->device->id) { ppc440spe_desc_set_src_mult()
616 struct ppc440spe_adma_chan *chan, ppc440spe_desc_set_dest_addr()
625 switch (chan->device->id) { ppc440spe_desc_set_dest_addr()
657 struct ppc440spe_adma_chan *chan, ppc440spe_desc_set_byte_count()
663 switch (chan->device->id) { ppc440spe_desc_set_byte_count()
692 struct ppc440spe_adma_chan *chan, u8 *qword) ppc440spe_desc_set_dcheck()
696 switch (chan->device->id) { ppc440spe_desc_set_dcheck()
735 static void ppc440spe_desc_set_link(struct ppc440spe_adma_chan *chan, ppc440spe_desc_set_link() argument
759 switch (chan->device->id) { ppc440spe_desc_set_link()
784 struct ppc440spe_adma_chan *chan) ppc440spe_desc_get_link()
904 ppc440spe_chan_get_current_descriptor(struct ppc440spe_adma_chan *chan);
905 static void ppc440spe_chan_append(struct ppc440spe_adma_chan *chan);
911 struct ppc440spe_adma_chan *chan) ppc440spe_adma_device_clear_eot_status()
915 u8 *p = chan->device->dma_desc_pool_virt; ppc440spe_adma_device_clear_eot_status()
919 switch (chan->device->id) { ppc440spe_adma_device_clear_eot_status()
923 dma_reg = chan->device->dma_reg; ppc440spe_adma_device_clear_eot_status()
927 (u32)chan->device->dma_desc_pool]; ppc440spe_adma_device_clear_eot_status()
959 list_for_each_entry(iter, &chan->chain, ppc440spe_adma_device_clear_eot_status()
968 BUG_ON(&iter->chain_node == &chan->chain); ppc440spe_adma_device_clear_eot_status()
989 chan->device->id, rv); ppc440spe_adma_device_clear_eot_status()
996 xor_reg = chan->device->xor_reg; ppc440spe_adma_device_clear_eot_status()
1022 ppc440spe_chan_append(chan); ppc440spe_adma_device_clear_eot_status()
1030 static int ppc440spe_chan_is_busy(struct ppc440spe_adma_chan *chan) ppc440spe_chan_is_busy() argument
1036 switch (chan->device->id) { ppc440spe_chan_is_busy()
1039 dma_reg = chan->device->dma_reg; ppc440spe_chan_is_busy()
1050 xor_reg = chan->device->xor_reg; ppc440spe_chan_is_busy()
1062 struct ppc440spe_adma_chan *chan, ppc440spe_chan_set_first_xor_descriptor()
1065 struct xor_regs *xor_reg = chan->device->xor_reg; ppc440spe_chan_set_first_xor_descriptor()
1081 chan->hw_chain_inited = 1; ppc440spe_chan_set_first_xor_descriptor()
1088 static void ppc440spe_dma_put_desc(struct ppc440spe_adma_chan *chan, ppc440spe_dma_put_desc() argument
1092 struct dma_regs *dma_reg = chan->device->dma_reg; ppc440spe_dma_put_desc()
1098 chan_last_sub[chan->device->id] = desc; ppc440spe_dma_put_desc()
1100 ADMA_LL_DBG(print_cb(chan, desc->hw_desc)); ppc440spe_dma_put_desc()
1108 static void ppc440spe_chan_append(struct ppc440spe_adma_chan *chan) ppc440spe_chan_append() argument
1118 switch (chan->device->id) { ppc440spe_chan_append()
1121 cur_desc = ppc440spe_chan_get_current_descriptor(chan); ppc440spe_chan_append()
1124 iter = chan_last_sub[chan->device->id]; ppc440spe_chan_append()
1128 iter = chan_first_cdb[chan->device->id]; ppc440spe_chan_append()
1130 ppc440spe_dma_put_desc(chan, iter); ppc440spe_chan_append()
1131 chan->hw_chain_inited = 1; ppc440spe_chan_append()
1139 list_for_each_entry_continue(iter, &chan->chain, chain_node) { ppc440spe_chan_append()
1140 ppc440spe_dma_put_desc(chan, iter); ppc440spe_chan_append()
1150 xor_reg = chan->device->xor_reg; ppc440spe_chan_append()
1165 ADMA_LL_DBG(print_cb_list(chan, ppc440spe_chan_append()
1187 ppc440spe_chan_get_current_descriptor(struct ppc440spe_adma_chan *chan) ppc440spe_chan_get_current_descriptor() argument
1192 if (unlikely(!chan->hw_chain_inited)) ppc440spe_chan_get_current_descriptor()
1196 switch (chan->device->id) { ppc440spe_chan_get_current_descriptor()
1199 dma_reg = chan->device->dma_reg; ppc440spe_chan_get_current_descriptor()
1202 xor_reg = chan->device->xor_reg; ppc440spe_chan_get_current_descriptor()
1211 static void ppc440spe_chan_run(struct ppc440spe_adma_chan *chan) ppc440spe_chan_run() argument
1215 switch (chan->device->id) { ppc440spe_chan_run()
1222 xor_reg = chan->device->xor_reg; ppc440spe_chan_run()
1235 static void ppc440spe_chan_start_null_xor(struct ppc440spe_adma_chan *chan);
1236 static int ppc440spe_adma_alloc_chan_resources(struct dma_chan *chan);
1330 * the operation given on this channel. It's assumed that 'chan' is
1332 * @chan: channel to use
1340 static int ppc440spe_adma_estimate(struct dma_chan *chan, ppc440spe_adma_estimate() argument
1363 if (cap == DMA_PQ && chan->chan_id == PPC440SPE_XOR_ID) { ppc440spe_adma_estimate()
1375 !ppc440spe_chan_is_busy(to_ppc440spe_adma_chan(chan))) ppc440spe_adma_estimate()
1414 if (dma_has_cap(cap, ref->chan->device->cap_mask)) { ppc440spe_async_tx_find_best_channel()
1417 rank = ppc440spe_adma_estimate(ref->chan, cap, dst_lst, ppc440spe_async_tx_find_best_channel()
1421 best_chan = ref->chan; ppc440spe_async_tx_find_best_channel()
1459 struct ppc440spe_adma_chan *chan) ppc440spe_adma_free_slots()
1477 struct ppc440spe_adma_chan *chan, ppc440spe_adma_run_tx_complete_actions()
1505 struct ppc440spe_adma_chan *chan) ppc440spe_adma_clean_slot()
1516 if (list_is_last(&desc->chain_node, &chan->chain) || ppc440spe_adma_clean_slot()
1517 desc->phys == ppc440spe_chan_get_current_descriptor(chan)) ppc440spe_adma_clean_slot()
1520 if (chan->device->id != PPC440SPE_XOR_ID) { ppc440spe_adma_clean_slot()
1535 dev_dbg(chan->device->common.dev, "\tfree slot %llx: %d stride: %d\n", ppc440spe_adma_clean_slot()
1539 ppc440spe_adma_free_slots(desc, chan); ppc440spe_adma_clean_slot()
1550 static void __ppc440spe_adma_slot_cleanup(struct ppc440spe_adma_chan *chan) __ppc440spe_adma_slot_cleanup() argument
1554 u32 current_desc = ppc440spe_chan_get_current_descriptor(chan); __ppc440spe_adma_slot_cleanup()
1555 int busy = ppc440spe_chan_is_busy(chan); __ppc440spe_adma_slot_cleanup()
1558 dev_dbg(chan->device->common.dev, "ppc440spe adma%d: %s\n", __ppc440spe_adma_slot_cleanup()
1559 chan->device->id, __func__); __ppc440spe_adma_slot_cleanup()
1571 list_for_each_entry_safe(iter, _iter, &chan->chain, __ppc440spe_adma_slot_cleanup()
1573 dev_dbg(chan->device->common.dev, "\tcookie: %d slot: %d " __ppc440spe_adma_slot_cleanup()
1577 ppc440spe_desc_get_link(iter, chan), current_desc, __ppc440spe_adma_slot_cleanup()
1595 if (busy || ppc440spe_desc_get_link(iter, chan)) { __ppc440spe_adma_slot_cleanup()
1628 &chan->chain, chain_node) { __ppc440spe_adma_slot_cleanup()
1631 grp_iter, chan, cookie); __ppc440spe_adma_slot_cleanup()
1635 grp_iter, chan); __ppc440spe_adma_slot_cleanup()
1639 chan->common.completed_cookie = cookie; __ppc440spe_adma_slot_cleanup()
1659 cookie = ppc440spe_adma_run_tx_complete_actions(iter, chan, __ppc440spe_adma_slot_cleanup()
1662 if (ppc440spe_adma_clean_slot(iter, chan)) __ppc440spe_adma_slot_cleanup()
1669 chan->common.completed_cookie = cookie; __ppc440spe_adma_slot_cleanup()
1680 struct ppc440spe_adma_chan *chan = (struct ppc440spe_adma_chan *) data; ppc440spe_adma_tasklet() local
1682 spin_lock_nested(&chan->lock, SINGLE_DEPTH_NESTING); ppc440spe_adma_tasklet()
1683 __ppc440spe_adma_slot_cleanup(chan); ppc440spe_adma_tasklet()
1684 spin_unlock(&chan->lock); ppc440spe_adma_tasklet()
1690 static void ppc440spe_adma_slot_cleanup(struct ppc440spe_adma_chan *chan) ppc440spe_adma_slot_cleanup() argument
1692 spin_lock_bh(&chan->lock); ppc440spe_adma_slot_cleanup()
1693 __ppc440spe_adma_slot_cleanup(chan); ppc440spe_adma_slot_cleanup()
1694 spin_unlock_bh(&chan->lock); ppc440spe_adma_slot_cleanup()
1701 struct ppc440spe_adma_chan *chan, int num_slots, ppc440spe_adma_alloc_slots()
1718 iter = chan->last_used; ppc440spe_adma_alloc_slots()
1720 iter = list_entry(&chan->all_slots, ppc440spe_adma_alloc_slots()
1723 list_for_each_entry_safe_continue(iter, _iter, &chan->all_slots, ppc440spe_adma_alloc_slots()
1766 chan->last_used = last_used; ppc440spe_adma_alloc_slots()
1774 tasklet_schedule(&chan->irq_tasklet); ppc440spe_adma_alloc_slots()
1781 static int ppc440spe_adma_alloc_chan_resources(struct dma_chan *chan) ppc440spe_adma_alloc_chan_resources() argument
1789 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_alloc_chan_resources()
1791 chan->chan_id = ppc440spe_chan->device->id; ppc440spe_adma_alloc_chan_resources()
1811 dma_async_tx_descriptor_init(&slot->async_tx, chan); ppc440spe_adma_alloc_chan_resources()
1896 static void ppc440spe_adma_check_threshold(struct ppc440spe_adma_chan *chan) ppc440spe_adma_check_threshold() argument
1898 dev_dbg(chan->device->common.dev, "ppc440spe adma%d: pending: %d\n", ppc440spe_adma_check_threshold()
1899 chan->device->id, chan->pending); ppc440spe_adma_check_threshold()
1901 if (chan->pending >= PPC440SPE_ADMA_THRESHOLD) { ppc440spe_adma_check_threshold()
1902 chan->pending = 0; ppc440spe_adma_check_threshold()
1903 ppc440spe_chan_append(chan); ppc440spe_adma_check_threshold()
1915 struct ppc440spe_adma_chan *chan = to_ppc440spe_adma_chan(tx->chan); ppc440spe_adma_tx_submit() local
1927 spin_lock_bh(&chan->lock); ppc440spe_adma_tx_submit()
1930 if (unlikely(list_empty(&chan->chain))) { ppc440spe_adma_tx_submit()
1932 list_splice_init(&sw_desc->group_list, &chan->chain); ppc440spe_adma_tx_submit()
1933 chan_first_cdb[chan->device->id] = group_start; ppc440spe_adma_tx_submit()
1936 old_chain_tail = list_entry(chan->chain.prev, ppc440spe_adma_tx_submit()
1942 ppc440spe_desc_set_link(chan, old_chain_tail, group_start); ppc440spe_adma_tx_submit()
1946 chan->pending += slot_cnt / slots_per_op; ppc440spe_adma_tx_submit()
1947 ppc440spe_adma_check_threshold(chan); ppc440spe_adma_tx_submit()
1948 spin_unlock_bh(&chan->lock); ppc440spe_adma_tx_submit()
1950 dev_dbg(chan->device->common.dev, ppc440spe_adma_tx_submit()
1952 chan->device->id, __func__, ppc440spe_adma_tx_submit()
1962 struct dma_chan *chan, unsigned long flags) ppc440spe_adma_prep_dma_interrupt()
1968 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_prep_dma_interrupt()
1993 struct dma_chan *chan, dma_addr_t dma_dest, ppc440spe_adma_prep_dma_memcpy()
2000 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_prep_dma_memcpy()
2034 struct dma_chan *chan, dma_addr_t dma_dest, ppc440spe_adma_prep_dma_xor()
2042 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_prep_dma_xor()
2117 struct ppc440spe_adma_chan *chan; ppc440spe_dma01_prep_mult() local
2121 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_dma01_prep_mult()
2140 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_dma01_prep_mult()
2142 ppc440spe_desc_set_dest_addr(iter, chan, 0, dst[1], 1); ppc440spe_dma01_prep_mult()
2143 ppc440spe_desc_set_src_addr(iter, chan, 0, DMA_CUED_XOR_HB, ppc440spe_dma01_prep_mult()
2164 ppc440spe_desc_set_src_addr(iter, chan, 0, ppc440spe_dma01_prep_mult()
2166 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_dma01_prep_mult()
2169 ppc440spe_desc_set_src_mult(iter, chan, DMA_CUED_MULT1_OFF, ppc440spe_dma01_prep_mult()
2203 struct ppc440spe_adma_chan *chan; ppc440spe_dma01_prep_sum_product() local
2207 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_dma01_prep_sum_product()
2223 ppc440spe_desc_set_dest_addr(iter, chan, DMA_CUED_XOR_BASE, ppc440spe_dma01_prep_sum_product()
2225 ppc440spe_desc_set_dest_addr(iter, chan, 0, ppc440spe_dma01_prep_sum_product()
2227 ppc440spe_desc_set_src_addr(iter, chan, 0, DMA_CUED_XOR_HB, ppc440spe_dma01_prep_sum_product()
2249 ppc440spe_desc_set_src_addr(iter, chan, 0, DMA_CUED_XOR_HB, ppc440spe_dma01_prep_sum_product()
2251 ppc440spe_desc_set_dest_addr(iter, chan, DMA_CUED_XOR_BASE, ppc440spe_dma01_prep_sum_product()
2253 ppc440spe_desc_set_src_mult(iter, chan, DMA_CUED_MULT1_OFF, ppc440spe_dma01_prep_sum_product()
2274 ppc440spe_desc_set_src_addr(iter, chan, 0, DMA_CUED_XOR_HB, ppc440spe_dma01_prep_sum_product()
2276 ppc440spe_desc_set_dest_addr(iter, chan, DMA_CUED_XOR_BASE, ppc440spe_dma01_prep_sum_product()
2278 ppc440spe_desc_set_src_mult(iter, chan, DMA_CUED_MULT1_OFF, ppc440spe_dma01_prep_sum_product()
2525 struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, ppc440spe_adma_prep_dma_pq()
2533 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_prep_dma_pq()
2601 struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, ppc440spe_adma_prep_dma_pqzero_sum()
2610 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_prep_dma_pqzero_sum()
2652 struct ppc440spe_adma_chan *chan; ppc440spe_adma_prep_dma_pqzero_sum() local
2655 chan = to_ppc440spe_adma_chan(iter->async_tx.chan); ppc440spe_adma_prep_dma_pqzero_sum()
2664 ppc440spe_desc_set_dest_addr(iter, chan, 0, ppc440spe_adma_prep_dma_pqzero_sum()
2666 ppc440spe_desc_set_src_addr(iter, chan, 0, 0, pdest); ppc440spe_adma_prep_dma_pqzero_sum()
2675 struct ppc440spe_adma_chan *chan; ppc440spe_adma_prep_dma_pqzero_sum() local
2680 chan = to_ppc440spe_adma_chan(iter->async_tx.chan); ppc440spe_adma_prep_dma_pqzero_sum()
2696 ppc440spe_desc_set_dest_addr(iter, chan, 0, ppc440spe_adma_prep_dma_pqzero_sum()
2698 ppc440spe_desc_set_src_addr(iter, chan, 0, 0, qdest); ppc440spe_adma_prep_dma_pqzero_sum()
2752 struct ppc440spe_adma_chan *chan; ppc440spe_adma_prep_dma_pqzero_sum() local
2755 chan = to_ppc440spe_adma_chan(iter->async_tx.chan); ppc440spe_adma_prep_dma_pqzero_sum()
2756 ppc440spe_desc_set_src_addr(iter, chan, 0, ppc440spe_adma_prep_dma_pqzero_sum()
2762 ppc440spe_desc_set_src_mult(iter, chan, ppc440spe_adma_prep_dma_pqzero_sum()
2780 struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt, ppc440spe_adma_prep_dma_xor_zero_sum()
2791 tx = ppc440spe_adma_prep_dma_pqzero_sum(chan, pq, &src[1], ppc440spe_adma_prep_dma_xor_zero_sum()
2803 struct ppc440spe_adma_chan *chan; ppc440spe_adma_set_dest() local
2807 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_adma_set_dest()
2809 switch (chan->device->id) { ppc440spe_adma_set_dest()
2816 chan, 0, addr, index); ppc440spe_adma_set_dest()
2821 chan, 0, addr, index); ppc440spe_adma_set_dest()
2827 struct ppc440spe_adma_chan *chan, dma_addr_t addr) ppc440spe_adma_pq_zero_op()
2833 ppc440spe_desc_set_dest_addr(iter, chan, DMA_CUED_XOR_BASE, addr, 0); ppc440spe_adma_pq_zero_op()
2836 ppc440spe_desc_set_src_addr(iter, chan, 0, DMA_CUED_XOR_HB, addr); ppc440spe_adma_pq_zero_op()
2839 ppc440spe_desc_set_src_mult(iter, chan, DMA_CUED_MULT1_OFF, ppc440spe_adma_pq_zero_op()
2851 struct ppc440spe_adma_chan *chan; ppc440spe_adma_pq_set_dest() local
2856 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_adma_pq_set_dest()
2871 switch (chan->device->id) { ppc440spe_adma_pq_set_dest()
2889 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pq_set_dest()
2895 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pq_set_dest()
2897 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pq_set_dest()
2911 ppc440spe_adma_pq_zero_op(iter, chan, ppc440spe_adma_pq_set_dest()
2919 ppc440spe_adma_pq_zero_op(iter, chan, ppc440spe_adma_pq_set_dest()
2942 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pq_set_dest()
2949 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pq_set_dest()
2965 iter, chan, ppc440spe_adma_pq_set_dest()
2975 iter, chan, ppc440spe_adma_pq_set_dest()
2979 iter, chan, ppc440spe_adma_pq_set_dest()
3007 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pq_set_dest()
3021 chan, qpath, qaddr, 0); ppc440spe_adma_pq_set_dest()
3041 struct ppc440spe_adma_chan *chan; ppc440spe_adma_pqzero_sum_set_dest() local
3045 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_adma_pqzero_sum_set_dest()
3067 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pqzero_sum_set_dest()
3069 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pqzero_sum_set_dest()
3079 ppc440spe_desc_set_dest_addr(iter, chan, ppc440spe_adma_pqzero_sum_set_dest()
3088 ppc440spe_desc_set_src_addr(end, chan, 0, 0, addr ? addr : paddr); ppc440spe_adma_pqzero_sum_set_dest()
3093 ppc440spe_desc_set_src_addr(end, chan, 0, 0, qaddr); ppc440spe_adma_pqzero_sum_set_dest()
3116 struct ppc440spe_adma_chan *chan; ppc440spe_adma_pq_set_src() local
3120 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_adma_pq_set_src()
3122 switch (chan->device->id) { ppc440spe_adma_pq_set_src()
3188 ppc440spe_desc_set_src_addr(iter, chan, 0, haddr, addr); ppc440spe_adma_pq_set_src()
3197 ppc440spe_desc_set_src_addr(iter, chan, 0, ppc440spe_adma_pq_set_src()
3226 struct ppc440spe_adma_chan *chan; ppc440spe_adma_memcpy_xor_set_src() local
3228 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_adma_memcpy_xor_set_src()
3232 ppc440spe_desc_set_src_addr(sw_desc, chan, index, 0, addr); ppc440spe_adma_memcpy_xor_set_src()
3468 struct ppc440spe_adma_chan *chan; ppc440spe_adma_pq_set_src_mult() local
3472 chan = to_ppc440spe_adma_chan(sw_desc->async_tx.chan); ppc440spe_adma_pq_set_src_mult()
3474 switch (chan->device->id) { ppc440spe_adma_pq_set_src_mult()
3518 ppc440spe_desc_set_src_mult(iter, chan, ppc440spe_adma_pq_set_src_mult()
3525 ppc440spe_desc_set_src_mult(iter1, chan, ppc440spe_adma_pq_set_src_mult()
3550 static void ppc440spe_adma_free_chan_resources(struct dma_chan *chan) ppc440spe_adma_free_chan_resources() argument
3556 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_free_chan_resources()
3587 * @chan: ADMA channel handle
3591 static enum dma_status ppc440spe_adma_tx_status(struct dma_chan *chan, ppc440spe_adma_tx_status() argument
3597 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_tx_status()
3598 ret = dma_cookie_status(chan, cookie, txstate); ppc440spe_adma_tx_status()
3604 return dma_cookie_status(chan, cookie, txstate); ppc440spe_adma_tx_status()
3612 struct ppc440spe_adma_chan *chan = data; ppc440spe_adma_eot_handler() local
3614 dev_dbg(chan->device->common.dev, ppc440spe_adma_eot_handler()
3615 "ppc440spe adma%d: %s\n", chan->device->id, __func__); ppc440spe_adma_eot_handler()
3617 tasklet_schedule(&chan->irq_tasklet); ppc440spe_adma_eot_handler()
3618 ppc440spe_adma_device_clear_eot_status(chan); ppc440spe_adma_eot_handler()
3629 struct ppc440spe_adma_chan *chan = data; ppc440spe_adma_err_handler() local
3631 dev_dbg(chan->device->common.dev, ppc440spe_adma_err_handler()
3632 "ppc440spe adma%d: %s\n", chan->device->id, __func__); ppc440spe_adma_err_handler()
3634 tasklet_schedule(&chan->irq_tasklet); ppc440spe_adma_err_handler()
3635 ppc440spe_adma_device_clear_eot_status(chan); ppc440spe_adma_err_handler()
3651 static void ppc440spe_adma_issue_pending(struct dma_chan *chan) ppc440spe_adma_issue_pending() argument
3655 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_issue_pending()
3671 static void ppc440spe_chan_start_null_xor(struct ppc440spe_adma_chan *chan) ppc440spe_chan_start_null_xor() argument
3677 dev_dbg(chan->device->common.dev, ppc440spe_chan_start_null_xor()
3678 "ppc440spe adma%d: %s\n", chan->device->id, __func__); ppc440spe_chan_start_null_xor()
3680 spin_lock_bh(&chan->lock); ppc440spe_chan_start_null_xor()
3682 sw_desc = ppc440spe_adma_alloc_slots(chan, slot_cnt, slots_per_op); ppc440spe_chan_start_null_xor()
3685 list_splice_init(&sw_desc->group_list, &chan->chain); ppc440spe_chan_start_null_xor()
3694 chan->common.completed_cookie = cookie - 1; ppc440spe_chan_start_null_xor()
3697 BUG_ON(ppc440spe_chan_is_busy(chan)); ppc440spe_chan_start_null_xor()
3700 ppc440spe_chan_set_first_xor_descriptor(chan, sw_desc); ppc440spe_chan_start_null_xor()
3703 ppc440spe_chan_run(chan); ppc440spe_chan_start_null_xor()
3707 chan->device->id); ppc440spe_chan_start_null_xor()
3708 spin_unlock_bh(&chan->lock); ppc440spe_chan_start_null_xor()
3717 static int ppc440spe_test_raid6(struct ppc440spe_adma_chan *chan) ppc440spe_test_raid6() argument
3732 spin_lock_bh(&chan->lock); ppc440spe_test_raid6()
3733 sw_desc = ppc440spe_adma_alloc_slots(chan, 1, 1); ppc440spe_test_raid6()
3738 ppc440spe_desc_set_byte_count(iter, chan, PAGE_SIZE); ppc440spe_test_raid6()
3743 spin_unlock_bh(&chan->lock); ppc440spe_test_raid6()
3746 spin_unlock_bh(&chan->lock); ppc440spe_test_raid6()
3750 dma_addr = dma_map_page(chan->device->dev, pg, 0, ppc440spe_test_raid6()
3767 ppc440spe_adma_issue_pending(&chan->common); ppc440spe_test_raid6()
3883 struct ppc440spe_adma_chan *chan, ppc440spe_adma_setup_irqs()
3915 0, dev_driver_string(adev->dev), chan); ppc440spe_adma_setup_irqs()
3933 chan); ppc440spe_adma_setup_irqs()
3978 free_irq(adev->irq, chan); ppc440spe_adma_setup_irqs()
3990 struct ppc440spe_adma_chan *chan) ppc440spe_adma_release_irqs()
4008 free_irq(adev->irq, chan); ppc440spe_adma_release_irqs()
4011 free_irq(adev->err_irq, chan); ppc440spe_adma_release_irqs()
4027 struct ppc440spe_adma_chan *chan; ppc440spe_adma_probe() local
4146 chan = kzalloc(sizeof(*chan), GFP_KERNEL); ppc440spe_adma_probe()
4147 if (!chan) { ppc440spe_adma_probe()
4154 spin_lock_init(&chan->lock); ppc440spe_adma_probe()
4155 INIT_LIST_HEAD(&chan->chain); ppc440spe_adma_probe()
4156 INIT_LIST_HEAD(&chan->all_slots); ppc440spe_adma_probe()
4157 chan->device = adev; ppc440spe_adma_probe()
4158 chan->common.device = &adev->common; ppc440spe_adma_probe()
4159 dma_cookie_init(&chan->common); ppc440spe_adma_probe()
4160 list_add_tail(&chan->common.device_node, &adev->common.channels); ppc440spe_adma_probe()
4161 tasklet_init(&chan->irq_tasklet, ppc440spe_adma_tasklet, ppc440spe_adma_probe()
4162 (unsigned long)chan); ppc440spe_adma_probe()
4168 chan->pdest_page = alloc_page(GFP_KERNEL); ppc440spe_adma_probe()
4169 chan->qdest_page = alloc_page(GFP_KERNEL); ppc440spe_adma_probe()
4170 if (!chan->pdest_page || ppc440spe_adma_probe()
4171 !chan->qdest_page) { ppc440spe_adma_probe()
4172 if (chan->pdest_page) ppc440spe_adma_probe()
4173 __free_page(chan->pdest_page); ppc440spe_adma_probe()
4174 if (chan->qdest_page) ppc440spe_adma_probe()
4175 __free_page(chan->qdest_page); ppc440spe_adma_probe()
4179 chan->pdest = dma_map_page(&ofdev->dev, chan->pdest_page, 0, ppc440spe_adma_probe()
4181 chan->qdest = dma_map_page(&ofdev->dev, chan->qdest_page, 0, ppc440spe_adma_probe()
4187 ref->chan = &chan->common; ppc440spe_adma_probe()
4196 ret = ppc440spe_adma_setup_irqs(adev, chan, &initcode); ppc440spe_adma_probe()
4212 ppc440spe_adma_release_irqs(adev, chan); ppc440spe_adma_probe()
4215 if (chan == to_ppc440spe_adma_chan(ref->chan)) { ppc440spe_adma_probe()
4222 dma_unmap_page(&ofdev->dev, chan->pdest, ppc440spe_adma_probe()
4224 dma_unmap_page(&ofdev->dev, chan->qdest, ppc440spe_adma_probe()
4226 __free_page(chan->pdest_page); ppc440spe_adma_probe()
4227 __free_page(chan->qdest_page); ppc440spe_adma_probe()
4230 kfree(chan); ppc440spe_adma_probe()
4259 struct dma_chan *chan, *_chan; ppc440spe_adma_remove() local
4268 list_for_each_entry_safe(chan, _chan, &adev->common.channels, ppc440spe_adma_remove()
4270 ppc440spe_chan = to_ppc440spe_adma_chan(chan); ppc440spe_adma_remove()
4284 to_ppc440spe_adma_chan(ref->chan)) { ppc440spe_adma_remove()
4289 list_del(&chan->device_node); ppc440spe_adma_remove()
243 ppc440spe_desc_init_interrupt(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan) ppc440spe_desc_init_interrupt() argument
534 ppc440spe_desc_set_src_addr(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan, int src_idx, dma_addr_t addrh, dma_addr_t addrl) ppc440spe_desc_set_src_addr() argument
569 ppc440spe_desc_set_src_mult(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan, u32 mult_index, int sg_index, unsigned char mult_value) ppc440spe_desc_set_src_mult() argument
615 ppc440spe_desc_set_dest_addr(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan, dma_addr_t addrh, dma_addr_t addrl, u32 dst_idx) ppc440spe_desc_set_dest_addr() argument
656 ppc440spe_desc_set_byte_count(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan, u32 byte_count) ppc440spe_desc_set_byte_count() argument
691 ppc440spe_desc_set_dcheck(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan, u8 *qword) ppc440spe_desc_set_dcheck() argument
783 ppc440spe_desc_get_link(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan) ppc440spe_desc_get_link() argument
910 ppc440spe_adma_device_clear_eot_status( struct ppc440spe_adma_chan *chan) ppc440spe_adma_device_clear_eot_status() argument
1061 ppc440spe_chan_set_first_xor_descriptor( struct ppc440spe_adma_chan *chan, struct ppc440spe_adma_desc_slot *next_desc) ppc440spe_chan_set_first_xor_descriptor() argument
1458 ppc440spe_adma_free_slots(struct ppc440spe_adma_desc_slot *slot, struct ppc440spe_adma_chan *chan) ppc440spe_adma_free_slots() argument
1475 ppc440spe_adma_run_tx_complete_actions( struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan, dma_cookie_t cookie) ppc440spe_adma_run_tx_complete_actions() argument
1504 ppc440spe_adma_clean_slot(struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_adma_chan *chan) ppc440spe_adma_clean_slot() argument
1700 ppc440spe_adma_alloc_slots( struct ppc440spe_adma_chan *chan, int num_slots, int slots_per_op) ppc440spe_adma_alloc_slots() argument
1961 ppc440spe_adma_prep_dma_interrupt( struct dma_chan *chan, unsigned long flags) ppc440spe_adma_prep_dma_interrupt() argument
1992 ppc440spe_adma_prep_dma_memcpy( struct dma_chan *chan, dma_addr_t dma_dest, dma_addr_t dma_src, size_t len, unsigned long flags) ppc440spe_adma_prep_dma_memcpy() argument
2033 ppc440spe_adma_prep_dma_xor( struct dma_chan *chan, dma_addr_t dma_dest, dma_addr_t *dma_src, u32 src_cnt, size_t len, unsigned long flags) ppc440spe_adma_prep_dma_xor() argument
2524 ppc440spe_adma_prep_dma_pq( struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) ppc440spe_adma_prep_dma_pq() argument
2600 ppc440spe_adma_prep_dma_pqzero_sum( struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, enum sum_check_flags *pqres, unsigned long flags) ppc440spe_adma_prep_dma_pqzero_sum() argument
2779 ppc440spe_adma_prep_dma_xor_zero_sum( struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt, size_t len, enum sum_check_flags *result, unsigned long flags) ppc440spe_adma_prep_dma_xor_zero_sum() argument
2826 ppc440spe_adma_pq_zero_op(struct ppc440spe_adma_desc_slot *iter, struct ppc440spe_adma_chan *chan, dma_addr_t addr) ppc440spe_adma_pq_zero_op() argument
3882 ppc440spe_adma_setup_irqs(struct ppc440spe_adma_device *adev, struct ppc440spe_adma_chan *chan, int *initcode) ppc440spe_adma_setup_irqs() argument
3989 ppc440spe_adma_release_irqs(struct ppc440spe_adma_device *adev, struct ppc440spe_adma_chan *chan) ppc440spe_adma_release_irqs() argument
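The prep_dma_pq/pqzero_sum hits above are the provider half of RAID6 offload on ppc440spe; the usual consumer reaches them through async_gen_syndrome(). A hedged sketch — the block-array layout follows the raid6 convention, everything else is assumed:

	/* Illustrative only: blocks[0..disks-3] are data pages,
	 * blocks[disks-2] receives P and blocks[disks-1] receives Q. */
	#include <linux/async_tx.h>

	static void example_gen_pq(struct page **blocks, int disks, size_t len)
	{
		struct async_submit_ctl submit;

		init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, NULL);
		async_gen_syndrome(blocks, 0, disks, len, &submit);
	}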
/linux-4.1.27/include/linux/platform_data/
H A Ddma-imx.h60 static inline int imx_dma_is_ipu(struct dma_chan *chan) imx_dma_is_ipu() argument
62 return !strcmp(dev_name(chan->device->dev), "ipu-core"); imx_dma_is_ipu()
65 static inline int imx_dma_is_general_purpose(struct dma_chan *chan) imx_dma_is_general_purpose() argument
67 return !strcmp(chan->device->dev->driver->name, "imx-sdma") || imx_dma_is_general_purpose()
68 !strcmp(chan->device->dev->driver->name, "imx-dma"); imx_dma_is_general_purpose()
H A Ddma-s3c24xx.h13 #define S3C24XX_DMA_CHANREQ(src, chan) ((BIT(3) | src) << chan * 4)
46 bool s3c24xx_dma_filter(struct dma_chan *chan, void *param);
H A Ddma-ep93xx.h70 static inline bool ep93xx_dma_chan_is_m2p(struct dma_chan *chan) ep93xx_dma_chan_is_m2p() argument
72 return !strcmp(dev_name(chan->device->dev), "ep93xx-dma-m2p"); ep93xx_dma_chan_is_m2p()
77 * @chan: channel
84 ep93xx_dma_chan_direction(struct dma_chan *chan) ep93xx_dma_chan_direction() argument
86 if (!ep93xx_dma_chan_is_m2p(chan)) ep93xx_dma_chan_direction()
90 return (chan->chan_id % 2 == 0) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM; ep93xx_dma_chan_direction()
H A Dcrypto-ux500.h14 bool (*dma_filter)(struct dma_chan *chan, void *filter_param);
H A Dusb-musb-ux500.h17 bool (*dma_filter)(struct dma_chan *chan, void *filter_param);
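Filter callbacks like those above (s3c24xx_dma_filter and the dma_filter members) are consumed through dma_request_channel(). A hedged sketch of the consumer; the filter parameter is platform-specific and assumed here:

	/* Illustrative: ask the dmaengine core for a slave-capable channel,
	 * letting the platform filter reject non-matching candidates. */
	#include <linux/dmaengine.h>
	#include <linux/platform_data/dma-s3c24xx.h>

	static struct dma_chan *example_get_chan(void *filter_param)
	{
		dma_cap_mask_t mask;

		dma_cap_zero(mask);
		dma_cap_set(DMA_SLAVE, mask);	/* want a slave channel */

		return dma_request_channel(mask, s3c24xx_dma_filter,
					   filter_param);
	}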
/linux-4.1.27/drivers/dma/hsu/
H A Dhsu.c18 * 2. Channels 0/1 are assigned to port 0, 2/3 chan to port 1, 4/5 chan to port 2
160 hsuc = &chip->hsu->chan[nr]; hsu_dma_irq()
223 struct dma_chan *chan, struct scatterlist *sgl, hsu_dma_prep_slave_sg()
227 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan); hsu_dma_prep_slave_sg()
249 static void hsu_dma_issue_pending(struct dma_chan *chan) hsu_dma_issue_pending() argument
251 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan); hsu_dma_issue_pending()
288 static enum dma_status hsu_dma_tx_status(struct dma_chan *chan, hsu_dma_tx_status() argument
291 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan); hsu_dma_tx_status()
297 status = dma_cookie_status(chan, cookie, state); hsu_dma_tx_status()
316 static int hsu_dma_slave_config(struct dma_chan *chan, hsu_dma_slave_config() argument
319 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan); hsu_dma_slave_config()
321 /* Check if chan will be configured for slave transfers */ hsu_dma_slave_config()
348 static int hsu_dma_pause(struct dma_chan *chan) hsu_dma_pause() argument
350 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan); hsu_dma_pause()
363 static int hsu_dma_resume(struct dma_chan *chan) hsu_dma_resume() argument
365 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan); hsu_dma_resume()
378 static int hsu_dma_terminate_all(struct dma_chan *chan) hsu_dma_terminate_all() argument
380 struct hsu_dma_chan *hsuc = to_hsu_dma_chan(chan); hsu_dma_terminate_all()
399 static void hsu_dma_free_chan_resources(struct dma_chan *chan) hsu_dma_free_chan_resources() argument
401 vchan_free_chan_resources(to_virt_chan(chan)); hsu_dma_free_chan_resources()
430 hsu->chan = devm_kcalloc(chip->dev, pdata->nr_channels, hsu_dma_probe()
431 sizeof(*hsu->chan), GFP_KERNEL); hsu_dma_probe()
432 if (!hsu->chan) hsu_dma_probe()
437 struct hsu_dma_chan *hsuc = &hsu->chan[i]; hsu_dma_probe()
487 struct hsu_dma_chan *hsuc = &hsu->chan[i]; hsu_dma_remove()
222 hsu_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) hsu_dma_prep_slave_sg() argument
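hsu_dma_pause/resume/tx_status above are reached through the generic dmaengine wrappers. A hedged sketch of a UART driver reading a stable residue; the channel and cookie are assumptions:

	/* Illustrative: pause so the residue snapshot cannot move under us,
	 * query the cookie state, then resume the transfer. */
	#include <linux/dmaengine.h>

	static size_t example_residue(struct dma_chan *chan, dma_cookie_t cookie)
	{
		struct dma_tx_state state;

		dmaengine_pause(chan);			  /* -> hsu_dma_pause() */
		dmaengine_tx_status(chan, cookie, &state); /* -> hsu_dma_tx_status() */
		dmaengine_resume(chan);			  /* -> hsu_dma_resume() */
		return state.residue;
	}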
/linux-4.1.27/arch/arm/mach-footbridge/
H A Ddma.c24 static int fb_dma_request(unsigned int chan, dma_t *dma)
29 static void fb_dma_enable(unsigned int chan, dma_t *dma)
33 static void fb_dma_disable(unsigned int chan, dma_t *dma)
/linux-4.1.27/net/wireless/
H A Dmesh.c124 if (!setup->chandef.chan) { __cfg80211_join_mesh()
129 if (!setup->chandef.chan) { __cfg80211_join_mesh()
135 struct ieee80211_channel *chan; __cfg80211_join_mesh() local
143 chan = &sband->channels[i]; __cfg80211_join_mesh()
144 if (chan->flags & (IEEE80211_CHAN_NO_IR | __cfg80211_join_mesh()
148 setup->chandef.chan = chan; __cfg80211_join_mesh()
152 if (setup->chandef.chan) __cfg80211_join_mesh()
157 if (!setup->chandef.chan) __cfg80211_join_mesh()
161 setup->chandef.center_freq1 = setup->chandef.chan->center_freq; __cfg80211_join_mesh()
171 rdev->wiphy.bands[setup->chandef.chan->band]; __cfg80211_join_mesh()
227 chandef->chan); cfg80211_set_mesh_channel()
H A Ddebugfs.c43 static int ht_print_chan(struct ieee80211_channel *chan, ht_print_chan() argument
49 if (chan->flags & IEEE80211_CHAN_DISABLED) ht_print_chan()
53 chan->center_freq); ht_print_chan()
58 chan->center_freq, ht_print_chan()
59 (chan->flags & IEEE80211_CHAN_NO_HT40MINUS) ? ht_print_chan()
61 (chan->flags & IEEE80211_CHAN_NO_HT40PLUS) ? ht_print_chan()
H A Dibss.c105 rdev->wiphy.bands[params->chandef.chan->band]; __cfg80211_join_ibss()
107 u32 flag = params->chandef.chan->band == IEEE80211_BAND_5GHZ ? __cfg80211_join_ibss()
248 if (!wdev->wext.ibss.chandef.chan) { cfg80211_ibss_wext_join()
253 struct ieee80211_channel *chan; cfg80211_ibss_wext_join() local
260 chan = &sband->channels[i]; cfg80211_ibss_wext_join()
261 if (chan->flags & IEEE80211_CHAN_NO_IR) cfg80211_ibss_wext_join()
263 if (chan->flags & IEEE80211_CHAN_DISABLED) cfg80211_ibss_wext_join()
265 new_chan = chan; cfg80211_ibss_wext_join()
315 struct ieee80211_channel *chan = NULL; cfg80211_ibss_wext_siwfreq() local
330 chan = ieee80211_get_channel(wdev->wiphy, freq); cfg80211_ibss_wext_siwfreq()
331 if (!chan) cfg80211_ibss_wext_siwfreq()
333 if (chan->flags & IEEE80211_CHAN_NO_IR || cfg80211_ibss_wext_siwfreq()
334 chan->flags & IEEE80211_CHAN_DISABLED) cfg80211_ibss_wext_siwfreq()
338 if (wdev->wext.ibss.chandef.chan == chan) cfg80211_ibss_wext_siwfreq()
350 if (chan) { cfg80211_ibss_wext_siwfreq()
351 cfg80211_chandef_create(&wdev->wext.ibss.chandef, chan, cfg80211_ibss_wext_siwfreq()
371 struct ieee80211_channel *chan = NULL; cfg80211_ibss_wext_giwfreq() local
379 chan = wdev->current_bss->pub.channel; cfg80211_ibss_wext_giwfreq()
380 else if (wdev->wext.ibss.chandef.chan) cfg80211_ibss_wext_giwfreq()
381 chan = wdev->wext.ibss.chandef.chan; cfg80211_ibss_wext_giwfreq()
384 if (chan) { cfg80211_ibss_wext_giwfreq()
385 freq->m = chan->center_freq; cfg80211_ibss_wext_giwfreq()
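
Note: the siwfreq handler above resolves a user-supplied frequency to a channel, rejects channels the regulatory flags forbid, and rebuilds the chandef. A condensed sketch of that path (freq and wdev are assumed from context; the NL80211_CHAN_NO_HT channel type is illustrative, since the third argument is truncated in the hit above):

    /* Sketch: frequency -> channel lookup with regulatory checks. */
    chan = ieee80211_get_channel(wdev->wiphy, freq);
    if (!chan)
            return -EINVAL;                 /* frequency unknown to this wiphy */
    if (chan->flags & IEEE80211_CHAN_NO_IR ||
        chan->flags & IEEE80211_CHAN_DISABLED)
            return -EINVAL;                 /* cannot start an IBSS here */
    cfg80211_chandef_create(&wdev->wext.ibss.chandef, chan,
                            NL80211_CHAN_NO_HT);
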
H A Dchan.c16 struct ieee80211_channel *chan, cfg80211_chandef_create()
19 if (WARN_ON(!chan)) cfg80211_chandef_create()
22 chandef->chan = chan; cfg80211_chandef_create()
28 chandef->center_freq1 = chan->center_freq; cfg80211_chandef_create()
32 chandef->center_freq1 = chan->center_freq; cfg80211_chandef_create()
36 chandef->center_freq1 = chan->center_freq + 10; cfg80211_chandef_create()
40 chandef->center_freq1 = chan->center_freq - 10; cfg80211_chandef_create()
52 if (!chandef->chan) cfg80211_chandef_valid()
55 control_freq = chandef->chan->center_freq; cfg80211_chandef_valid()
131 tmp = (30 + c->chan->center_freq - c->center_freq1)/20; chandef_primary_freqs()
139 tmp = (70 + c->chan->center_freq - c->center_freq1)/20; chandef_primary_freqs()
196 if (c1->chan != c2->chan) cfg80211_chandef_compatible()
611 ht_cap = &wiphy->bands[chandef->chan->band]->ht_cap; cfg80211_chandef_usable()
612 vht_cap = &wiphy->bands[chandef->chan->band]->vht_cap; cfg80211_chandef_usable()
614 control_freq = chandef->chan->center_freq; cfg80211_chandef_usable()
639 chandef->chan->flags & IEEE80211_CHAN_NO_HT40MINUS) cfg80211_chandef_usable()
642 chandef->chan->flags & IEEE80211_CHAN_NO_HT40PLUS) cfg80211_chandef_usable()
710 struct ieee80211_channel *chan) cfg80211_go_permissive_chan()
722 (chan->flags & IEEE80211_CHAN_INDOOR_ONLY)) cfg80211_go_permissive_chan()
725 if (!(chan->flags & IEEE80211_CHAN_GO_CONCURRENT)) cfg80211_go_permissive_chan()
751 if (chan == other_chan) cfg80211_go_permissive_chan()
754 if (chan->band != IEEE80211_BAND_5GHZ) cfg80211_go_permissive_chan()
757 r1 = cfg80211_get_unii(chan->center_freq); cfg80211_go_permissive_chan()
773 if (chan->center_freq == 5825 && cfg80211_go_permissive_chan()
801 !cfg80211_go_permissive_chan(rdev, chandef->chan)) cfg80211_reg_can_beacon()
830 struct ieee80211_channel **chan, cfg80211_get_chan_state()
836 *chan = NULL; cfg80211_get_chan_state()
847 *chan = wdev->current_bss->pub.channel; cfg80211_get_chan_state()
863 *chan = wdev->current_bss->pub.channel; cfg80211_get_chan_state()
871 *chan = wdev->chandef.chan; cfg80211_get_chan_state()
875 *chan = wdev->chandef.chan; cfg80211_get_chan_state()
888 *chan = wdev->chandef.chan; cfg80211_get_chan_state()
900 if (wdev->chandef.chan) { cfg80211_get_chan_state()
901 *chan = wdev->chandef.chan; cfg80211_get_chan_state()
15 cfg80211_chandef_create(struct cfg80211_chan_def *chandef, struct ieee80211_channel *chan, enum nl80211_channel_type chan_type) cfg80211_chandef_create() argument
709 cfg80211_go_permissive_chan(struct cfg80211_registered_device *rdev, struct ieee80211_channel *chan) cfg80211_go_permissive_chan() argument
829 cfg80211_get_chan_state(struct wireless_dev *wdev, struct ieee80211_channel **chan, enum cfg80211_chan_mode *chanmode, u8 *radar_detect) cfg80211_get_chan_state() argument
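
Note: the cfg80211_chandef_create() hits above show how center_freq1 is derived from the channel type: it equals the control-channel frequency for 20 MHz types, and sits 10 MHz above or below it for HT40+ and HT40- respectively. A worked example (the frequency values are illustrative):

    /* Example: control channel at 2437 MHz (2.4 GHz channel 6). */
    cfg80211_chandef_create(&chandef, chan, NL80211_CHAN_HT40PLUS);
    /* chandef.center_freq1 == 2447 (2437 + 10): the 40 MHz block is
     * centered between the control channel and its upper neighbour,
     * spanning 2427..2467 MHz. */
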
/linux-4.1.27/drivers/ptp/
H A Dptp_chardev.c29 enum ptp_pin_function func, unsigned int chan) ptp_disable_pinfunc()
41 rq.extts.index = chan; ptp_disable_pinfunc()
46 rq.perout.index = chan; ptp_disable_pinfunc()
59 enum ptp_pin_function func, unsigned int chan) ptp_set_pinfunc()
68 info->pin_config[i].chan == chan) { ptp_set_pinfunc()
81 if (chan >= info->n_ext_ts) ptp_set_pinfunc()
85 if (chan >= info->n_per_out) ptp_set_pinfunc()
89 if (chan != 0) ptp_set_pinfunc()
95 if (info->verify(info, pin, func, chan)) { ptp_set_pinfunc()
96 pr_err("driver cannot use function %u on pin %u\n", func, chan); ptp_set_pinfunc()
102 ptp_disable_pinfunc(info, func, chan); ptp_set_pinfunc()
104 pin1->chan = 0; ptp_set_pinfunc()
106 ptp_disable_pinfunc(info, pin2->func, pin2->chan); ptp_set_pinfunc()
108 pin2->chan = chan; ptp_set_pinfunc()
246 err = ptp_set_pinfunc(ptp, pin_index, pd.func, pd.chan); ptp_ioctl()
28 ptp_disable_pinfunc(struct ptp_clock_info *ops, enum ptp_pin_function func, unsigned int chan) ptp_disable_pinfunc() argument
58 ptp_set_pinfunc(struct ptp_clock *ptp, unsigned int pin, enum ptp_pin_function func, unsigned int chan) ptp_set_pinfunc() argument
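
Note: ptp_set_pinfunc() above validates the (func, chan) pair against the clock's capabilities (chan < n_ext_ts for external timestamps, chan < n_per_out for periodic outputs, chan == 0 for PHYSYNC) and then lets the driver veto via info->verify, whose nonzero return is treated as rejection. A minimal sketch of such a verify callback, assuming a driver whose pins all support timestamping and periodic output (the function name is hypothetical):

    static int example_verify(struct ptp_clock_info *ptp, unsigned int pin,
                              enum ptp_pin_function func, unsigned int chan)
    {
            switch (func) {
            case PTP_PF_NONE:
            case PTP_PF_EXTTS:
            case PTP_PF_PEROUT:
                    return 0;               /* any pin, any channel */
            default:
                    return -EOPNOTSUPP;     /* e.g. PTP_PF_PHYSYNC */
            }
    }
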
/linux-4.1.27/drivers/power/
H A Dgeneric-adc-battery.c248 int chan; gab_probe() local
290 for (chan = 0; chan < ARRAY_SIZE(gab_chan_name); chan++) { gab_probe()
291 adc_bat->channel[chan] = iio_channel_get(&pdev->dev, gab_probe()
292 gab_chan_name[chan]); gab_probe()
293 if (IS_ERR(adc_bat->channel[chan])) { gab_probe()
294 ret = PTR_ERR(adc_bat->channel[chan]); gab_probe()
295 adc_bat->channel[chan] = NULL; gab_probe()
299 &gab_dyn_props[chan], gab_probe()
300 sizeof(gab_dyn_props[chan])); gab_probe()
353 for (chan = 0; chan < ARRAY_SIZE(gab_chan_name); chan++) { gab_probe()
354 if (adc_bat->channel[chan]) gab_probe()
355 iio_channel_release(adc_bat->channel[chan]); gab_probe()
365 int chan; gab_remove() local
376 for (chan = 0; chan < ARRAY_SIZE(gab_chan_name); chan++) { gab_remove()
377 if (adc_bat->channel[chan]) gab_remove()
378 iio_channel_release(adc_bat->channel[chan]); gab_remove()
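
Note: gab_probe() above acquires one IIO channel per name and, on any failure, unwinds the ones already held; gab_remove() repeats the same release loop. The acquire/unwind symmetry, condensed (the error label is assumed from context):

    /* Sketch: acquire named channels; NULL marks an unused slot so the
     * shared release loop stays safe after a partial failure. */
    for (chan = 0; chan < ARRAY_SIZE(gab_chan_name); chan++) {
            adc_bat->channel[chan] = iio_channel_get(&pdev->dev,
                                                     gab_chan_name[chan]);
            if (IS_ERR(adc_bat->channel[chan])) {
                    ret = PTR_ERR(adc_bat->channel[chan]);
                    adc_bat->channel[chan] = NULL;
                    goto err_release;       /* release the ones we hold */
            }
    }
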
/linux-4.1.27/drivers/net/ethernet/pasemi/
H A Dpasemi_mac.c322 offsetof(struct pasemi_mac_csring, chan)); pasemi_mac_setup_csring()
329 chno = ring->chan.chno; pasemi_mac_setup_csring()
335 if (pasemi_dma_alloc_ring(&ring->chan, CS_RING_SIZE)) pasemi_mac_setup_csring()
339 PAS_DMA_TXCHAN_BASEL_BRBL(ring->chan.ring_dma)); pasemi_mac_setup_csring()
340 val = PAS_DMA_TXCHAN_BASEU_BRBH(ring->chan.ring_dma >> 32); pasemi_mac_setup_csring()
367 pasemi_dma_start_chan(&ring->chan, PAS_DMA_TXCHAN_TCMDSTA_SZ | pasemi_mac_setup_csring()
380 pasemi_dma_free_ring(&ring->chan); pasemi_mac_setup_csring()
382 pasemi_dma_free_chan(&ring->chan); pasemi_mac_setup_csring()
404 pasemi_dma_stop_chan(&csring->chan); pasemi_mac_free_csring()
407 pasemi_dma_free_ring(&csring->chan); pasemi_mac_free_csring()
408 pasemi_dma_free_chan(&csring->chan); pasemi_mac_free_csring()
420 offsetof(struct pasemi_mac_rxring, chan)); pasemi_mac_setup_rx_resources()
426 chno = ring->chan.chno; pasemi_mac_setup_rx_resources()
438 if (pasemi_dma_alloc_ring(&ring->chan, RX_RING_SIZE)) pasemi_mac_setup_rx_resources()
448 PAS_DMA_RXCHAN_BASEL_BRBL(ring->chan.ring_dma)); pasemi_mac_setup_rx_resources()
451 PAS_DMA_RXCHAN_BASEU_BRBH(ring->chan.ring_dma >> 32) | pasemi_mac_setup_rx_resources()
487 pasemi_dma_free_chan(&ring->chan); pasemi_mac_setup_rx_resources()
502 offsetof(struct pasemi_mac_txring, chan)); pasemi_mac_setup_tx_resources()
509 chno = ring->chan.chno; pasemi_mac_setup_tx_resources()
520 if (pasemi_dma_alloc_ring(&ring->chan, TX_RING_SIZE)) pasemi_mac_setup_tx_resources()
524 PAS_DMA_TXCHAN_BASEL_BRBL(ring->chan.ring_dma)); pasemi_mac_setup_tx_resources()
525 val = PAS_DMA_TXCHAN_BASEU_BRBH(ring->chan.ring_dma >> 32); pasemi_mac_setup_tx_resources()
549 pasemi_dma_free_chan(&ring->chan); pasemi_mac_setup_tx_resources()
585 pasemi_dma_free_chan(&txring->chan); pasemi_mac_free_tx_resources()
620 pasemi_dma_free_chan(&rx_ring(mac)->chan); pasemi_mac_free_rx_resources()
681 pcnt = *rx->chan.status & PAS_STATUS_PCNT_M; pasemi_mac_restart_rx_intr()
685 if (*rx->chan.status & PAS_STATUS_TIMER) pasemi_mac_restart_rx_intr()
688 write_iob_reg(PAS_IOB_DMA_RXCH_RESET(mac->rx->chan.chno), reg); pasemi_mac_restart_rx_intr()
696 pcnt = *tx_ring(mac)->chan.status & PAS_STATUS_PCNT_M; pasemi_mac_restart_tx_intr()
700 write_iob_reg(PAS_IOB_DMA_TXCH_RESET(tx_ring(mac)->chan.chno), reg); pasemi_mac_restart_tx_intr()
708 struct pasemi_dmachan *chan = &rx_ring(mac)->chan; pasemi_mac_rx_error() local
714 ccmdsta = read_dma_reg(PAS_DMA_RXCHAN_CCMDSTA(chan->chno)); pasemi_mac_rx_error()
717 macrx, *chan->status); pasemi_mac_rx_error()
727 struct pasemi_dmachan *chan = &tx_ring(mac)->chan; pasemi_mac_tx_error() local
732 cmdsta = read_dma_reg(PAS_DMA_TXCHAN_TCMDSTA(chan->chno)); pasemi_mac_tx_error()
735 "tx status 0x%016llx\n", mactx, *chan->status); pasemi_mac_tx_error()
743 const struct pasemi_dmachan *chan = &rx->chan; pasemi_mac_clean_rx() local
768 (*chan->status & PAS_STATUS_ERROR)) pasemi_mac_clean_rx()
848 write_dma_reg(PAS_DMA_RXCHAN_INCR(mac->rx->chan.chno), count << 1); pasemi_mac_clean_rx()
865 struct pasemi_dmachan *chan = &txring->chan; pasemi_mac_clean_tx() local
901 (*chan->status & PAS_STATUS_ERROR)) pasemi_mac_clean_tx()
959 const struct pasemi_dmachan *chan = &rxring->chan; pasemi_mac_rx_intr() local
962 if (!(*chan->status & PAS_STATUS_CAUSE_M)) pasemi_mac_rx_intr()
970 if (*chan->status & PAS_STATUS_SOFT) pasemi_mac_rx_intr()
972 if (*chan->status & PAS_STATUS_ERROR) pasemi_mac_rx_intr()
977 write_iob_reg(PAS_IOB_DMA_RXCH_RESET(chan->chno), reg); pasemi_mac_rx_intr()
999 const struct pasemi_dmachan *chan = &txring->chan; pasemi_mac_tx_intr() local
1003 if (!(*chan->status & PAS_STATUS_CAUSE_M)) pasemi_mac_tx_intr()
1008 if (*chan->status & PAS_STATUS_SOFT) pasemi_mac_tx_intr()
1010 if (*chan->status & PAS_STATUS_ERROR) pasemi_mac_tx_intr()
1018 write_iob_reg(PAS_IOB_DMA_TXCH_RESET(chan->chno), reg); pasemi_mac_tx_intr()
1152 write_iob_reg(PAS_IOB_DMA_RXCH_CFG(mac->rx->chan.chno), pasemi_mac_open()
1155 write_iob_reg(PAS_IOB_DMA_TXCH_CFG(mac->tx->chan.chno), pasemi_mac_open()
1159 PAS_MAC_IPC_CHNL_DCHNO(mac->rx->chan.chno) | pasemi_mac_open()
1160 PAS_MAC_IPC_CHNL_BCH(mac->rx->chan.chno)); pasemi_mac_open()
1171 pasemi_dma_start_chan(&rx_ring(mac)->chan, PAS_DMA_RXCHAN_CCMDSTA_DU | pasemi_mac_open()
1177 pasemi_dma_start_chan(&tx_ring(mac)->chan, PAS_DMA_TXCHAN_TCMDSTA_SZ | pasemi_mac_open()
1184 write_dma_reg(PAS_DMA_RXCHAN_INCR(rx_ring(mac)->chan.chno), pasemi_mac_open()
1220 ret = request_irq(mac->tx->chan.irq, pasemi_mac_tx_intr, 0, pasemi_mac_open()
1224 mac->tx->chan.irq, ret); pasemi_mac_open()
1231 ret = request_irq(mac->rx->chan.irq, pasemi_mac_rx_intr, 0, pasemi_mac_open()
1235 mac->rx->chan.irq, ret); pasemi_mac_open()
1249 free_irq(mac->tx->chan.irq, mac->tx); pasemi_mac_open()
1267 int txch = tx_ring(mac)->chan.chno; pasemi_mac_pause_txchan()
1289 int rxch = rx_ring(mac)->chan.chno; pasemi_mac_pause_rxchan()
1331 rxch = rx_ring(mac)->chan.chno; pasemi_mac_close()
1332 txch = tx_ring(mac)->chan.chno; pasemi_mac_close()
1371 free_irq(mac->tx->chan.irq, mac->tx); pasemi_mac_close()
1372 free_irq(mac->rx->chan.irq, mac->rx); pasemi_mac_close()
1428 csdma = csring->chan.ring_dma + (fill & (CS_RING_SIZE-1)) * 8 + 2; pasemi_mac_queue_csdesc()
1460 write_dma_reg(PAS_DMA_TXCHAN_INCR(csring->chan.chno), (cs_size) >> 1); pasemi_mac_queue_csdesc()
1472 write_dma_reg(PAS_DMA_TXCHAN_INCR(txring->chan.chno), 2); pasemi_mac_queue_csdesc()
1578 write_dma_reg(PAS_DMA_TXCHAN_INCR(txring->chan.chno), (nfrags+2) >> 1); pasemi_mac_start_tx()
1636 disable_irq(mac->tx->chan.irq); pasemi_mac_netpoll()
1637 pasemi_mac_tx_intr(mac->tx->chan.irq, mac->tx); pasemi_mac_netpoll()
1638 enable_irq(mac->tx->chan.irq); pasemi_mac_netpoll()
1640 disable_irq(mac->rx->chan.irq); pasemi_mac_netpoll()
1641 pasemi_mac_rx_intr(mac->rx->chan.irq, mac->rx); pasemi_mac_netpoll()
1642 enable_irq(mac->rx->chan.irq); pasemi_mac_netpoll()
1864 pasemi_dma_free_chan(&mac->tx->chan); pasemi_mac_remove()
1865 pasemi_dma_free_chan(&mac->rx->chan); pasemi_mac_remove()
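
Note: the recurring idiom in pasemi_mac.c is that chan->status points at a status word the DMA hardware updates in memory; the interrupt handlers test its CAUSE/SOFT/ERROR bits before acking through write_iob_reg(). A stripped-down sketch of the rx handler shape (the PINTC reset-bit name is an assumption, not among the hits above):

    static irqreturn_t example_rx_intr(int irq, void *data)
    {
            struct pasemi_mac_rxring *rxring = data;
            const struct pasemi_dmachan *chan = &rxring->chan;

            /* Status word is hardware-written; claim the interrupt only
             * if a cause bit is actually set for this channel. */
            if (!(*chan->status & PAS_STATUS_CAUSE_M))
                    return IRQ_NONE;

            /* Ack/reset the channel interrupt, then defer the real work. */
            write_iob_reg(PAS_IOB_DMA_RXCH_RESET(chan->chno),
                          PAS_IOB_DMA_RXCH_RESET_PINTC);
            return IRQ_HANDLED;
    }
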
/linux-4.1.27/include/linux/iio/
H A Dconsumer.h47 * @chan: The channel to be released.
49 void iio_channel_release(struct iio_channel *chan);
64 * @chan: Array of channels to be released.
66 void iio_channel_release_all(struct iio_channel *chan);
116 * @chan: The channel being queried.
122 int iio_read_channel_raw(struct iio_channel *chan,
127 * @chan: The channel being queried.
136 int iio_read_channel_average_raw(struct iio_channel *chan, int *val);
140 * @chan: The channel being queried.
151 int iio_read_channel_processed(struct iio_channel *chan, int *val);
155 * @chan: The channel being queried.
161 int iio_write_channel_raw(struct iio_channel *chan, int val);
175 * @chan: The channel being queried.
183 int iio_read_channel_scale(struct iio_channel *chan, int *val,
188 * @chan: The channel being queried
206 int iio_convert_raw_to_processed(struct iio_channel *chan, int raw,
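
Note: together these kernel-doc fragments cover the whole IIO consumer flow: obtain a channel, read it, release it. A minimal consumer sketch under those signatures (the device pointer and the "batt-volt" channel name are placeholders):

    #include <linux/err.h>
    #include <linux/iio/consumer.h>

    /* Sketch: read one processed value through the consumer interface. */
    static int example_read(struct device *dev, int *val)
    {
            struct iio_channel *chan;
            int ret;

            chan = iio_channel_get(dev, "batt-volt");  /* name is illustrative */
            if (IS_ERR(chan))
                    return PTR_ERR(chan);

            /* Prefer a processed value; callers can fall back to
             * iio_read_channel_raw() plus iio_read_channel_scale(). */
            ret = iio_read_channel_processed(chan, val);

            iio_channel_release(chan);
            return ret;
    }
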
/linux-4.1.27/include/linux/mtd/
H A Dlpc32xx_mlc.h17 bool (*dma_filter)(struct dma_chan *chan, void *filter_param);
H A Dlpc32xx_slc.h17 bool (*dma_filter)(struct dma_chan *chan, void *filter_param);
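
Note: both lpc32xx platform-data headers carry a dma_filter hook with the standard dmaengine filter signature, used to pick a specific channel out of dma_request_channel(). A typical filter under that signature (matching on chan->private is one common convention; real filters vary per DMA controller):

    static bool example_dma_filter(struct dma_chan *chan, void *filter_param)
    {
            /* Accept only the channel whose private cookie matches. */
            return chan->private == filter_param;
    }

    /* Usage sketch: */
    dma_cap_mask_t mask;
    struct dma_chan *chan;

    dma_cap_zero(mask);
    dma_cap_set(DMA_SLAVE, mask);
    chan = dma_request_channel(mask, example_dma_filter, param);
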
/linux-4.1.27/drivers/iio/imu/
H A Dadis_buffer.c27 const struct iio_chan_spec *chan; adis_update_scan_mode() local
63 chan = indio_dev->channels; adis_update_scan_mode()
64 for (i = 0; i < indio_dev->num_channels; i++, chan++) { adis_update_scan_mode()
65 if (!test_bit(chan->scan_index, scan_mask)) adis_update_scan_mode()
67 if (chan->scan_type.storagebits == 32) adis_update_scan_mode()
68 *tx++ = cpu_to_be16((chan->address + 2) << 8); adis_update_scan_mode()
69 *tx++ = cpu_to_be16(chan->address << 8); adis_update_scan_mode()
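
Note: adis_update_scan_mode() above builds the SPI tx buffer for burst reads: every enabled scan channel contributes one big-endian 16-bit register address, and 32-bit channels contribute two (high word first). Condensed with comments:

    /* Sketch of the tx-buffer build shown above. */
    for (i = 0; i < indio_dev->num_channels; i++, chan++) {
            if (!test_bit(chan->scan_index, scan_mask))
                    continue;       /* channel not in the active scan */
            if (chan->scan_type.storagebits == 32)
                    *tx++ = cpu_to_be16((chan->address + 2) << 8);  /* high word */
            *tx++ = cpu_to_be16(chan->address << 8);                /* low/only word */
    }
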
