Searched refs:idx (Results 1 - 200 of 2337) sorted by relevance


/linux-4.4.14/include/uapi/linux/netfilter/
xt_TCPOPTSTRIP.h
6 #define tcpoptstrip_set_bit(bmap, idx) \
7 (bmap[(idx) >> 5] |= 1U << (idx & 31))
8 #define tcpoptstrip_test_bit(bmap, idx) \
9 (((1U << (idx & 31)) & bmap[(idx) >> 5]) != 0)
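These two macros implement a plain u32-word bitmap: option kind idx lands in word idx >> 5, at bit idx & 31. A minimal standalone sketch of the same scheme (names here are illustrative, not the kernel header):

	#include <stdint.h>
	#include <stdio.h>

	#define BMAP_WORDS 8	/* 8 x 32 bits covers option kinds 0..255 */

	static void bmap_set(uint32_t *bmap, unsigned int idx)
	{
		bmap[idx >> 5] |= 1U << (idx & 31);
	}

	static int bmap_test(const uint32_t *bmap, unsigned int idx)
	{
		return ((1U << (idx & 31)) & bmap[idx >> 5]) != 0;
	}

	int main(void)
	{
		uint32_t strip[BMAP_WORDS] = { 0 };

		bmap_set(strip, 254);
		printf("254: %d, 8: %d\n", bmap_test(strip, 254), bmap_test(strip, 8));
		return 0;
	}
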
/linux-4.4.14/include/asm-generic/
fixmap.h
25 * 'index to address' translation. If anyone tries to use the idx
29 static __always_inline unsigned long fix_to_virt(const unsigned int idx) fix_to_virt() argument
31 BUILD_BUG_ON(idx >= __end_of_fixed_addresses); fix_to_virt()
32 return __fix_to_virt(idx); fix_to_virt()
63 #define set_fixmap(idx, phys) \
64 __set_fixmap(idx, phys, FIXMAP_PAGE_NORMAL)
68 #define clear_fixmap(idx) \
69 __set_fixmap(idx, 0, FIXMAP_PAGE_CLEAR)
73 #define __set_fixmap_offset(idx, phys, flags) \
76 __set_fixmap(idx, phys, flags); \
77 addr = fix_to_virt(idx) + ((phys) & (PAGE_SIZE - 1)); \
81 #define set_fixmap_offset(idx, phys) \
82 __set_fixmap_offset(idx, phys, FIXMAP_PAGE_NORMAL)
87 #define set_fixmap_nocache(idx, phys) \
88 __set_fixmap(idx, phys, FIXMAP_PAGE_NOCACHE)
90 #define set_fixmap_offset_nocache(idx, phys) \
91 __set_fixmap_offset(idx, phys, FIXMAP_PAGE_NOCACHE)
96 #define set_fixmap_io(idx, phys) \
97 __set_fixmap(idx, phys, FIXMAP_PAGE_IO)
99 #define set_fixmap_offset_io(idx, phys) \
100 __set_fixmap_offset(idx, phys, FIXMAP_PAGE_IO)
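fix_to_virt() converts a compile-time fixmap index into a fixed virtual address, and BUILD_BUG_ON rejects out-of-range constant indices at build time. A user-space sketch of just the index-to-address step, with made-up constants (FIXADDR_TOP and the enum entries here are illustrative):

	#include <stdio.h>

	#define FIXADDR_TOP 0xfffff000UL	/* illustrative top-of-fixmap address */
	#define PAGE_SHIFT  12

	enum fixed_addresses { FIX_HOLE, FIX_EARLYCON, __end_of_fixed_addresses };

	/* each index maps to the page idx pages below FIXADDR_TOP */
	static unsigned long fix_to_virt(unsigned int idx)
	{
		return FIXADDR_TOP - ((unsigned long)idx << PAGE_SHIFT);
	}

	int main(void)
	{
		printf("FIX_EARLYCON -> %#lx\n", fix_to_virt(FIX_EARLYCON));
		return 0;
	}
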
/linux-4.4.14/drivers/net/can/sja1000/
sja1000_isa.c
133 int idx = pdev->id; sja1000_isa_probe() local
136 dev_dbg(&pdev->dev, "probing idx=%d: port=%#lx, mem=%#lx, irq=%d\n", sja1000_isa_probe()
137 idx, port[idx], mem[idx], irq[idx]); sja1000_isa_probe()
139 if (mem[idx]) { sja1000_isa_probe()
140 if (!request_mem_region(mem[idx], iosize, DRV_NAME)) { sja1000_isa_probe()
144 base = ioremap_nocache(mem[idx], iosize); sja1000_isa_probe()
150 if (indirect[idx] > 0 || sja1000_isa_probe()
151 (indirect[idx] == -1 && indirect[0] > 0)) sja1000_isa_probe()
153 if (!request_region(port[idx], iosize, DRV_NAME)) { sja1000_isa_probe()
166 dev->irq = irq[idx]; sja1000_isa_probe()
168 if (mem[idx]) { sja1000_isa_probe()
170 dev->base_addr = mem[idx]; sja1000_isa_probe()
174 priv->reg_base = (void __iomem *)port[idx]; sja1000_isa_probe()
175 dev->base_addr = port[idx]; sja1000_isa_probe()
180 spin_lock_init(&indirect_lock[idx]); sja1000_isa_probe()
187 if (clk[idx]) sja1000_isa_probe()
188 priv->can.clock.freq = clk[idx] / 2; sja1000_isa_probe()
194 if (ocr[idx] != 0xff) sja1000_isa_probe()
195 priv->ocr = ocr[idx]; sja1000_isa_probe()
201 if (cdr[idx] != 0xff) sja1000_isa_probe()
202 priv->cdr = cdr[idx]; sja1000_isa_probe()
210 dev->dev_id = idx; sja1000_isa_probe()
224 if (mem[idx]) sja1000_isa_probe()
227 if (mem[idx]) sja1000_isa_probe()
228 release_mem_region(mem[idx], iosize); sja1000_isa_probe()
230 release_region(port[idx], iosize); sja1000_isa_probe()
239 int idx = pdev->id; sja1000_isa_remove() local
243 if (mem[idx]) { sja1000_isa_remove()
245 release_mem_region(mem[idx], SJA1000_IOSIZE); sja1000_isa_remove()
248 release_region(port[idx], SJA1000_IOSIZE_INDIRECT); sja1000_isa_remove()
250 release_region(port[idx], SJA1000_IOSIZE); sja1000_isa_remove()
267 int idx, err; sja1000_isa_init() local
269 for (idx = 0; idx < MAXDEV; idx++) { sja1000_isa_init()
270 if ((port[idx] || mem[idx]) && irq[idx]) { sja1000_isa_init()
271 sja1000_isa_devs[idx] = sja1000_isa_init()
272 platform_device_alloc(DRV_NAME, idx); sja1000_isa_init()
273 if (!sja1000_isa_devs[idx]) { sja1000_isa_init()
277 err = platform_device_add(sja1000_isa_devs[idx]); sja1000_isa_init()
279 platform_device_put(sja1000_isa_devs[idx]); sja1000_isa_init()
284 DRV_NAME, idx, port[idx], mem[idx], irq[idx]); sja1000_isa_init()
285 } else if (idx == 0 || port[idx] || mem[idx]) { sja1000_isa_init()
303 while (--idx >= 0) { sja1000_isa_init()
304 if (sja1000_isa_devs[idx]) sja1000_isa_init()
305 platform_device_unregister(sja1000_isa_devs[idx]); sja1000_isa_init()
313 int idx; sja1000_isa_exit() local
316 for (idx = 0; idx < MAXDEV; idx++) { sja1000_isa_exit()
317 if (sja1000_isa_devs[idx]) sja1000_isa_exit()
318 platform_device_unregister(sja1000_isa_devs[idx]); sja1000_isa_exit()
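This driver, like the cc770 one further down, takes its configuration as module-parameter arrays indexed by device slot: a slot is probed only when it has an I/O port or memory base plus an IRQ, and a non-zero mem[idx] selects memory-mapped over port I/O. A condensed sketch of that registration loop (hypothetical names, allocation and unwinding elided):

	#include <errno.h>

	#define MAXDEV 8

	static unsigned long port[MAXDEV];	/* module parameters in the real driver */
	static unsigned long mem[MAXDEV];
	static int irq[MAXDEV];

	static int isa_init_sketch(void)
	{
		int idx, registered = 0;

		for (idx = 0; idx < MAXDEV; idx++) {
			if ((port[idx] || mem[idx]) && irq[idx]) {
				/* the real driver allocates and adds a
				 * platform_device here, unregistering the
				 * already-added ones if that fails */
				registered++;
			}
		}
		return registered ? 0 : -ENODEV;
	}
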
/linux-4.4.14/tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/
Core.py
37 for idx in keys:
38 if not value and not idx:
39 string += flag_fields[event_name][field_name]['values'][idx]
41 if idx and (value & idx) == idx:
44 string += flag_fields[event_name][field_name]['values'][idx]
46 value &= ~idx
56 for idx in keys:
57 if not value and not idx:
58 string = symbolic_fields[event_name][field_name]['values'][idx]
60 if (value == idx):
61 string = symbolic_fields[event_name][field_name]['values'][idx]
79 for idx in keys:
80 if not value and not idx:
84 if idx and (value & idx) == idx:
87 string += trace_flags[idx]
89 value &= ~idx
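This helper, like the Perl version in the next entry, decodes a flags word by appending the name of every flag fully contained in the value and clearing each flag's bits as they are consumed; the zero key is only used when the whole value is zero. The same idiom in C (illustrative names):

	#include <stdio.h>
	#include <string.h>

	struct flag_name { unsigned long mask; const char *name; };

	static void flags_to_string(unsigned long value, const struct flag_name *tbl,
				    int n, char *buf, size_t len)
	{
		buf[0] = '\0';
		for (int i = 0; i < n; i++) {
			if (tbl[i].mask && (value & tbl[i].mask) == tbl[i].mask) {
				if (buf[0])
					strncat(buf, "|", len - strlen(buf) - 1);
				strncat(buf, tbl[i].name, len - strlen(buf) - 1);
				value &= ~tbl[i].mask;	/* consume these bits */
			}
		}
	}

	int main(void)
	{
		static const struct flag_name tbl[] = {
			{ 0x1, "READ" }, { 0x2, "WRITE" }, { 0x4, "SYNC" },
		};
		char buf[64];

		flags_to_string(0x5, tbl, 3, buf, sizeof(buf));
		printf("%s\n", buf);	/* READ|SYNC */
		return 0;
	}
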
/linux-4.4.14/tools/perf/scripts/perl/Perf-Trace-Util/lib/Perf/Trace/
Core.pm
39 foreach my $idx (sort {$a <=> $b} keys %trace_flags) {
40 if (!$value && !$idx) {
45 if ($idx && ($value & $idx) == $idx) {
49 $string .= "$trace_flags{$idx}";
51 $value &= ~$idx;
69 foreach my $idx (sort {$a <=> $b} keys %{$flag_fields{$event_name}{$field_name}{"values"}}) {
70 if (!$value && !$idx) {
71 $string .= "$flag_fields{$event_name}{$field_name}{'values'}{$idx}";
74 if ($idx && ($value & $idx) == $idx) {
78 $string .= "$flag_fields{$event_name}{$field_name}{'values'}{$idx}";
80 $value &= ~$idx;
109 foreach my $idx (sort {$a <=> $b} keys %{$flag_fields{$event}{$field}{"values"}}) {
110 print " value $idx: $flag_fields{$event}{$field}{'values'}{$idx}\n";
121 foreach my $idx (sort {$a <=> $b} keys %{$symbolic_fields{$event_name}{$field_name}{"values"}}) {
122 if (!$value && !$idx) {
123 return "$symbolic_fields{$event_name}{$field_name}{'values'}{$idx}";
126 if ($value == $idx) {
127 return "$symbolic_fields{$event_name}{$field_name}{'values'}{$idx}";
155 foreach my $idx (sort {$a <=> $b} keys %{$symbolic_fields{$event}{$field}{"values"}}) {
156 print " value $idx: $symbolic_fields{$event}{$field}{'values'}{$idx}\n";
/linux-4.4.14/drivers/net/ethernet/ti/
cpsw_ale.c
56 int idx; cpsw_ale_get_field() local
58 idx = start / 32; cpsw_ale_get_field()
59 start -= idx * 32; cpsw_ale_get_field()
60 idx = 2 - idx; /* flip */ cpsw_ale_get_field()
61 return (ale_entry[idx] >> start) & BITMASK(bits); cpsw_ale_get_field()
67 int idx; cpsw_ale_set_field() local
70 idx = start / 32; cpsw_ale_set_field()
71 start -= idx * 32; cpsw_ale_set_field()
72 idx = 2 - idx; /* flip */ cpsw_ale_set_field()
73 ale_entry[idx] &= ~(BITMASK(bits) << start); cpsw_ale_set_field()
74 ale_entry[idx] |= (value << start); cpsw_ale_set_field()
119 static int cpsw_ale_read(struct cpsw_ale *ale, int idx, u32 *ale_entry) cpsw_ale_read() argument
123 WARN_ON(idx > ale->params.ale_entries); cpsw_ale_read()
125 __raw_writel(idx, ale->params.ale_regs + ALE_TABLE_CONTROL); cpsw_ale_read()
131 return idx; cpsw_ale_read()
134 static int cpsw_ale_write(struct cpsw_ale *ale, int idx, u32 *ale_entry) cpsw_ale_write() argument
138 WARN_ON(idx > ale->params.ale_entries); cpsw_ale_write()
144 __raw_writel(idx | ALE_TABLE_WRITE, ale->params.ale_regs + cpsw_ale_write()
147 return idx; cpsw_ale_write()
153 int type, idx; cpsw_ale_match_addr() local
155 for (idx = 0; idx < ale->params.ale_entries; idx++) { cpsw_ale_match_addr()
158 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_match_addr()
166 return idx; cpsw_ale_match_addr()
174 int type, idx; cpsw_ale_match_vlan() local
176 for (idx = 0; idx < ale->params.ale_entries; idx++) { cpsw_ale_match_vlan()
177 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_match_vlan()
182 return idx; cpsw_ale_match_vlan()
190 int type, idx; cpsw_ale_match_free() local
192 for (idx = 0; idx < ale->params.ale_entries; idx++) { cpsw_ale_match_free()
193 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_match_free()
196 return idx; cpsw_ale_match_free()
204 int type, idx; cpsw_ale_find_ageable() local
206 for (idx = 0; idx < ale->params.ale_entries; idx++) { cpsw_ale_find_ageable()
207 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_find_ageable()
216 return idx; cpsw_ale_find_ageable()
241 int ret, idx; cpsw_ale_flush_multicast() local
243 for (idx = 0; idx < ale->params.ale_entries; idx++) { cpsw_ale_flush_multicast()
244 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_flush_multicast()
265 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_flush_multicast()
286 int idx; cpsw_ale_add_ucast() local
296 idx = cpsw_ale_match_addr(ale, addr, (flags & ALE_VLAN) ? vid : 0); cpsw_ale_add_ucast()
297 if (idx < 0) cpsw_ale_add_ucast()
298 idx = cpsw_ale_match_free(ale); cpsw_ale_add_ucast()
299 if (idx < 0) cpsw_ale_add_ucast()
300 idx = cpsw_ale_find_ageable(ale); cpsw_ale_add_ucast()
301 if (idx < 0) cpsw_ale_add_ucast()
304 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_add_ucast()
313 int idx; cpsw_ale_del_ucast() local
315 idx = cpsw_ale_match_addr(ale, addr, (flags & ALE_VLAN) ? vid : 0); cpsw_ale_del_ucast()
316 if (idx < 0) cpsw_ale_del_ucast()
320 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_del_ucast()
329 int idx, mask; cpsw_ale_add_mcast() local
331 idx = cpsw_ale_match_addr(ale, addr, (flags & ALE_VLAN) ? vid : 0); cpsw_ale_add_mcast()
332 if (idx >= 0) cpsw_ale_add_mcast()
333 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_add_mcast()
345 if (idx < 0) cpsw_ale_add_mcast()
346 idx = cpsw_ale_match_free(ale); cpsw_ale_add_mcast()
347 if (idx < 0) cpsw_ale_add_mcast()
348 idx = cpsw_ale_find_ageable(ale); cpsw_ale_add_mcast()
349 if (idx < 0) cpsw_ale_add_mcast()
352 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_add_mcast()
361 int idx; cpsw_ale_del_mcast() local
363 idx = cpsw_ale_match_addr(ale, addr, (flags & ALE_VLAN) ? vid : 0); cpsw_ale_del_mcast()
364 if (idx < 0) cpsw_ale_del_mcast()
367 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_del_mcast()
374 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_del_mcast()
383 int idx; cpsw_ale_add_vlan() local
385 idx = cpsw_ale_match_vlan(ale, vid); cpsw_ale_add_vlan()
386 if (idx >= 0) cpsw_ale_add_vlan()
387 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_add_vlan()
397 if (idx < 0) cpsw_ale_add_vlan()
398 idx = cpsw_ale_match_free(ale); cpsw_ale_add_vlan()
399 if (idx < 0) cpsw_ale_add_vlan()
400 idx = cpsw_ale_find_ageable(ale); cpsw_ale_add_vlan()
401 if (idx < 0) cpsw_ale_add_vlan()
404 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_add_vlan()
412 int idx; cpsw_ale_del_vlan() local
414 idx = cpsw_ale_match_vlan(ale, vid); cpsw_ale_del_vlan()
415 if (idx < 0) cpsw_ale_del_vlan()
418 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_del_vlan()
425 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_del_vlan()
433 int type, idx; cpsw_ale_set_allmulti() local
443 for (idx = 0; idx < ale->params.ale_entries; idx++) { cpsw_ale_set_allmulti()
444 cpsw_ale_read(ale, idx, ale_entry); cpsw_ale_set_allmulti()
455 cpsw_ale_write(ale, idx, ale_entry); cpsw_ale_set_allmulti()
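cpsw_ale_get_field()/cpsw_ale_set_field() above treat a three-word ALE entry as one contiguous bitfield, with the "2 - idx" flip because word 0 holds the most significant bits; the rest of the file then scans the table by reading each idx in turn. A standalone sketch of the field helpers (same logic, illustrative names):

	#include <stdint.h>

	#define BITMASK(bits) ((1U << (bits)) - 1)

	/* entry[0] is the most significant word, hence the "2 - idx" flip */
	static uint32_t ale_get_field(const uint32_t entry[3], int start, int bits)
	{
		int idx = start / 32;

		start -= idx * 32;
		idx = 2 - idx;
		return (entry[idx] >> start) & BITMASK(bits);
	}

	static void ale_set_field(uint32_t entry[3], int start, int bits,
				  uint32_t value)
	{
		int idx = start / 32;

		start -= idx * 32;
		idx = 2 - idx;
		entry[idx] &= ~(BITMASK(bits) << start);
		entry[idx] |= (value & BITMASK(bits)) << start;
	}
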
/linux-4.4.14/sound/aoa/codecs/
tas-basstreble.h
91 static inline u8 tas3004_treble(int idx) tas3004_treble() argument
93 return tas3004_treble_table[idx]; tas3004_treble()
127 static inline u8 tas3004_bass(int idx) tas3004_bass() argument
129 u8 result = tas3004_treble_table[idx]; tas3004_bass()
131 if (idx >= 50) tas3004_bass()
132 result += tas3004_bass_diff_to_treble[idx-50]; tas3004_bass()
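tas3004_bass() reuses the treble register table and, for indices of 50 and above, adds a per-index correction from a second table. A sketch of the pattern with placeholder table contents (the real values are hardware register settings):

	#include <stdint.h>

	static const uint8_t treble_table[72] = { 0 /* register values elided */ };
	static const uint8_t bass_diff_to_treble[22] = { 0 /* corrections elided */ };

	static uint8_t bass_value(int idx)
	{
		uint8_t result = treble_table[idx];

		if (idx >= 50)
			result += bass_diff_to_treble[idx - 50];
		return result;
	}
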
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb/
fpga_defs.h
214 #define MAC_REG_ADDR(idx, reg) (MAC_REG_BASE + (idx) * 128 + (reg))
216 #define MAC_REG_IDLO(idx) MAC_REG_ADDR(idx, A_GMAC_MACID_LO)
217 #define MAC_REG_IDHI(idx) MAC_REG_ADDR(idx, A_GMAC_MACID_HI)
218 #define MAC_REG_CSR(idx) MAC_REG_ADDR(idx, A_GMAC_CSR)
219 #define MAC_REG_IFS(idx) MAC_REG_ADDR(idx, A_GMAC_IFS)
220 #define MAC_REG_LARGEFRAMELENGTH(idx) MAC_REG_ADDR(idx, A_GMAC_JUMBO_FRAME_LEN)
221 #define MAC_REG_LINKDLY(idx) MAC_REG_ADDR(idx, A_GMAC_LNK_DLY)
222 #define MAC_REG_PAUSETIME(idx) MAC_REG_ADDR(idx, A_GMAC_PAUSETIME)
223 #define MAC_REG_CASTLO(idx) MAC_REG_ADDR(idx, A_GMAC_MCAST_LO)
224 #define MAC_REG_MCASTHI(idx) MAC_REG_ADDR(idx, A_GMAC_MCAST_HI)
225 #define MAC_REG_CASTMASKLO(idx) MAC_REG_ADDR(idx, A_GMAC_MCAST_MASK_LO)
226 #define MAC_REG_MCASTMASKHI(idx) MAC_REG_ADDR(idx, A_GMAC_MCAST_MASK_HI)
227 #define MAC_REG_RMCNT(idx) MAC_REG_ADDR(idx, A_GMAC_RMT_CNT)
228 #define MAC_REG_RMDATA(idx) MAC_REG_ADDR(idx, A_GMAC_RMT_DATA)
229 #define MAC_REG_GMRANDBACKOFFSEED(idx) MAC_REG_ADDR(idx, A_GMAC_BACKOFF_SEED)
230 #define MAC_REG_TXFTHRESHOLDS(idx) MAC_REG_ADDR(idx, A_GMAC_TXF_THRES)
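MAC_REG_ADDR() turns a MAC index into an absolute register address: each MAC instance owns a 128-byte register window, so every per-MAC macro is just base + idx * 128 + register offset. The same computation as a function (base value illustrative):

	/* e.g. with a base of 0x600, the CSR of MAC 2 is 0x600 + 2*128 + A_GMAC_CSR */
	static unsigned int mac_reg_addr(unsigned int idx, unsigned int reg)
	{
		return 0x600 /* MAC_REG_BASE, illustrative */ + idx * 128 + reg;
	}
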
/linux-4.4.14/include/xen/interface/io/
console.h
14 #define MASK_XENCONS_IDX(idx, ring) ((idx) & (sizeof(ring)-1))
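MASK_XENCONS_IDX() masks a free-running ring index down to a slot, which requires the ring size to be a power of two; producer and consumer counters are only masked on access, so full and empty stay distinguishable. A sketch of that idiom (illustrative names):

	#include <stdint.h>

	#define MASK_RING_IDX(idx, ring) ((idx) & (sizeof(ring) - 1))

	struct cons_ring {
		uint32_t cons, prod;	/* free-running counters */
		char buf[256];		/* power-of-two size */
	};

	static int ring_put(struct cons_ring *r, char c)
	{
		if (r->prod - r->cons == sizeof(r->buf))
			return 0;				/* full */
		r->buf[MASK_RING_IDX(r->prod++, r->buf)] = c;
		return 1;
	}

	static int ring_get(struct cons_ring *r, char *c)
	{
		if (r->prod == r->cons)
			return 0;				/* empty */
		*c = r->buf[MASK_RING_IDX(r->cons++, r->buf)];
		return 1;
	}
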
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/subdev/bios/
P0260.h
10 u32 nvbios_P0260Ee(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr);
11 u32 nvbios_P0260Ep(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr,
18 u32 nvbios_P0260Xe(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr);
19 u32 nvbios_P0260Xp(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr,
M0205.h
17 u32 nvbios_M0205Ee(struct nvkm_bios *, int idx,
19 u32 nvbios_M0205Ep(struct nvkm_bios *, int idx,
26 u32 nvbios_M0205Se(struct nvkm_bios *, int ent, int idx, u8 *ver, u8 *hdr);
27 u32 nvbios_M0205Sp(struct nvkm_bios *, int ent, int idx, u8 *ver, u8 *hdr,
M0209.h
15 u32 nvbios_M0209Ee(struct nvkm_bios *, int idx,
17 u32 nvbios_M0209Ep(struct nvkm_bios *, int idx,
24 u32 nvbios_M0209Se(struct nvkm_bios *, int ent, int idx, u8 *ver, u8 *hdr);
25 u32 nvbios_M0209Sp(struct nvkm_bios *, int ent, int idx, u8 *ver, u8 *hdr,
cstep.h
11 u16 nvbios_cstepEe(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr);
12 u16 nvbios_cstepEp(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr,
23 u16 nvbios_cstepXe(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr);
24 u16 nvbios_cstepXp(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr,
timing.h
7 u16 nvbios_timingEe(struct nvkm_bios *, int idx,
9 u16 nvbios_timingEp(struct nvkm_bios *, int idx,
xpio.h
14 u16 dcb_xpio_table(struct nvkm_bios *, u8 idx,
16 u16 dcb_xpio_parse(struct nvkm_bios *, u8 idx,
rammap.h
8 u32 nvbios_rammapEe(struct nvkm_bios *, int idx,
12 u32 nvbios_rammapEp(struct nvkm_bios *, int idx,
18 u8 ever, u8 ehdr, u8 ecnt, u8 elen, int idx,
20 u32 nvbios_rammapSp_from_perf(struct nvkm_bios *bios, u32 data, u8 size, int idx,
23 u8 ever, u8 ehdr, u8 ecnt, u8 elen, int idx,
perf.h
18 u16 nvbios_perf_entry(struct nvkm_bios *, int idx,
20 u16 nvbios_perfEp(struct nvkm_bios *, int idx,
31 u32 nvbios_perfSe(struct nvkm_bios *, u32 data, int idx,
33 u32 nvbios_perfSp(struct nvkm_bios *, u32 data, int idx,
pmu.h
13 u32 nvbios_pmuEe(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr);
14 u32 nvbios_pmuEp(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr,
vmap.h
18 u16 nvbios_vmap_entry(struct nvkm_bios *, int idx, u8 *ver, u8 *len);
19 u16 nvbios_vmap_entry_parse(struct nvkm_bios *, int idx, u8 *ver, u8 *len,
disp.h
10 u16 nvbios_disp_entry(struct nvkm_bios *, u8 idx, u8 *ver, u8 *hdr, u8 *sub);
11 u16 nvbios_disp_parse(struct nvkm_bios *, u8 idx, u8 *ver, u8 *hdr, u8 *sub,
20 u16 nvbios_outp_entry(struct nvkm_bios *, u8 idx,
22 u16 nvbios_outp_parse(struct nvkm_bios *, u8 idx,
32 u16 nvbios_ocfg_entry(struct nvkm_bios *, u16 outp, u8 idx,
34 u16 nvbios_ocfg_parse(struct nvkm_bios *, u16 outp, u8 idx,
gpio.h
42 u16 dcb_gpio_entry(struct nvkm_bios *, int idx, int ent, u8 *ver, u8 *len);
43 u16 dcb_gpio_parse(struct nvkm_bios *, int idx, int ent, u8 *ver, u8 *len,
45 u16 dcb_gpio_match(struct nvkm_bios *, int idx, u8 func, u8 line,
M0203.h
24 u32 nvbios_M0203Ee(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr);
25 u32 nvbios_M0203Ep(struct nvkm_bios *, int idx, u8 *ver, u8 *hdr,
boost.h
11 u16 nvbios_boostEe(struct nvkm_bios *, int idx, u8 *, u8 *, u8 *, u8 *);
12 u16 nvbios_boostEp(struct nvkm_bios *, int idx, u8 *, u8 *, u8 *, u8 *,
dp.h
11 u16 nvbios_dpout_parse(struct nvkm_bios *, u8 idx,
26 nvbios_dpcfg_parse(struct nvkm_bios *, u16 outp, u8 idx,
volt.h
33 u16 nvbios_volt_entry(struct nvkm_bios *, int idx, u8 *ver, u8 *len);
34 u16 nvbios_volt_entry_parse(struct nvkm_bios *, int idx, u8 *ver, u8 *len,
/linux-4.4.14/tools/perf/util/
perf_regs.c
12 int i, idx = 0; perf_reg_value() local
23 idx++; perf_reg_value()
27 regs->cache_regs[id] = regs->regs[idx]; perf_reg_value()
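perf_reg_value() locates a register's slot in the packed sample array by counting how many mask bits precede it, since only registers present in the sample mask occupy space. A sketch of that sparse lookup (illustrative, not the exact perf code):

	#include <stdint.h>

	/* position of register `id` within an array that stores only the
	 * registers whose bits are set in `mask` (id must be set in mask) */
	static int packed_index(uint64_t mask, int id)
	{
		int i, idx = 0;

		for (i = 0; i < id; i++)
			if (mask & (1ULL << i))
				idx++;
		return idx;
	}
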
trace-event-parse.c
177 static int idx; trace_find_next_event() local
183 idx = 0; trace_find_next_event()
187 if (idx < pevent->nr_events && event == pevent->events[idx]) { trace_find_next_event()
188 idx++; trace_find_next_event()
189 if (idx == pevent->nr_events) trace_find_next_event()
191 return pevent->events[idx]; trace_find_next_event()
194 for (idx = 1; idx < pevent->nr_events; idx++) { trace_find_next_event()
195 if (event == pevent->events[idx - 1]) trace_find_next_event()
196 return pevent->events[idx]; trace_find_next_event()
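trace_find_next_event() keeps a static cursor so that repeated calls walk pevent->events in order without rescanning, falling back to a linear search when the caller hands back an unexpected event. A sketch of the idiom (illustrative types and names):

	#include <stddef.h>

	struct event { int id; };

	/* returns the event after `ev`, or the first one when ev is NULL;
	 * the static cursor makes in-order traversal O(1) per call */
	static struct event *find_next_event(struct event **events, int nr,
					     struct event *ev)
	{
		static int idx;
		int i;

		if (!ev) {
			idx = 0;
			return nr ? events[0] : NULL;
		}
		if (idx < nr && ev == events[idx]) {
			idx++;
			return idx < nr ? events[idx] : NULL;
		}
		for (i = 1; i < nr; i++) {
			if (ev == events[i - 1]) {
				idx = i;
				return events[i];
			}
		}
		return NULL;
	}
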
/linux-4.4.14/drivers/net/can/cc770/
cc770_isa.c
175 int idx = pdev->id; cc770_isa_probe() local
179 dev_dbg(&pdev->dev, "probing idx=%d: port=%#lx, mem=%#lx, irq=%d\n", cc770_isa_probe()
180 idx, port[idx], mem[idx], irq[idx]); cc770_isa_probe()
181 if (mem[idx]) { cc770_isa_probe()
182 if (!request_mem_region(mem[idx], iosize, KBUILD_MODNAME)) { cc770_isa_probe()
186 base = ioremap_nocache(mem[idx], iosize); cc770_isa_probe()
192 if (indirect[idx] > 0 || cc770_isa_probe()
193 (indirect[idx] == -1 && indirect[0] > 0)) cc770_isa_probe()
195 if (!request_region(port[idx], iosize, KBUILD_MODNAME)) { cc770_isa_probe()
208 dev->irq = irq[idx]; cc770_isa_probe()
210 if (mem[idx]) { cc770_isa_probe()
212 dev->base_addr = mem[idx]; cc770_isa_probe()
216 priv->reg_base = (void __iomem *)port[idx]; cc770_isa_probe()
217 dev->base_addr = port[idx]; cc770_isa_probe()
228 if (clk[idx]) cc770_isa_probe()
229 clktmp = clk[idx]; cc770_isa_probe()
236 if (cir[idx] != 0xff) { cc770_isa_probe()
237 priv->cpu_interface = cir[idx]; cc770_isa_probe()
254 if (bcr[idx] != 0xff) cc770_isa_probe()
255 priv->bus_config = bcr[idx]; cc770_isa_probe()
261 if (cor[idx] != 0xff) cc770_isa_probe()
262 priv->clkout = cor[idx]; cc770_isa_probe()
283 if (mem[idx]) cc770_isa_probe()
286 if (mem[idx]) cc770_isa_probe()
287 release_mem_region(mem[idx], iosize); cc770_isa_probe()
289 release_region(port[idx], iosize); cc770_isa_probe()
298 int idx = pdev->id; cc770_isa_remove() local
302 if (mem[idx]) { cc770_isa_remove()
304 release_mem_region(mem[idx], CC770_IOSIZE); cc770_isa_remove()
307 release_region(port[idx], CC770_IOSIZE_INDIRECT); cc770_isa_remove()
309 release_region(port[idx], CC770_IOSIZE); cc770_isa_remove()
326 int idx, err; cc770_isa_init() local
328 for (idx = 0; idx < ARRAY_SIZE(cc770_isa_devs); idx++) { cc770_isa_init()
329 if ((port[idx] || mem[idx]) && irq[idx]) { cc770_isa_init()
330 cc770_isa_devs[idx] = cc770_isa_init()
331 platform_device_alloc(KBUILD_MODNAME, idx); cc770_isa_init()
332 if (!cc770_isa_devs[idx]) { cc770_isa_init()
336 err = platform_device_add(cc770_isa_devs[idx]); cc770_isa_init()
338 platform_device_put(cc770_isa_devs[idx]); cc770_isa_init()
343 idx, port[idx], mem[idx], irq[idx]); cc770_isa_init()
344 } else if (idx == 0 || port[idx] || mem[idx]) { cc770_isa_init()
360 while (--idx >= 0) { cc770_isa_init()
361 if (cc770_isa_devs[idx]) cc770_isa_init()
362 platform_device_unregister(cc770_isa_devs[idx]); cc770_isa_init()
371 int idx; cc770_isa_exit() local
374 for (idx = 0; idx < ARRAY_SIZE(cc770_isa_devs); idx++) { cc770_isa_exit()
375 if (cc770_isa_devs[idx]) cc770_isa_exit()
376 platform_device_unregister(cc770_isa_devs[idx]); cc770_isa_exit()
/linux-4.4.14/drivers/gpu/drm/radeon/
drm_buffer.c
47 int idx; drm_buffer_alloc() local
63 for (idx = 0; idx < nr_pages; ++idx) { drm_buffer_alloc()
65 (*buf)->data[idx] = drm_buffer_alloc()
66 kmalloc(min(PAGE_SIZE, size - idx * PAGE_SIZE), drm_buffer_alloc()
70 if ((*buf)->data[idx] == NULL) { drm_buffer_alloc()
73 idx + 1, size, nr_pages); drm_buffer_alloc()
83 for (; idx >= 0; --idx) drm_buffer_alloc()
84 kfree((*buf)->data[idx]); drm_buffer_alloc()
101 int idx; drm_buffer_copy_from_user() local
110 for (idx = 0; idx < nr_pages; ++idx) { drm_buffer_copy_from_user()
112 if (copy_from_user(buf->data[idx], drm_buffer_copy_from_user()
113 user_data + idx * PAGE_SIZE, drm_buffer_copy_from_user()
114 min(PAGE_SIZE, size - idx * PAGE_SIZE))) { drm_buffer_copy_from_user()
117 user_data, buf, idx); drm_buffer_copy_from_user()
135 int idx; drm_buffer_free() local
136 for (idx = 0; idx < nr_pages; ++idx) drm_buffer_free()
137 kfree(buf->data[idx]); drm_buffer_free()
158 int idx = drm_buffer_index(buf); drm_buffer_read_object() local
162 if (idx + objsize <= PAGE_SIZE) { drm_buffer_read_object()
163 obj = &buf->data[page][idx]; drm_buffer_read_object()
166 int beginsz = PAGE_SIZE - idx; drm_buffer_read_object()
167 memcpy(stack_obj, &buf->data[page][idx], beginsz); drm_buffer_read_object()
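drm_buffer stores its data in PAGE_SIZE allocations, so reading an object must handle the case where it straddles a page boundary: in-page objects are returned by pointer, while split ones are stitched into a caller-supplied stack buffer. A simplified sketch of that split copy (illustrative signature, assumes the next page exists when straddling):

	#include <string.h>

	#define PAGE_SIZE 4096UL

	/* returns a pointer to the object at (page, idx); uses stack_obj
	 * only when the object crosses into the next page */
	static void *buffer_read_object(char **data, int page, unsigned long idx,
					unsigned long objsize, void *stack_obj)
	{
		unsigned long beginsz;

		if (idx + objsize <= PAGE_SIZE)
			return &data[page][idx];

		beginsz = PAGE_SIZE - idx;
		memcpy(stack_obj, &data[page][idx], beginsz);
		memcpy((char *)stack_obj + beginsz, data[page + 1],
		       objsize - beginsz);
		return stack_obj;
	}
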
evergreen_cs.c
755 unsigned idx) evergreen_cs_track_validate_texture()
763 texdw[0] = radeon_get_ib_value(p, idx + 0); evergreen_cs_track_validate_texture()
764 texdw[1] = radeon_get_ib_value(p, idx + 1); evergreen_cs_track_validate_texture()
765 texdw[2] = radeon_get_ib_value(p, idx + 2); evergreen_cs_track_validate_texture()
766 texdw[3] = radeon_get_ib_value(p, idx + 3); evergreen_cs_track_validate_texture()
767 texdw[4] = radeon_get_ib_value(p, idx + 4); evergreen_cs_track_validate_texture()
768 texdw[5] = radeon_get_ib_value(p, idx + 5); evergreen_cs_track_validate_texture()
769 texdw[6] = radeon_get_ib_value(p, idx + 6); evergreen_cs_track_validate_texture()
770 texdw[7] = radeon_get_ib_value(p, idx + 7); evergreen_cs_track_validate_texture()
1049 unsigned idx, unsigned reg) evergreen_packet0_check()
1058 idx, reg); evergreen_packet0_check()
1064 reg, idx); evergreen_packet0_check()
1074 unsigned idx; evergreen_cs_parse_packet0() local
1077 idx = pkt->idx + 1; evergreen_cs_parse_packet0()
1079 for (i = 0; i <= pkt->count; i++, idx++, reg += 4) { evergreen_cs_parse_packet0()
1080 r = evergreen_packet0_check(p, pkt, idx, reg); evergreen_cs_parse_packet0()
1092 * @idx: index into the cs buffer
1094 static int evergreen_cs_handle_reg(struct radeon_cs_parser *p, u32 reg, u32 idx) evergreen_cs_handle_reg() argument
1132 /*tmp =radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1133 ib[idx] = 0;*/ evergreen_cs_handle_reg()
1149 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1152 track->db_depth_control = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1170 track->db_z_info = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1178 ib[idx] &= ~Z_ARRAY_MODE(0xf); evergreen_cs_handle_reg()
1180 ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); evergreen_cs_handle_reg()
1188 ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks)); evergreen_cs_handle_reg()
1189 ib[idx] |= DB_TILE_SPLIT(tile_split) | evergreen_cs_handle_reg()
1198 track->db_s_info = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1202 track->db_depth_view = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1206 track->db_depth_size = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1210 track->db_depth_slice = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1220 track->db_z_read_offset = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1221 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1232 track->db_z_write_offset = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1233 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1244 track->db_s_read_offset = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1245 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1256 track->db_s_write_offset = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1257 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1262 track->vgt_strmout_config = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1266 track->vgt_strmout_buffer_config = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1280 track->vgt_strmout_bo_offset[tmp] = radeon_get_ib_value(p, idx) << 8; evergreen_cs_handle_reg()
1281 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1291 track->vgt_strmout_size[tmp] = radeon_get_ib_value(p, idx) * 4; evergreen_cs_handle_reg()
1301 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1303 track->cb_target_mask = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1307 track->cb_shader_mask = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1316 tmp = radeon_get_ib_value(p, idx) & MSAA_NUM_SAMPLES_MASK; evergreen_cs_handle_reg()
1325 tmp = radeon_get_ib_value(p, idx) & CAYMAN_MSAA_NUM_SAMPLES_MASK; evergreen_cs_handle_reg()
1337 track->cb_color_view[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1345 track->cb_color_view[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1357 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1365 ib[idx] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); evergreen_cs_handle_reg()
1375 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1383 ib[idx] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); evergreen_cs_handle_reg()
1397 track->cb_color_pitch[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1405 track->cb_color_pitch[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1417 track->cb_color_slice[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1418 track->cb_color_slice_idx[tmp] = idx; evergreen_cs_handle_reg()
1426 track->cb_color_slice[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1427 track->cb_color_slice_idx[tmp] = idx; evergreen_cs_handle_reg()
1451 ib[idx] |= CB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks)); evergreen_cs_handle_reg()
1452 ib[idx] |= CB_TILE_SPLIT(tile_split) | evergreen_cs_handle_reg()
1459 track->cb_color_attrib[tmp] = ib[idx]; evergreen_cs_handle_reg()
1479 ib[idx] |= CB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks)); evergreen_cs_handle_reg()
1480 ib[idx] |= CB_TILE_SPLIT(tile_split) | evergreen_cs_handle_reg()
1487 track->cb_color_attrib[tmp] = ib[idx]; evergreen_cs_handle_reg()
1504 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1521 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1533 track->cb_color_fmask_slice[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1544 track->cb_color_cmask_slice[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1561 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1562 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1577 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1578 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1589 track->htile_offset = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1590 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1596 track->htile_surface = radeon_get_ib_value(p, idx); evergreen_cs_handle_reg()
1598 ib[idx] |= 3; evergreen_cs_handle_reg()
1707 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1721 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1735 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_cs_handle_reg()
1738 track->sx_misc_kill_all_prims = (radeon_get_ib_value(p, idx) & 0x1) != 0; evergreen_cs_handle_reg()
1741 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); evergreen_cs_handle_reg()
1777 unsigned idx; evergreen_packet3_check() local
1785 idx = pkt->idx + 1; evergreen_packet3_check()
1786 idx_value = radeon_get_ib_value(p, idx); evergreen_packet3_check()
1800 tmp = radeon_get_ib_value(p, idx + 1); evergreen_packet3_check()
1822 ib[idx + 0] = offset; evergreen_packet3_check()
1823 ib[idx + 1] = (tmp & 0xffffff00) | (upper_32_bits(offset) & 0xff); evergreen_packet3_check()
1866 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32); evergreen_packet3_check()
1868 ib[idx+0] = offset; evergreen_packet3_check()
1869 ib[idx+1] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
1901 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32); evergreen_packet3_check()
1903 ib[idx+0] = offset; evergreen_packet3_check()
1904 ib[idx+1] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
1928 radeon_get_ib_value(p, idx+1) + evergreen_packet3_check()
1929 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); evergreen_packet3_check()
1931 ib[idx+1] = offset; evergreen_packet3_check()
1932 ib[idx+2] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
1948 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx); evergreen_packet3_check()
1959 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx); evergreen_packet3_check()
2024 ib[idx+1] = reloc->gpu_offset; evergreen_packet3_check()
2025 ib[idx+2] = upper_32_bits(reloc->gpu_offset) & 0xff; evergreen_packet3_check()
2064 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx); evergreen_packet3_check()
2078 ib[idx+0] = idx_value + (u32)(reloc->gpu_offset & 0xffffffff); evergreen_packet3_check()
2101 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) + evergreen_packet3_check()
2102 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); evergreen_packet3_check()
2104 ib[idx+1] = (ib[idx+1] & 0x3) | (offset & 0xfffffffc); evergreen_packet3_check()
2105 ib[idx+2] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2119 command = radeon_get_ib_value(p, idx+4); evergreen_packet3_check()
2121 info = radeon_get_ib_value(p, idx+1); evergreen_packet3_check()
2154 tmp = radeon_get_ib_value(p, idx) + evergreen_packet3_check()
2155 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32); evergreen_packet3_check()
2165 ib[idx] = offset; evergreen_packet3_check()
2166 ib[idx+1] = (ib[idx+1] & 0xffffff00) | (upper_32_bits(offset) & 0xff); evergreen_packet3_check()
2192 tmp = radeon_get_ib_value(p, idx+2) + evergreen_packet3_check()
2193 ((u64)(radeon_get_ib_value(p, idx+3) & 0xff) << 32); evergreen_packet3_check()
2203 ib[idx+2] = offset; evergreen_packet3_check()
2204 ib[idx+3] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2218 if (radeon_get_ib_value(p, idx + 1) != 0xffffffff || evergreen_packet3_check()
2219 radeon_get_ib_value(p, idx + 2) != 0) { evergreen_packet3_check()
2225 ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); evergreen_packet3_check()
2242 (radeon_get_ib_value(p, idx+1) & 0xfffffff8) + evergreen_packet3_check()
2243 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); evergreen_packet3_check()
2245 ib[idx+1] = offset & 0xfffffff8; evergreen_packet3_check()
2246 ib[idx+2] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2264 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) + evergreen_packet3_check()
2265 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); evergreen_packet3_check()
2267 ib[idx+1] = offset & 0xfffffffc; evergreen_packet3_check()
2268 ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff); evergreen_packet3_check()
2286 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) + evergreen_packet3_check()
2287 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); evergreen_packet3_check()
2289 ib[idx+1] = offset & 0xfffffffc; evergreen_packet3_check()
2290 ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff); evergreen_packet3_check()
2302 for (reg = start_reg, idx++; reg <= end_reg; reg += 4, idx++) { evergreen_packet3_check()
2305 r = evergreen_cs_handle_reg(p, reg, idx); evergreen_packet3_check()
2319 for (reg = start_reg, idx++; reg <= end_reg; reg += 4, idx++) { evergreen_packet3_check()
2322 r = evergreen_cs_handle_reg(p, reg, idx); evergreen_packet3_check()
2345 switch (G__SQ_CONSTANT_TYPE(radeon_get_ib_value(p, idx+1+(i*8)+7))) { evergreen_packet3_check()
2354 ib[idx+1+(i*8)+1] |= evergreen_packet3_check()
2362 ib[idx+1+(i*8)+6] |= TEX_TILE_SPLIT(tile_split); evergreen_packet3_check()
2363 ib[idx+1+(i*8)+7] |= evergreen_packet3_check()
2374 tex_dim = ib[idx+1+(i*8)+0] & 0x7; evergreen_packet3_check()
2375 mip_address = ib[idx+1+(i*8)+3]; evergreen_packet3_check()
2394 r = evergreen_cs_track_validate_texture(p, texture, mipmap, idx+1+(i*8)); evergreen_packet3_check()
2397 ib[idx+1+(i*8)+2] += toffset; evergreen_packet3_check()
2398 ib[idx+1+(i*8)+3] += moffset; evergreen_packet3_check()
2409 offset = radeon_get_ib_value(p, idx+1+(i*8)+0); evergreen_packet3_check()
2410 size = radeon_get_ib_value(p, idx+1+(i*8)+1); evergreen_packet3_check()
2414 ib[idx+1+(i*8)+1] = radeon_bo_size(reloc->robj) - offset; evergreen_packet3_check()
2418 ib[idx+1+(i*8)+0] = offset64; evergreen_packet3_check()
2419 ib[idx+1+(i*8)+2] = (ib[idx+1+(i*8)+2] & 0xffffff00) | evergreen_packet3_check()
2491 offset = radeon_get_ib_value(p, idx+1); evergreen_packet3_check()
2492 offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; evergreen_packet3_check()
2499 ib[idx+1] = offset; evergreen_packet3_check()
2500 ib[idx+2] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2510 offset = radeon_get_ib_value(p, idx+3); evergreen_packet3_check()
2511 offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; evergreen_packet3_check()
2518 ib[idx+3] = offset; evergreen_packet3_check()
2519 ib[idx+4] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2535 offset = radeon_get_ib_value(p, idx+0); evergreen_packet3_check()
2536 offset += ((u64)(radeon_get_ib_value(p, idx+1) & 0xff)) << 32UL; evergreen_packet3_check()
2547 ib[idx+0] = offset; evergreen_packet3_check()
2548 ib[idx+1] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2564 offset = radeon_get_ib_value(p, idx+1); evergreen_packet3_check()
2565 offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; evergreen_packet3_check()
2572 ib[idx+1] = offset; evergreen_packet3_check()
2573 ib[idx+2] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2576 reg = radeon_get_ib_value(p, idx+1) << 2; evergreen_packet3_check()
2579 reg, idx + 1); evergreen_packet3_check()
2591 offset = radeon_get_ib_value(p, idx+3); evergreen_packet3_check()
2592 offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; evergreen_packet3_check()
2599 ib[idx+3] = offset; evergreen_packet3_check()
2600 ib[idx+4] = upper_32_bits(offset) & 0xff; evergreen_packet3_check()
2603 reg = radeon_get_ib_value(p, idx+3) << 2; evergreen_packet3_check()
2606 reg, idx + 3); evergreen_packet3_check()
2697 r = radeon_cs_packet_parse(p, &pkt, p->idx); evergreen_cs_parse()
2703 p->idx += pkt.count + 2; evergreen_cs_parse()
2724 } while (p->idx < p->chunk_ib->length_dw); evergreen_cs_parse()
2751 u32 idx; evergreen_dma_cs_parse() local
2756 if (p->idx >= ib_chunk->length_dw) { evergreen_dma_cs_parse()
2758 p->idx, ib_chunk->length_dw); evergreen_dma_cs_parse()
2761 idx = p->idx; evergreen_dma_cs_parse()
2762 header = radeon_get_ib_value(p, idx); evergreen_dma_cs_parse()
2777 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2780 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
2781 p->idx += count + 7; evergreen_dma_cs_parse()
2785 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2786 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; evergreen_dma_cs_parse()
2788 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2789 ib[idx+2] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2790 p->idx += count + 3; evergreen_dma_cs_parse()
2793 DRM_ERROR("bad DMA_PACKET_WRITE [%6d] 0x%08x sub cmd is not 0 or 8\n", idx, header); evergreen_dma_cs_parse()
2817 src_offset = radeon_get_ib_value(p, idx+2); evergreen_dma_cs_parse()
2818 src_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; evergreen_dma_cs_parse()
2819 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2820 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32; evergreen_dma_cs_parse()
2831 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2832 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2833 ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2834 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2835 p->idx += 5; evergreen_dma_cs_parse()
2840 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) { evergreen_dma_cs_parse()
2842 src_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2844 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
2846 dst_offset = radeon_get_ib_value(p, idx + 7); evergreen_dma_cs_parse()
2847 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32; evergreen_dma_cs_parse()
2848 ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2849 ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2852 src_offset = radeon_get_ib_value(p, idx+7); evergreen_dma_cs_parse()
2853 src_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32; evergreen_dma_cs_parse()
2854 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2855 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2857 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2859 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
2871 p->idx += 9; evergreen_dma_cs_parse()
2876 src_offset = radeon_get_ib_value(p, idx+2); evergreen_dma_cs_parse()
2877 src_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; evergreen_dma_cs_parse()
2878 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2879 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32; evergreen_dma_cs_parse()
2890 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xffffffff); evergreen_dma_cs_parse()
2891 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xffffffff); evergreen_dma_cs_parse()
2892 ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2893 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2894 p->idx += 5; evergreen_dma_cs_parse()
2903 ib[idx+1] += (u32)(src_reloc->gpu_offset & 0xffffffff); evergreen_dma_cs_parse()
2904 ib[idx+2] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2905 ib[idx+4] += (u32)(dst_reloc->gpu_offset & 0xffffffff); evergreen_dma_cs_parse()
2906 ib[idx+5] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2908 p->idx += 9; evergreen_dma_cs_parse()
2918 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2919 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; evergreen_dma_cs_parse()
2920 dst2_offset = radeon_get_ib_value(p, idx+2); evergreen_dma_cs_parse()
2921 dst2_offset |= ((u64)(radeon_get_ib_value(p, idx+5) & 0xff)) << 32; evergreen_dma_cs_parse()
2922 src_offset = radeon_get_ib_value(p, idx+3); evergreen_dma_cs_parse()
2923 src_offset |= ((u64)(radeon_get_ib_value(p, idx+6) & 0xff)) << 32; evergreen_dma_cs_parse()
2939 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2940 ib[idx+2] += (u32)(dst2_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2941 ib[idx+3] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2942 ib[idx+4] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2943 ib[idx+5] += upper_32_bits(dst2_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2944 ib[idx+6] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2945 p->idx += 7; evergreen_dma_cs_parse()
2949 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) { evergreen_dma_cs_parse()
2958 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
2960 dst2_offset = radeon_get_ib_value(p, idx+2); evergreen_dma_cs_parse()
2962 src_offset = radeon_get_ib_value(p, idx+8); evergreen_dma_cs_parse()
2963 src_offset |= ((u64)(radeon_get_ib_value(p, idx+9) & 0xff)) << 32; evergreen_dma_cs_parse()
2979 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
2980 ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
2981 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2982 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
2983 p->idx += 10; evergreen_dma_cs_parse()
2993 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) { evergreen_dma_cs_parse()
2995 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
2997 ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
2998 ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
3001 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
3002 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
3004 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3006 p->idx += 12; evergreen_dma_cs_parse()
3011 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) { evergreen_dma_cs_parse()
3020 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
3022 dst2_offset = radeon_get_ib_value(p, idx+2); evergreen_dma_cs_parse()
3024 src_offset = radeon_get_ib_value(p, idx+8); evergreen_dma_cs_parse()
3025 src_offset |= ((u64)(radeon_get_ib_value(p, idx+9) & 0xff)) << 32; evergreen_dma_cs_parse()
3041 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3042 ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3043 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
3044 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
3045 p->idx += 10; evergreen_dma_cs_parse()
3051 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) { evergreen_dma_cs_parse()
3053 src_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
3055 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3057 dst_offset = radeon_get_ib_value(p, idx+7); evergreen_dma_cs_parse()
3058 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32; evergreen_dma_cs_parse()
3059 ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
3060 ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
3063 src_offset = radeon_get_ib_value(p, idx+7); evergreen_dma_cs_parse()
3064 src_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32; evergreen_dma_cs_parse()
3065 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
3066 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
3068 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
3070 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3082 p->idx += 9; evergreen_dma_cs_parse()
3091 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3092 ib[idx+4] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3093 p->idx += 13; evergreen_dma_cs_parse()
3098 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) { evergreen_dma_cs_parse()
3107 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
3109 dst2_offset = radeon_get_ib_value(p, idx+2); evergreen_dma_cs_parse()
3111 src_offset = radeon_get_ib_value(p, idx+8); evergreen_dma_cs_parse()
3112 src_offset |= ((u64)(radeon_get_ib_value(p, idx+9) & 0xff)) << 32; evergreen_dma_cs_parse()
3128 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3129 ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8); evergreen_dma_cs_parse()
3130 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
3131 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff; evergreen_dma_cs_parse()
3132 p->idx += 10; evergreen_dma_cs_parse()
3135 DRM_ERROR("bad DMA_PACKET_COPY [%6d] 0x%08x invalid sub cmd\n", idx, header); evergreen_dma_cs_parse()
3145 dst_offset = radeon_get_ib_value(p, idx+1); evergreen_dma_cs_parse()
3146 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0x00ff0000)) << 16; evergreen_dma_cs_parse()
3152 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); evergreen_dma_cs_parse()
3153 ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) << 16) & 0x00ff0000; evergreen_dma_cs_parse()
3154 p->idx += 4; evergreen_dma_cs_parse()
3157 p->idx += 1; evergreen_dma_cs_parse()
3160 DRM_ERROR("Unknown packet type %d at %d !\n", cmd, idx); evergreen_dma_cs_parse()
3163 } while (p->idx < p->chunk_ib->length_dw); evergreen_dma_cs_parse()
3299 u32 idx = pkt->idx + 1; evergreen_vm_packet3_check() local
3300 u32 idx_value = ib[idx]; evergreen_vm_packet3_check()
3356 reg = ib[idx + 5] * 4; evergreen_vm_packet3_check()
3363 reg = ib[idx + 3] * 4; evergreen_vm_packet3_check()
3384 command = ib[idx + 4]; evergreen_vm_packet3_check()
3385 info = ib[idx + 1]; evergreen_vm_packet3_check()
3422 start_reg = ib[idx + 2]; evergreen_vm_packet3_check()
3450 u32 idx = 0; evergreen_ib_parse() local
3454 pkt.idx = idx; evergreen_ib_parse()
3455 pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]); evergreen_ib_parse()
3456 pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]); evergreen_ib_parse()
3464 idx += 1; evergreen_ib_parse()
3467 pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]); evergreen_ib_parse()
3469 idx += pkt.count + 2; evergreen_ib_parse()
3478 } while (idx < ib->length_dw); evergreen_ib_parse()
3494 u32 idx = 0; evergreen_dma_ib_parse() local
3498 header = ib->ptr[idx]; evergreen_dma_ib_parse()
3508 idx += count + 7; evergreen_dma_ib_parse()
3512 idx += count + 3; evergreen_dma_ib_parse()
3515 DRM_ERROR("bad DMA_PACKET_WRITE [%6d] 0x%08x sub cmd is not 0 or 8\n", idx, ib->ptr[idx]); evergreen_dma_ib_parse()
3523 idx += 5; evergreen_dma_ib_parse()
3527 idx += 9; evergreen_dma_ib_parse()
3531 idx += 5; evergreen_dma_ib_parse()
3535 idx += 9; evergreen_dma_ib_parse()
3539 idx += 7; evergreen_dma_ib_parse()
3543 idx += 10; evergreen_dma_ib_parse()
3547 idx += 12; evergreen_dma_ib_parse()
3551 idx += 10; evergreen_dma_ib_parse()
3555 idx += 9; evergreen_dma_ib_parse()
3559 idx += 13; evergreen_dma_ib_parse()
3563 idx += 10; evergreen_dma_ib_parse()
3566 DRM_ERROR("bad DMA_PACKET_COPY [%6d] 0x%08x invalid sub cmd\n", idx, ib->ptr[idx]); evergreen_dma_ib_parse()
3571 idx += 4; evergreen_dma_ib_parse()
3574 idx += 1; evergreen_dma_ib_parse()
3577 DRM_ERROR("Unknown packet type %d at %d !\n", cmd, idx); evergreen_dma_ib_parse()
3580 } while (idx < ib->length_dw); evergreen_dma_ib_parse()
752 evergreen_cs_track_validate_texture(struct radeon_cs_parser *p, struct radeon_bo *texture, struct radeon_bo *mipmap, unsigned idx) evergreen_cs_track_validate_texture() argument
1047 evergreen_packet0_check(struct radeon_cs_parser *p, struct radeon_cs_packet *pkt, unsigned idx, unsigned reg) evergreen_packet0_check() argument
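Both the evergreen checker above and the r600 one below follow the same loop: decode a packet header at p->idx, validate the payload (patching relocated GPU addresses into ib[] along the way), then advance idx past the packet. A stripped-down sketch of the walk (field layout illustrative, not the exact radeon encoding):

	#include <stdint.h>

	#define PKT_TYPE(h)   (((h) >> 30) & 0x3)
	#define PKT_COUNT(h)  (((h) >> 16) & 0x3fff)

	/* returns 0 when every packet was consumed exactly, -1 on a bad type */
	static int parse_ib(const uint32_t *ib, uint32_t length_dw)
	{
		uint32_t idx = 0;

		while (idx < length_dw) {
			uint32_t header = ib[idx];

			switch (PKT_TYPE(header)) {
			case 0:					/* register writes */
			case 3:					/* opcode + payload */
				idx += PKT_COUNT(header) + 2;	/* header + count+1 dwords */
				break;
			case 2:					/* NOP / padding */
				idx += 1;
				break;
			default:
				return -1;
			}
		}
		return 0;
	}
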
r600_cs.c
839 r = radeon_cs_packet_parse(p, &wait_reg_mem, p->idx); r600_cs_common_vline_parse()
850 wait_reg_mem_info = radeon_get_ib_value(p, wait_reg_mem.idx + 1); r600_cs_common_vline_parse()
866 if ((radeon_get_ib_value(p, wait_reg_mem.idx + 2) << 2) != vline_status[0]) { r600_cs_common_vline_parse()
871 if (radeon_get_ib_value(p, wait_reg_mem.idx + 5) != RADEON_VLINE_STAT) { r600_cs_common_vline_parse()
877 r = radeon_cs_packet_parse(p, &p3reloc, p->idx + wait_reg_mem.count + 2); r600_cs_common_vline_parse()
881 h_idx = p->idx - 2; r600_cs_common_vline_parse()
882 p->idx += wait_reg_mem.count + 2; r600_cs_common_vline_parse()
883 p->idx += p3reloc.count + 2; r600_cs_common_vline_parse()
920 unsigned idx, unsigned reg) r600_packet0_check()
929 idx, reg); r600_packet0_check()
935 reg, idx); r600_packet0_check()
945 unsigned idx; r600_cs_parse_packet0() local
948 idx = pkt->idx + 1; r600_cs_parse_packet0()
950 for (i = 0; i <= pkt->count; i++, idx++, reg += 4) { r600_cs_parse_packet0()
951 r = r600_packet0_check(p, pkt, idx, reg); r600_cs_parse_packet0()
963 * @idx: index into the cs buffer
969 static int r600_cs_check_reg(struct radeon_cs_parser *p, u32 reg, u32 idx) r600_cs_check_reg() argument
978 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); r600_cs_check_reg()
1008 /*tmp =radeon_get_ib_value(p, idx); r600_cs_check_reg()
1009 ib[idx] = 0;*/ r600_cs_check_reg()
1023 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1026 track->sq_config = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1029 track->db_depth_control = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1041 track->db_depth_info = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1042 ib[idx] &= C_028010_ARRAY_MODE; r600_cs_check_reg()
1045 ib[idx] |= S_028010_ARRAY_MODE(V_028010_ARRAY_2D_TILED_THIN1); r600_cs_check_reg()
1048 ib[idx] |= S_028010_ARRAY_MODE(V_028010_ARRAY_1D_TILED_THIN1); r600_cs_check_reg()
1052 track->db_depth_info = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1057 track->db_depth_view = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1061 track->db_depth_size = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1062 track->db_depth_size_idx = idx; r600_cs_check_reg()
1066 track->vgt_strmout_en = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1070 track->vgt_strmout_buffer_en = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1084 track->vgt_strmout_bo_offset[tmp] = radeon_get_ib_value(p, idx) << 8; r600_cs_check_reg()
1085 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1096 track->vgt_strmout_size[tmp] = radeon_get_ib_value(p, idx) * 4; r600_cs_check_reg()
1106 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1109 track->cb_target_mask = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1113 track->cb_shader_mask = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1116 tmp = G_028C04_MSAA_NUM_SAMPLES(radeon_get_ib_value(p, idx)); r600_cs_check_reg()
1122 tmp = G_028808_SPECIAL_OP(radeon_get_ib_value(p, idx)); r600_cs_check_reg()
1142 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1144 ib[idx] |= S_0280A0_ARRAY_MODE(V_0280A0_ARRAY_2D_TILED_THIN1); r600_cs_check_reg()
1147 ib[idx] |= S_0280A0_ARRAY_MODE(V_0280A0_ARRAY_1D_TILED_THIN1); r600_cs_check_reg()
1152 track->cb_color_info[tmp] = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1165 track->cb_color_view[tmp] = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1177 track->cb_color_size[tmp] = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1178 track->cb_color_size_idx[tmp] = idx; r600_cs_check_reg()
1206 ib[idx] = track->cb_color_base_last[tmp]; r600_cs_check_reg()
1214 track->cb_color_frag_offset[tmp] = (u64)ib[idx] << 8; r600_cs_check_reg()
1215 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1237 ib[idx] = track->cb_color_base_last[tmp]; r600_cs_check_reg()
1245 track->cb_color_tile_offset[tmp] = (u64)ib[idx] << 8; r600_cs_check_reg()
1246 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1261 track->cb_color_mask[tmp] = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1281 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx) << 8; r600_cs_check_reg()
1282 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1283 track->cb_color_base_last[tmp] = ib[idx]; r600_cs_check_reg()
1295 track->db_offset = radeon_get_ib_value(p, idx) << 8; r600_cs_check_reg()
1296 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1308 track->htile_offset = radeon_get_ib_value(p, idx) << 8; r600_cs_check_reg()
1309 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1314 track->htile_surface = radeon_get_ib_value(p, idx); r600_cs_check_reg()
1316 ib[idx] |= 3; r600_cs_check_reg()
1378 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1387 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_cs_check_reg()
1390 track->sx_misc_kill_all_prims = (radeon_get_ib_value(p, idx) & 0x1) != 0; r600_cs_check_reg()
1393 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); r600_cs_check_reg()
1462 * @idx: index into the cs buffer
1469 static int r600_check_texture_resource(struct radeon_cs_parser *p, u32 idx, r600_check_texture_resource() argument
1494 word0 = radeon_get_ib_value(p, idx + 0); r600_check_texture_resource()
1501 word1 = radeon_get_ib_value(p, idx + 1); r600_check_texture_resource()
1502 word2 = radeon_get_ib_value(p, idx + 2) << 8; r600_check_texture_resource()
1503 word3 = radeon_get_ib_value(p, idx + 3) << 8; r600_check_texture_resource()
1504 word4 = radeon_get_ib_value(p, idx + 4); r600_check_texture_resource()
1505 word5 = radeon_get_ib_value(p, idx + 5); r600_check_texture_resource()
1610 static bool r600_is_safe_reg(struct radeon_cs_parser *p, u32 reg, u32 idx) r600_is_safe_reg() argument
1616 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); r600_is_safe_reg()
1622 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx); r600_is_safe_reg()
1632 unsigned idx; r600_packet3_check() local
1640 idx = pkt->idx + 1; r600_packet3_check()
1641 idx_value = radeon_get_ib_value(p, idx); r600_packet3_check()
1655 tmp = radeon_get_ib_value(p, idx + 1); r600_packet3_check()
1677 ib[idx + 0] = offset; r600_packet3_check()
1678 ib[idx + 1] = (tmp & 0xffffff00) | (upper_32_bits(offset) & 0xff); r600_packet3_check()
1716 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32); r600_packet3_check()
1718 ib[idx+0] = offset; r600_packet3_check()
1719 ib[idx+1] = upper_32_bits(offset) & 0xff; r600_packet3_check()
1735 dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx); r600_packet3_check()
1767 (radeon_get_ib_value(p, idx+1) & 0xfffffff0) + r600_packet3_check()
1768 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); r600_packet3_check()
1770 ib[idx+1] = (ib[idx+1] & 0x3) | (offset & 0xfffffff0); r600_packet3_check()
1771 ib[idx+2] = upper_32_bits(offset) & 0xff; r600_packet3_check()
1785 command = radeon_get_ib_value(p, idx+4); r600_packet3_check()
1803 tmp = radeon_get_ib_value(p, idx) + r600_packet3_check()
1804 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32); r600_packet3_check()
1814 ib[idx] = offset; r600_packet3_check()
1815 ib[idx+1] = (ib[idx+1] & 0xffffff00) | (upper_32_bits(offset) & 0xff); r600_packet3_check()
1833 tmp = radeon_get_ib_value(p, idx+2) + r600_packet3_check()
1834 ((u64)(radeon_get_ib_value(p, idx+3) & 0xff) << 32); r600_packet3_check()
1844 ib[idx+2] = offset; r600_packet3_check()
1845 ib[idx+3] = upper_32_bits(offset) & 0xff; r600_packet3_check()
1855 if (radeon_get_ib_value(p, idx + 1) != 0xffffffff || r600_packet3_check()
1856 radeon_get_ib_value(p, idx + 2) != 0) { r600_packet3_check()
1862 ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_packet3_check()
1879 (radeon_get_ib_value(p, idx+1) & 0xfffffff8) + r600_packet3_check()
1880 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); r600_packet3_check()
1882 ib[idx+1] = offset & 0xfffffff8; r600_packet3_check()
1883 ib[idx+2] = upper_32_bits(offset) & 0xff; r600_packet3_check()
1901 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) + r600_packet3_check()
1902 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32); r600_packet3_check()
1904 ib[idx+1] = offset & 0xfffffffc; r600_packet3_check()
1905 ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff); r600_packet3_check()
1919 r = r600_cs_check_reg(p, reg, idx+1+i); r600_packet3_check()
1935 r = r600_cs_check_reg(p, reg, idx+1+i); r600_packet3_check()
1957 switch (G__SQ_VTX_CONSTANT_TYPE(radeon_get_ib_value(p, idx+(i*7)+6+1))) { r600_packet3_check()
1968 ib[idx+1+(i*7)+0] |= S_038000_TILE_MODE(V_038000_ARRAY_2D_TILED_THIN1); r600_packet3_check()
1970 ib[idx+1+(i*7)+0] |= S_038000_TILE_MODE(V_038000_ARRAY_1D_TILED_THIN1); r600_packet3_check()
1981 r = r600_check_texture_resource(p, idx+(i*7)+1, r600_packet3_check()
1983 base_offset + radeon_get_ib_value(p, idx+1+(i*7)+2), r600_packet3_check()
1984 mip_offset + radeon_get_ib_value(p, idx+1+(i*7)+3), r600_packet3_check()
1988 ib[idx+1+(i*7)+2] += base_offset; r600_packet3_check()
1989 ib[idx+1+(i*7)+3] += mip_offset; r600_packet3_check()
2000 offset = radeon_get_ib_value(p, idx+1+(i*7)+0); r600_packet3_check()
2001 size = radeon_get_ib_value(p, idx+1+(i*7)+1) + 1; r600_packet3_check()
2006 ib[idx+1+(i*7)+1] = radeon_bo_size(reloc->robj) - offset; r600_packet3_check()
2010 ib[idx+1+(i*8)+0] = offset64; r600_packet3_check()
2011 ib[idx+1+(i*8)+2] = (ib[idx+1+(i*8)+2] & 0xffffff00) | r600_packet3_check()
2107 offset = radeon_get_ib_value(p, idx+1) << 8; r600_packet3_check()
2119 ib[idx+1] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); r600_packet3_check()
2145 offset = radeon_get_ib_value(p, idx+1); r600_packet3_check()
2146 offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; r600_packet3_check()
2153 ib[idx+1] = offset; r600_packet3_check()
2154 ib[idx+2] = upper_32_bits(offset) & 0xff; r600_packet3_check()
2164 offset = radeon_get_ib_value(p, idx+3); r600_packet3_check()
2165 offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; r600_packet3_check()
2172 ib[idx+3] = offset; r600_packet3_check()
2173 ib[idx+4] = upper_32_bits(offset) & 0xff; r600_packet3_check()
2189 offset = radeon_get_ib_value(p, idx+0); r600_packet3_check()
2190 offset += ((u64)(radeon_get_ib_value(p, idx+1) & 0xff)) << 32UL; r600_packet3_check()
2201 ib[idx+0] = offset; r600_packet3_check()
2202 ib[idx+1] = upper_32_bits(offset) & 0xff; r600_packet3_check()
2218 offset = radeon_get_ib_value(p, idx+1); r600_packet3_check()
2219 offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; r600_packet3_check()
2226 ib[idx+1] = offset; r600_packet3_check()
2227 ib[idx+2] = upper_32_bits(offset) & 0xff; r600_packet3_check()
2230 reg = radeon_get_ib_value(p, idx+1) << 2; r600_packet3_check()
2231 if (!r600_is_safe_reg(p, reg, idx+1)) r600_packet3_check()
2242 offset = radeon_get_ib_value(p, idx+3); r600_packet3_check()
2243 offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; r600_packet3_check()
2250 ib[idx+3] = offset; r600_packet3_check()
2251 ib[idx+4] = upper_32_bits(offset) & 0xff; r600_packet3_check()
2254 reg = radeon_get_ib_value(p, idx+3) << 2; r600_packet3_check()
2255 if (!r600_is_safe_reg(p, reg, idx+3)) r600_packet3_check()
2292 r = radeon_cs_packet_parse(p, &pkt, p->idx); r600_cs_parse()
2298 p->idx += pkt.count + 2; r600_cs_parse()
2319 } while (p->idx < p->chunk_ib->length_dw); r600_cs_parse()
2441 unsigned idx; r600_dma_cs_next_reloc() local
2449 idx = p->dma_reloc_idx; r600_dma_cs_next_reloc()
2450 if (idx >= p->nrelocs) { r600_dma_cs_next_reloc()
2452 idx, p->nrelocs); r600_dma_cs_next_reloc()
2455 *cs_reloc = &p->relocs[idx]; r600_dma_cs_next_reloc()
2479 u32 idx, idx_value; r600_dma_cs_parse() local
2484 if (p->idx >= ib_chunk->length_dw) { r600_dma_cs_parse()
2486 p->idx, ib_chunk->length_dw); r600_dma_cs_parse()
2489 idx = p->idx; r600_dma_cs_parse()
2490 header = radeon_get_ib_value(p, idx); r600_dma_cs_parse()
2503 dst_offset = radeon_get_ib_value(p, idx+1); r600_dma_cs_parse()
2506 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); r600_dma_cs_parse()
2507 p->idx += count + 5; r600_dma_cs_parse()
2509 dst_offset = radeon_get_ib_value(p, idx+1); r600_dma_cs_parse()
2510 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32; r600_dma_cs_parse()
2512 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2513 ib[idx+2] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; r600_dma_cs_parse()
2514 p->idx += count + 3; r600_dma_cs_parse()
2534 idx_value = radeon_get_ib_value(p, idx + 2); r600_dma_cs_parse()
2538 src_offset = radeon_get_ib_value(p, idx+1); r600_dma_cs_parse()
2540 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8); r600_dma_cs_parse()
2542 dst_offset = radeon_get_ib_value(p, idx+5); r600_dma_cs_parse()
2543 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+6) & 0xff)) << 32; r600_dma_cs_parse()
2544 ib[idx+5] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2545 ib[idx+6] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; r600_dma_cs_parse()
2548 src_offset = radeon_get_ib_value(p, idx+5); r600_dma_cs_parse()
2549 src_offset |= ((u64)(radeon_get_ib_value(p, idx+6) & 0xff)) << 32; r600_dma_cs_parse()
2550 ib[idx+5] += (u32)(src_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2551 ib[idx+6] += upper_32_bits(src_reloc->gpu_offset) & 0xff; r600_dma_cs_parse()
2553 dst_offset = radeon_get_ib_value(p, idx+1); r600_dma_cs_parse()
2555 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8); r600_dma_cs_parse()
2557 p->idx += 7; r600_dma_cs_parse()
2560 src_offset = radeon_get_ib_value(p, idx+2); r600_dma_cs_parse()
2561 src_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32; r600_dma_cs_parse()
2562 dst_offset = radeon_get_ib_value(p, idx+1); r600_dma_cs_parse()
2563 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32; r600_dma_cs_parse()
2565 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2566 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2567 ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff; r600_dma_cs_parse()
2568 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff; r600_dma_cs_parse()
2569 p->idx += 5; r600_dma_cs_parse()
2571 src_offset = radeon_get_ib_value(p, idx+2); r600_dma_cs_parse()
2572 src_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32; r600_dma_cs_parse()
2573 dst_offset = radeon_get_ib_value(p, idx+1); r600_dma_cs_parse()
2574 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff0000)) << 16; r600_dma_cs_parse()
2576 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2577 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2578 ib[idx+3] += upper_32_bits(src_reloc->gpu_offset) & 0xff; r600_dma_cs_parse()
2579 ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) & 0xff) << 16; r600_dma_cs_parse()
2580 p->idx += 4; r600_dma_cs_parse()
2604 dst_offset = radeon_get_ib_value(p, idx+1); r600_dma_cs_parse()
2605 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0x00ff0000)) << 16; r600_dma_cs_parse()
2611 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc); r600_dma_cs_parse()
2612 ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) << 16) & 0x00ff0000; r600_dma_cs_parse()
2613 p->idx += 4; r600_dma_cs_parse()
2616 p->idx += 1; r600_dma_cs_parse()
2619 DRM_ERROR("Unknown packet type %d at %d !\n", cmd, idx); r600_dma_cs_parse()
2622 } while (p->idx < p->chunk_ib->length_dw); r600_dma_cs_parse()
918 r600_packet0_check(struct radeon_cs_parser *p, struct radeon_cs_packet *pkt, unsigned idx, unsigned reg) r600_packet0_check() argument
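Note: the r600_packet3_check() excerpts above repeat one idiom — a 40-bit GPU address is carried in the indirect buffer as a low dword plus the upper 8 bits of the following dword, read with radeon_get_ib_value() and patched back after relocation. A minimal standalone sketch of that split-dword arithmetic (plain C; names and values are illustrative, not the driver's API):

    #include <stdint.h>
    #include <stdio.h>

    /* Read a 40-bit address stored as ib[idx] (low 32) + ib[idx+1] bits 0-7. */
    static uint64_t read_split_addr(const uint32_t *ib, unsigned idx)
    {
            return (uint64_t)ib[idx] + ((uint64_t)(ib[idx + 1] & 0xff) << 32);
    }

    /* Patch both dwords, preserving the unrelated bits of the high dword. */
    static void write_split_addr(uint32_t *ib, unsigned idx, uint64_t offset)
    {
            ib[idx]     = (uint32_t)offset;
            ib[idx + 1] = (ib[idx + 1] & ~0xffu) | ((offset >> 32) & 0xff);
    }

    int main(void)
    {
            uint32_t ib[2] = { 0x12340000, 0xabcdef00 };
            uint64_t off = read_split_addr(ib, 0) + 0x0780000000ull; /* fake reloc */

            write_split_addr(ib, 0, off);
            printf("patched: %08x %08x\n", (unsigned)ib[0], (unsigned)ib[1]);
            return 0;
    }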
H A Datom.c180 uint32_t idx, val = 0xCDCDCDCD, align, arg; atom_get_src_int() local
186 idx = U16(*ptr); atom_get_src_int()
189 DEBUG("REG[0x%04X]", idx); atom_get_src_int()
190 idx += gctx->reg_block; atom_get_src_int()
193 val = gctx->card->reg_read(gctx->card, idx); atom_get_src_int()
217 idx, 0); atom_get_src_int()
221 idx = U8(*ptr); atom_get_src_int()
225 val = get_unaligned_le32((u32 *)&ctx->ps[idx]); atom_get_src_int()
227 DEBUG("PS[0x%02X,0x%04X]", idx, val); atom_get_src_int()
230 idx = U8(*ptr); atom_get_src_int()
233 DEBUG("WS[0x%02X]", idx); atom_get_src_int()
234 switch (idx) { atom_get_src_int()
263 val = ctx->ws[idx]; atom_get_src_int()
267 idx = U16(*ptr); atom_get_src_int()
271 DEBUG("ID[0x%04X+%04X]", idx, gctx->data_block); atom_get_src_int()
273 DEBUG("ID[0x%04X]", idx); atom_get_src_int()
275 val = U32(idx + gctx->data_block); atom_get_src_int()
278 idx = U8(*ptr); atom_get_src_int()
280 if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) { atom_get_src_int()
282 gctx->fb_base + (idx * 4), gctx->scratch_size_bytes); atom_get_src_int()
285 val = gctx->scratch[(gctx->fb_base / 4) + idx]; atom_get_src_int()
287 DEBUG("FB[0x%02X]", idx); atom_get_src_int()
317 idx = U8(*ptr); atom_get_src_int()
320 DEBUG("PLL[0x%02X]", idx); atom_get_src_int()
321 val = gctx->card->pll_read(gctx->card, idx); atom_get_src_int()
324 idx = U8(*ptr); atom_get_src_int()
327 DEBUG("MC[0x%02X]", idx); atom_get_src_int()
328 val = gctx->card->mc_read(gctx->card, idx); atom_get_src_int()
453 val, idx; atom_put_dst() local
462 idx = U16(*ptr); atom_put_dst()
464 DEBUG("REG[0x%04X]", idx); atom_put_dst()
465 idx += gctx->reg_block; atom_put_dst()
468 if (idx == 0) atom_put_dst()
469 gctx->card->reg_write(gctx->card, idx, atom_put_dst()
472 gctx->card->reg_write(gctx->card, idx, val); atom_put_dst()
494 idx, val); atom_put_dst()
498 idx = U8(*ptr); atom_put_dst()
500 DEBUG("PS[0x%02X]", idx); atom_put_dst()
501 ctx->ps[idx] = cpu_to_le32(val); atom_put_dst()
504 idx = U8(*ptr); atom_put_dst()
506 DEBUG("WS[0x%02X]", idx); atom_put_dst()
507 switch (idx) { atom_put_dst()
533 ctx->ws[idx] = val; atom_put_dst()
537 idx = U8(*ptr); atom_put_dst()
539 if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) { atom_put_dst()
541 gctx->fb_base + (idx * 4), gctx->scratch_size_bytes); atom_put_dst()
543 gctx->scratch[(gctx->fb_base / 4) + idx] = val; atom_put_dst()
544 DEBUG("FB[0x%02X]", idx); atom_put_dst()
547 idx = U8(*ptr); atom_put_dst()
549 DEBUG("PLL[0x%02X]", idx); atom_put_dst()
550 gctx->card->pll_write(gctx->card, idx, val); atom_put_dst()
553 idx = U8(*ptr); atom_put_dst()
555 DEBUG("MC[0x%02X]", idx); atom_put_dst()
556 gctx->card->mc_write(gctx->card, idx, val); atom_put_dst()
622 int idx = U8((*ptr)++); atom_op_calltable() local
625 if (idx < ATOM_TABLE_NAMES_CNT) atom_op_calltable()
626 SDEBUG(" table: %d (%s)\n", idx, atom_table_names[idx]); atom_op_calltable()
628 SDEBUG(" table: %d\n", idx); atom_op_calltable()
629 if (U16(ctx->ctx->cmd_table + 4 + 2 * idx)) atom_op_calltable()
630 r = atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift); atom_op_calltable()
836 int idx = U8(*ptr); atom_op_setdatablock() local
838 SDEBUG(" block: %d\n", idx); atom_op_setdatablock()
839 if (!idx) atom_op_setdatablock()
841 else if (idx == 255) atom_op_setdatablock()
844 ctx->ctx->data_block = U16(ctx->ctx->data_table + 4 + 2 * idx); atom_op_setdatablock()
1367 int idx = CU16(ctx->data_table + offset); atom_parse_data_header() local
1374 *size = CU16(idx); atom_parse_data_header()
1376 *frev = CU8(idx + 2); atom_parse_data_header()
1378 *crev = CU8(idx + 3); atom_parse_data_header()
1379 *data_start = idx; atom_parse_data_header()
1387 int idx = CU16(ctx->cmd_table + offset); atom_parse_cmd_header() local
1394 *frev = CU8(idx + 2); atom_parse_cmd_header()
1396 *crev = CU8(idx + 3); atom_parse_cmd_header()
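Note: the atom_parse_data_header()/atom_parse_cmd_header() excerpts above imply a common header layout — a 16-bit size at the table offset, then one-byte format and content revisions at idx+2 and idx+3. A hedged standalone sketch of that decode (the struct and names are made up for illustration):

    #include <stdint.h>
    #include <stdio.h>

    struct atom_hdr { uint16_t size; uint8_t frev; uint8_t crev; };

    static struct atom_hdr parse_hdr(const uint8_t *bios, uint16_t idx)
    {
            struct atom_hdr h;

            h.size = (uint16_t)(bios[idx] | (bios[idx + 1] << 8)); /* CU16(idx)  */
            h.frev = bios[idx + 2];                                /* CU8(idx+2) */
            h.crev = bios[idx + 3];                                /* CU8(idx+3) */
            return h;
    }

    int main(void)
    {
            const uint8_t bios[] = { 0x2c, 0x00, 0x01, 0x03 };
            struct atom_hdr h = parse_hdr(bios, 0);

            printf("size=%u frev=%u crev=%u\n", h.size, h.frev, h.crev);
            return 0;
    }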
/linux-4.4.14/arch/sh/kernel/cpu/sh3/
H A Dclock-sh3.c32 int idx = ((frqcr & 0x2000) >> 11) | (frqcr & 0x0003); master_clk_init() local
34 clk->rate *= pfc_divisors[idx]; master_clk_init()
44 int idx = ((frqcr & 0x2000) >> 11) | (frqcr & 0x0003); module_clk_recalc() local
46 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
56 int idx = ((frqcr & 0x8000) >> 13) | ((frqcr & 0x0030) >> 4); bus_clk_recalc() local
58 return clk->parent->rate / stc_multipliers[idx]; bus_clk_recalc()
68 int idx = ((frqcr & 0x4000) >> 12) | ((frqcr & 0x000c) >> 2); cpu_clk_recalc() local
70 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
84 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
86 if (idx < ARRAY_SIZE(sh3_clk_ops)) arch_init_clk_ops()
87 *ops = sh3_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7706.c28 int idx = ((frqcr & 0x2000) >> 11) | (frqcr & 0x0003); master_clk_init() local
30 clk->rate *= pfc_divisors[idx]; master_clk_init()
40 int idx = ((frqcr & 0x2000) >> 11) | (frqcr & 0x0003); module_clk_recalc() local
42 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
52 int idx = ((frqcr & 0x8000) >> 13) | ((frqcr & 0x0030) >> 4); bus_clk_recalc() local
54 return clk->parent->rate / stc_multipliers[idx]; bus_clk_recalc()
64 int idx = ((frqcr & 0x4000) >> 12) | ((frqcr & 0x000c) >> 2); cpu_clk_recalc() local
66 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
80 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
82 if (idx < ARRAY_SIZE(sh7706_clk_ops)) arch_init_clk_ops()
83 *ops = sh7706_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7709.c28 int idx = ((frqcr & 0x2000) >> 11) | (frqcr & 0x0003); master_clk_init() local
30 clk->rate *= pfc_divisors[idx]; master_clk_init()
40 int idx = ((frqcr & 0x2000) >> 11) | (frqcr & 0x0003); module_clk_recalc() local
42 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
52 int idx = (frqcr & 0x0080) ? bus_clk_recalc() local
55 return clk->parent->rate * stc_multipliers[idx]; bus_clk_recalc()
65 int idx = ((frqcr & 0x4000) >> 12) | ((frqcr & 0x000c) >> 2); cpu_clk_recalc() local
67 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
81 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
83 if (idx < ARRAY_SIZE(sh7709_clk_ops)) arch_init_clk_ops()
84 *ops = sh7709_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7705.c44 int idx = __raw_readw(FRQCR) & 0x0003; module_clk_recalc() local
45 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
54 int idx = (__raw_readw(FRQCR) & 0x0300) >> 8; bus_clk_recalc() local
55 return clk->parent->rate / stc_multipliers[idx]; bus_clk_recalc()
64 int idx = (__raw_readw(FRQCR) & 0x0030) >> 4; cpu_clk_recalc() local
65 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
79 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
81 if (idx < ARRAY_SIZE(sh7705_clk_ops)) arch_init_clk_ops()
82 *ops = sh7705_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7710.c38 int idx = (__raw_readw(FRQCR) & 0x0007); module_clk_recalc() local
39 return clk->parent->rate / md_table[idx]; module_clk_recalc()
48 int idx = (__raw_readw(FRQCR) & 0x0700) >> 8; bus_clk_recalc() local
49 return clk->parent->rate / md_table[idx]; bus_clk_recalc()
58 int idx = (__raw_readw(FRQCR) & 0x0070) >> 4; cpu_clk_recalc() local
59 return clk->parent->rate / md_table[idx]; cpu_clk_recalc()
73 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
75 if (idx < ARRAY_SIZE(sh7710_clk_ops)) arch_init_clk_ops()
76 *ops = sh7710_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7712.c27 int idx = (frqcr & 0x0300) >> 8; master_clk_init() local
29 clk->rate *= multipliers[idx]; master_clk_init()
39 int idx = frqcr & 0x0007; module_clk_recalc() local
41 return clk->parent->rate / divisors[idx]; module_clk_recalc()
51 int idx = (frqcr & 0x0030) >> 4; cpu_clk_recalc() local
53 return clk->parent->rate / divisors[idx]; cpu_clk_recalc()
66 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
68 if (idx < ARRAY_SIZE(sh7712_clk_ops)) arch_init_clk_ops()
69 *ops = sh7712_clk_ops[idx]; arch_init_clk_ops()
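Note: every sh3 clock file above decodes FRQCR the same way — pick a few control bits, splice them into a table index, then divide (or multiply) the parent rate. A userspace sketch of the clock-sh3.c variant; the bit positions come from the excerpt, the divisor values are illustrative:

    #include <stdio.h>

    static const int pfc_divisors[] = { 1, 2, 3, 4, 6, 8, 12, 16 };

    static unsigned long module_clk(unsigned long parent_rate, unsigned frqcr)
    {
            /* bit 13 becomes index bit 2, bits 1:0 stay as-is */
            int idx = ((frqcr & 0x2000) >> 11) | (frqcr & 0x0003);

            return parent_rate / pfc_divisors[idx];
    }

    int main(void)
    {
            /* frqcr = 0x2001 -> idx = 4 | 1 = 5 -> divide by 8 */
            printf("%lu\n", module_clk(33333333UL, 0x2001));
            return 0;
    }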
/linux-4.4.14/arch/sh/kernel/cpu/sh5/
H A Dclock-sh5.c27 int idx = (__raw_readl(cprc_base + 0x00) >> 6) & 0x0007; master_clk_init() local
28 clk->rate *= ifc_table[idx]; master_clk_init()
37 int idx = (__raw_readw(cprc_base) >> 12) & 0x0007; module_clk_recalc() local
38 return clk->parent->rate / ifc_table[idx]; module_clk_recalc()
47 int idx = (__raw_readw(cprc_base) >> 3) & 0x0007; bus_clk_recalc() local
48 return clk->parent->rate / ifc_table[idx]; bus_clk_recalc()
57 int idx = (__raw_readw(cprc_base) & 0x0007); cpu_clk_recalc() local
58 return clk->parent->rate / ifc_table[idx]; cpu_clk_recalc()
72 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
77 if (idx < ARRAY_SIZE(sh5_clk_ops)) arch_init_clk_ops()
78 *ops = sh5_clk_ops[idx]; arch_init_clk_ops()
/linux-4.4.14/arch/arm64/kernel/
H A Dperf_regs.c10 u64 perf_reg_value(struct pt_regs *regs, int idx) perf_reg_value() argument
12 if (WARN_ON_ONCE((u32)idx >= PERF_REG_ARM64_MAX)) perf_reg_value()
21 if ((u32)idx == PERF_REG_ARM64_SP) perf_reg_value()
23 if ((u32)idx == PERF_REG_ARM64_LR) perf_reg_value()
27 if ((u32)idx == PERF_REG_ARM64_SP) perf_reg_value()
30 if ((u32)idx == PERF_REG_ARM64_PC) perf_reg_value()
33 return regs->regs[idx]; perf_reg_value()
H A Dperf_event.c254 static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx) armv8pmu_counter_valid() argument
256 return idx >= ARMV8_IDX_CYCLE_COUNTER && armv8pmu_counter_valid()
257 idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu); armv8pmu_counter_valid()
260 static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx) armv8pmu_counter_has_overflowed() argument
262 return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx)); armv8pmu_counter_has_overflowed()
265 static inline int armv8pmu_select_counter(int idx) armv8pmu_select_counter() argument
267 u32 counter = ARMV8_IDX_TO_COUNTER(idx); armv8pmu_select_counter()
271 return idx; armv8pmu_select_counter()
278 int idx = hwc->idx; armv8pmu_read_counter() local
281 if (!armv8pmu_counter_valid(cpu_pmu, idx)) armv8pmu_read_counter()
283 smp_processor_id(), idx); armv8pmu_read_counter()
284 else if (idx == ARMV8_IDX_CYCLE_COUNTER) armv8pmu_read_counter()
286 else if (armv8pmu_select_counter(idx) == idx) armv8pmu_read_counter()
296 int idx = hwc->idx; armv8pmu_write_counter() local
298 if (!armv8pmu_counter_valid(cpu_pmu, idx)) armv8pmu_write_counter()
300 smp_processor_id(), idx); armv8pmu_write_counter()
301 else if (idx == ARMV8_IDX_CYCLE_COUNTER) armv8pmu_write_counter()
303 else if (armv8pmu_select_counter(idx) == idx) armv8pmu_write_counter()
307 static inline void armv8pmu_write_evtype(int idx, u32 val) armv8pmu_write_evtype() argument
309 if (armv8pmu_select_counter(idx) == idx) { armv8pmu_write_evtype()
315 static inline int armv8pmu_enable_counter(int idx) armv8pmu_enable_counter() argument
317 u32 counter = ARMV8_IDX_TO_COUNTER(idx); armv8pmu_enable_counter()
319 return idx; armv8pmu_enable_counter()
322 static inline int armv8pmu_disable_counter(int idx) armv8pmu_disable_counter() argument
324 u32 counter = ARMV8_IDX_TO_COUNTER(idx); armv8pmu_disable_counter()
326 return idx; armv8pmu_disable_counter()
329 static inline int armv8pmu_enable_intens(int idx) armv8pmu_enable_intens() argument
331 u32 counter = ARMV8_IDX_TO_COUNTER(idx); armv8pmu_enable_intens()
333 return idx; armv8pmu_enable_intens()
336 static inline int armv8pmu_disable_intens(int idx) armv8pmu_disable_intens() argument
338 u32 counter = ARMV8_IDX_TO_COUNTER(idx); armv8pmu_disable_intens()
345 return idx; armv8pmu_disable_intens()
368 int idx = hwc->idx; armv8pmu_enable_event() local
379 armv8pmu_disable_counter(idx); armv8pmu_enable_event()
384 armv8pmu_write_evtype(idx, hwc->config_base); armv8pmu_enable_event()
389 armv8pmu_enable_intens(idx); armv8pmu_enable_event()
394 armv8pmu_enable_counter(idx); armv8pmu_enable_event()
405 int idx = hwc->idx; armv8pmu_disable_event() local
415 armv8pmu_disable_counter(idx); armv8pmu_disable_event()
420 armv8pmu_disable_intens(idx); armv8pmu_disable_event()
432 int idx; armv8pmu_handle_irq() local
450 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { armv8pmu_handle_irq()
451 struct perf_event *event = cpuc->events[idx]; armv8pmu_handle_irq()
462 if (!armv8pmu_counter_has_overflowed(pmovsr, idx)) armv8pmu_handle_irq()
512 int idx; armv8pmu_get_event_idx() local
529 for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) { armv8pmu_get_event_idx()
530 if (!test_and_set_bit(idx, cpuc->used_mask)) armv8pmu_get_event_idx()
531 return idx; armv8pmu_get_event_idx()
567 u32 idx, nb_cnt = cpu_pmu->num_events; armv8pmu_reset() local
570 for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) { armv8pmu_reset()
571 armv8pmu_disable_counter(idx); armv8pmu_reset()
572 armv8pmu_disable_intens(idx); armv8pmu_reset()
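Note: armv8pmu_get_event_idx() above is a bitmap allocator — scan used_mask from ARMV8_IDX_COUNTER0 (index 0 belongs to the cycle counter) and claim the first clear bit. A minimal sketch with a plain bitmap standing in for the kernel's atomic test_and_set_bit(); the sizes are illustrative:

    #include <stdio.h>

    #define NUM_EVENTS 6
    #define COUNTER0   1   /* index 0 is reserved for the cycle counter */

    static unsigned long used_mask;

    static int get_event_idx(void)
    {
            int idx;

            for (idx = COUNTER0; idx < NUM_EVENTS; ++idx) {
                    if (!(used_mask & (1UL << idx))) {
                            used_mask |= 1UL << idx;
                            return idx;
                    }
            }
            return -1;      /* all counters busy; the kernel returns -EAGAIN */
    }

    int main(void)
    {
            printf("%d %d\n", get_event_idx(), get_event_idx());    /* 1 2 */
            return 0;
    }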
/linux-4.4.14/arch/sh/kernel/cpu/sh4a/
H A Dubc.c18 #define UBC_CBR(idx) (0xff200000 + (0x20 * idx))
19 #define UBC_CRR(idx) (0xff200004 + (0x20 * idx))
20 #define UBC_CAR(idx) (0xff200008 + (0x20 * idx))
21 #define UBC_CAMR(idx) (0xff20000c + (0x20 * idx))
35 static void sh4a_ubc_enable(struct arch_hw_breakpoint *info, int idx) sh4a_ubc_enable() argument
37 __raw_writel(UBC_CBR_CE | info->len | info->type, UBC_CBR(idx)); sh4a_ubc_enable()
38 __raw_writel(info->address, UBC_CAR(idx)); sh4a_ubc_enable()
41 static void sh4a_ubc_disable(struct arch_hw_breakpoint *info, int idx) sh4a_ubc_disable() argument
43 __raw_writel(0, UBC_CBR(idx)); sh4a_ubc_disable()
44 __raw_writel(0, UBC_CAR(idx)); sh4a_ubc_disable()
H A Dclock-sh7770.c33 int idx = ((__raw_readl(FRQCR) >> 28) & 0x000f); module_clk_recalc() local
34 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
43 int idx = (__raw_readl(FRQCR) & 0x000f); bus_clk_recalc() local
44 return clk->parent->rate / bfc_divisors[idx]; bus_clk_recalc()
53 int idx = ((__raw_readl(FRQCR) >> 24) & 0x000f); cpu_clk_recalc() local
54 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
68 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
70 if (idx < ARRAY_SIZE(sh7770_clk_ops)) arch_init_clk_ops()
71 *ops = sh7770_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7780.c36 int idx = (__raw_readl(FRQCR) & 0x0003); module_clk_recalc() local
37 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
46 int idx = ((__raw_readl(FRQCR) >> 16) & 0x0007); bus_clk_recalc() local
47 return clk->parent->rate / bfc_divisors[idx]; bus_clk_recalc()
56 int idx = ((__raw_readl(FRQCR) >> 24) & 0x0001); cpu_clk_recalc() local
57 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
71 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
73 if (idx < ARRAY_SIZE(sh7780_clk_ops)) arch_init_clk_ops()
74 *ops = sh7780_clk_ops[idx]; arch_init_clk_ops()
79 int idx = ((__raw_readl(FRQCR) >> 20) & 0x0007); shyway_clk_recalc() local
80 return clk->parent->rate / cfc_divisors[idx]; shyway_clk_recalc()
H A Dclock-sh7763.c36 int idx = ((__raw_readl(FRQCR) >> 4) & 0x07); module_clk_recalc() local
37 return clk->parent->rate / p0fc_divisors[idx]; module_clk_recalc()
46 int idx = ((__raw_readl(FRQCR) >> 16) & 0x07); bus_clk_recalc() local
47 return clk->parent->rate / bfc_divisors[idx]; bus_clk_recalc()
65 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
67 if (idx < ARRAY_SIZE(sh7763_clk_ops)) arch_init_clk_ops()
68 *ops = sh7763_clk_ops[idx]; arch_init_clk_ops()
73 int idx = ((__raw_readl(FRQCR) >> 20) & 0x07); shyway_clk_recalc() local
74 return clk->parent->rate / cfc_divisors[idx]; shyway_clk_recalc()
H A Dperf_event.c17 #define PPC_CCBR(idx) (0xff200800 + (sizeof(u32) * idx))
18 #define PPC_PMCTR(idx) (0xfc100000 + (sizeof(u32) * idx))
230 static u64 sh4a_pmu_read(int idx) sh4a_pmu_read() argument
232 return __raw_readl(PPC_PMCTR(idx)); sh4a_pmu_read()
235 static void sh4a_pmu_disable(struct hw_perf_event *hwc, int idx) sh4a_pmu_disable() argument
239 tmp = __raw_readl(PPC_CCBR(idx)); sh4a_pmu_disable()
241 __raw_writel(tmp, PPC_CCBR(idx)); sh4a_pmu_disable()
244 static void sh4a_pmu_enable(struct hw_perf_event *hwc, int idx) sh4a_pmu_enable() argument
250 tmp |= idx ? PMCAT_CLR1 : PMCAT_CLR0; sh4a_pmu_enable()
253 tmp = __raw_readl(PPC_CCBR(idx)); sh4a_pmu_enable()
255 __raw_writel(tmp, PPC_CCBR(idx)); sh4a_pmu_enable()
257 __raw_writel(__raw_readl(PPC_CCBR(idx)) | CCBR_DUC, PPC_CCBR(idx)); sh4a_pmu_enable()
/linux-4.4.14/drivers/media/pci/pt3/
H A Dpt3_dma.c25 static u32 get_dma_base(int idx) get_dma_base() argument
29 i = (idx == 1 || idx == 2) ? 3 - idx : idx; get_dma_base()
70 static u8 *next_unit(struct pt3_adapter *adap, int *idx, int *ofs) next_unit() argument
75 (*idx)++; next_unit()
76 if (*idx == adap->num_bufs) next_unit()
77 *idx = 0; next_unit()
79 return &adap->buffer[*idx].data[*ofs]; next_unit()
84 int idx, ofs; pt3_proc_dma() local
86 idx = adap->buf_idx; pt3_proc_dma()
89 if (adap->buffer[idx].data[ofs] == PT3_BUF_CANARY) pt3_proc_dma()
92 while (*next_unit(adap, &idx, &ofs) != PT3_BUF_CANARY) { pt3_proc_dma()
102 adap->buffer[idx].data, ofs / TS_PACKET_SZ); pt3_proc_dma()
108 adap->buf_idx = idx; pt3_proc_dma()
116 int idx, ofs; pt3_init_dmabuf() local
119 idx = 0; pt3_init_dmabuf()
123 while (idx < adap->num_bufs) { pt3_init_dmabuf()
128 idx++; pt3_init_dmabuf()
129 p = adap->buffer[idx].data; pt3_init_dmabuf()
159 int idx, ofs; pt3_alloc_dmabuf() local
178 idx = 0; pt3_alloc_dmabuf()
197 data_addr = adap->buffer[idx].b_addr + ofs; pt3_alloc_dmabuf()
210 idx++; pt3_alloc_dmabuf()
211 if (idx >= adap->num_bufs) { pt3_alloc_dmabuf()
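Note: next_unit() above walks a ring of equally sized DMA buffers with an (index, offset) pair, wrapping both at their limits until it meets the canary byte marking where DMA stopped. A sketch of that walk — the buffer count, size, and one-byte step are stand-ins for the driver's real TS_PACKET_SZ geometry:

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_BUFS 4
    #define BUF_SZ   8
    #define CANARY   0x74

    static uint8_t bufs[NUM_BUFS][BUF_SZ];

    static uint8_t *next_unit(int *idx, int *ofs)
    {
            if (++*ofs == BUF_SZ) {
                    *ofs = 0;
                    if (++*idx == NUM_BUFS)
                            *idx = 0;       /* wrap to the first buffer */
            }
            return &bufs[*idx][*ofs];
    }

    int main(void)
    {
            int idx = 0, ofs = 0, n = 0;

            bufs[1][3] = CANARY;            /* pretend DMA stopped here */
            while (*next_unit(&idx, &ofs) != CANARY)
                    n++;
            printf("consumed %d units, stopped at buf %d ofs %d\n", n, idx, ofs);
            return 0;
    }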
/linux-4.4.14/arch/blackfin/kernel/cplb-nompu/
H A Dcplbmgr.c39 static inline void write_dcplb_data(int cpu, int idx, unsigned long data, write_dcplb_data() argument
43 bfin_write32(DCPLB_DATA0 + idx * 4, data); write_dcplb_data()
44 bfin_write32(DCPLB_ADDR0 + idx * 4, addr); write_dcplb_data()
48 dcplb_tbl[cpu][idx].addr = addr; write_dcplb_data()
49 dcplb_tbl[cpu][idx].data = data; write_dcplb_data()
53 static inline void write_icplb_data(int cpu, int idx, unsigned long data, write_icplb_data() argument
57 bfin_write32(ICPLB_DATA0 + idx * 4, data); write_icplb_data()
58 bfin_write32(ICPLB_ADDR0 + idx * 4, addr); write_icplb_data()
62 icplb_tbl[cpu][idx].addr = addr; write_icplb_data()
63 icplb_tbl[cpu][idx].data = data; write_icplb_data()
100 int idx; icplb_miss() local
108 idx = 0; icplb_miss()
110 eaddr = icplb_bounds[idx].eaddr; icplb_miss()
114 } while (++idx < icplb_nr_bounds); icplb_miss()
116 if (unlikely(idx == icplb_nr_bounds)) icplb_miss()
119 i_data = icplb_bounds[idx].data; icplb_miss()
136 idx = evict_one_icplb(cpu); icplb_miss()
138 write_icplb_data(cpu, idx, i_data, addr); icplb_miss()
147 int idx; dcplb_miss() local
155 idx = 0; dcplb_miss()
157 eaddr = dcplb_bounds[idx].eaddr; dcplb_miss()
161 } while (++idx < dcplb_nr_bounds); dcplb_miss()
163 if (unlikely(idx == dcplb_nr_bounds)) dcplb_miss()
166 d_data = dcplb_bounds[idx].data; dcplb_miss()
208 idx = evict_one_dcplb(cpu); dcplb_miss()
210 write_dcplb_data(cpu, idx, d_data, addr); dcplb_miss()
/linux-4.4.14/arch/metag/mm/
H A Dhighmem.c42 enum fixed_addresses idx; kmap_atomic() local
52 idx = type + KM_TYPE_NR * smp_processor_id(); kmap_atomic()
53 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic()
55 BUG_ON(!pte_none(*(kmap_pte - idx))); kmap_atomic()
57 set_pte(kmap_pte - idx, mk_pte(page, PAGE_KERNEL)); kmap_atomic()
66 int idx, type; __kunmap_atomic() local
70 idx = type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic()
78 pte_clear(&init_mm, vaddr, kmap_pte-idx); __kunmap_atomic()
95 enum fixed_addresses idx; kmap_atomic_pfn() local
103 idx = type + KM_TYPE_NR * smp_processor_id(); kmap_atomic_pfn()
104 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic_pfn()
106 BUG_ON(!pte_none(*(kmap_pte - idx))); kmap_atomic_pfn()
108 set_pte(kmap_pte - idx, pfn_pte(pfn, PAGE_KERNEL)); kmap_atomic_pfn()
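Note: the kmap_atomic() excerpts here (and again below in the mips, powerpc, and arm files) share one computation — each CPU owns KM_TYPE_NR consecutive fixmap slots, so idx = type + KM_TYPE_NR * cpu, and the slot maps to a fixed virtual page counted down from the fixmap top. A sketch of that arithmetic with made-up constants:

    #include <stdio.h>

    #define FIXADDR_TOP    0xfffff000UL    /* illustrative, arch-specific */
    #define PAGE_SHIFT     12
    #define FIX_KMAP_BEGIN 8
    #define KM_TYPE_NR     21

    static unsigned long fix_to_virt(unsigned idx)
    {
            return FIXADDR_TOP - ((unsigned long)idx << PAGE_SHIFT);
    }

    int main(void)
    {
            int type = 2, cpu = 1;
            int idx = type + KM_TYPE_NR * cpu;      /* this CPU's slot: 23 */

            printf("vaddr = %#lx\n", fix_to_virt(FIX_KMAP_BEGIN + idx));
            return 0;
    }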
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/core/
H A Ddevice.h167 int (*bar )(struct nvkm_device *, int idx, struct nvkm_bar **);
168 int (*bios )(struct nvkm_device *, int idx, struct nvkm_bios **);
169 int (*bus )(struct nvkm_device *, int idx, struct nvkm_bus **);
170 int (*clk )(struct nvkm_device *, int idx, struct nvkm_clk **);
171 int (*devinit)(struct nvkm_device *, int idx, struct nvkm_devinit **);
172 int (*fb )(struct nvkm_device *, int idx, struct nvkm_fb **);
173 int (*fuse )(struct nvkm_device *, int idx, struct nvkm_fuse **);
174 int (*gpio )(struct nvkm_device *, int idx, struct nvkm_gpio **);
175 int (*i2c )(struct nvkm_device *, int idx, struct nvkm_i2c **);
176 int (*ibus )(struct nvkm_device *, int idx, struct nvkm_subdev **);
177 int (*imem )(struct nvkm_device *, int idx, struct nvkm_instmem **);
178 int (*ltc )(struct nvkm_device *, int idx, struct nvkm_ltc **);
179 int (*mc )(struct nvkm_device *, int idx, struct nvkm_mc **);
180 int (*mmu )(struct nvkm_device *, int idx, struct nvkm_mmu **);
181 int (*mxm )(struct nvkm_device *, int idx, struct nvkm_subdev **);
182 int (*pci )(struct nvkm_device *, int idx, struct nvkm_pci **);
183 int (*pmu )(struct nvkm_device *, int idx, struct nvkm_pmu **);
184 int (*therm )(struct nvkm_device *, int idx, struct nvkm_therm **);
185 int (*timer )(struct nvkm_device *, int idx, struct nvkm_timer **);
186 int (*volt )(struct nvkm_device *, int idx, struct nvkm_volt **);
188 int (*bsp )(struct nvkm_device *, int idx, struct nvkm_engine **);
189 int (*ce[3] )(struct nvkm_device *, int idx, struct nvkm_engine **);
190 int (*cipher )(struct nvkm_device *, int idx, struct nvkm_engine **);
191 int (*disp )(struct nvkm_device *, int idx, struct nvkm_disp **);
192 int (*dma )(struct nvkm_device *, int idx, struct nvkm_dma **);
193 int (*fifo )(struct nvkm_device *, int idx, struct nvkm_fifo **);
194 int (*gr )(struct nvkm_device *, int idx, struct nvkm_gr **);
195 int (*ifb )(struct nvkm_device *, int idx, struct nvkm_engine **);
196 int (*me )(struct nvkm_device *, int idx, struct nvkm_engine **);
197 int (*mpeg )(struct nvkm_device *, int idx, struct nvkm_engine **);
198 int (*msenc )(struct nvkm_device *, int idx, struct nvkm_engine **);
199 int (*mspdec )(struct nvkm_device *, int idx, struct nvkm_engine **);
200 int (*msppp )(struct nvkm_device *, int idx, struct nvkm_engine **);
201 int (*msvld )(struct nvkm_device *, int idx, struct nvkm_engine **);
202 int (*pm )(struct nvkm_device *, int idx, struct nvkm_pm **);
203 int (*sec )(struct nvkm_device *, int idx, struct nvkm_engine **);
204 int (*sw )(struct nvkm_device *, int idx, struct nvkm_sw **);
205 int (*vic )(struct nvkm_device *, int idx, struct nvkm_engine **);
206 int (*vp )(struct nvkm_device *, int idx, struct nvkm_engine **);
/linux-4.4.14/drivers/net/wireless/mediatek/mt7601u/
H A Dmain.c58 unsigned int idx = 0; mt7601u_add_interface() local
59 unsigned int wcid = GROUP_WCID(idx); mt7601u_add_interface()
64 * - shift vif idx mt7601u_add_interface()
66 mvif->idx = idx; mt7601u_add_interface()
71 mvif->group_wcid.idx = wcid; mt7601u_add_interface()
82 unsigned int wcid = mvif->group_wcid.idx; mt7601u_remove_interface()
195 int i, idx = 0; mt76_wcid_alloc() local
198 idx = ffs(~dev->wcid_mask[i]); mt76_wcid_alloc()
199 if (!idx) mt76_wcid_alloc()
202 idx--; mt76_wcid_alloc()
203 dev->wcid_mask[i] |= BIT(idx); mt76_wcid_alloc()
207 idx = i * BITS_PER_LONG + idx; mt76_wcid_alloc()
208 if (idx > 119) mt76_wcid_alloc()
211 return idx; mt76_wcid_alloc()
222 int idx = 0; mt7601u_sta_add() local
226 idx = mt76_wcid_alloc(dev); mt7601u_sta_add()
227 if (idx < 0) { mt7601u_sta_add()
232 msta->wcid.idx = idx; mt7601u_sta_add()
234 mt7601u_mac_wcid_setup(dev, idx, mvif->idx, sta->addr); mt7601u_sta_add()
235 mt76_clear(dev, MT_WCID_DROP(idx), MT_WCID_DROP_MASK(idx)); mt7601u_sta_add()
236 rcu_assign_pointer(dev->wcid[idx], &msta->wcid); mt7601u_sta_add()
251 int idx = msta->wcid.idx; mt7601u_sta_remove() local
254 rcu_assign_pointer(dev->wcid[idx], NULL); mt7601u_sta_remove()
255 mt76_set(dev, MT_WCID_DROP(idx), MT_WCID_DROP_MASK(idx)); mt7601u_sta_remove()
256 dev->wcid_mask[idx / BITS_PER_LONG] &= ~BIT(idx % BITS_PER_LONG); mt7601u_sta_remove()
257 mt7601u_mac_wcid_setup(dev, idx, 0, NULL); mt7601u_sta_remove()
300 int idx = key->keyidx; mt7601u_set_key() local
304 key->hw_key_idx = wcid->idx; mt7601u_set_key()
305 wcid->hw_key_idx = idx; mt7601u_set_key()
307 if (idx == wcid->hw_key_idx) mt7601u_set_key()
314 if (key || wcid->hw_key_idx == idx) { mt7601u_set_key()
315 ret = mt76_mac_wcid_set_key(dev, wcid->idx, key); mt7601u_set_key()
320 return mt76_mac_shared_key_setup(dev, mvif->idx, idx, key); mt7601u_set_key()
323 return mt76_mac_wcid_set_key(dev, msta->wcid.idx, key); mt7601u_set_key()
344 WARN_ON(msta->wcid.idx > GROUP_WCID(0)); mt76_ampdu_action()
348 mt76_set(dev, MT_WCID_ADDR(msta->wcid.idx) + 4, BIT(16 + tid)); mt76_ampdu_action()
351 mt76_clear(dev, MT_WCID_ADDR(msta->wcid.idx) + 4, mt76_ampdu_action()
387 rate.idx = rates->rate[0].idx; mt76_sta_rate_tbl_update()
H A Dmac.c22 u8 idx = MT76_GET(MT_TXWI_RATE_MCS, rate); mt76_mac_process_tx_rate() local
24 txrate->idx = 0; mt76_mac_process_tx_rate()
30 txrate->idx = idx + 4; mt76_mac_process_tx_rate()
33 if (idx >= 8) mt76_mac_process_tx_rate()
34 idx -= 8; mt76_mac_process_tx_rate()
36 txrate->idx = idx; mt76_mac_process_tx_rate()
43 txrate->idx = idx; mt76_mac_process_tx_rate()
68 rate[last_rate + 1].idx = -1; mt76_mac_fill_tx_status()
70 cur_idx = rate[last_rate].idx + st->retry; mt76_mac_fill_tx_status()
73 rate[i].idx = max_t(int, 0, cur_idx - i); mt76_mac_fill_tx_status()
105 rate_idx = rate->idx; mt76_mac_tx_rate_val()
106 nss = 1 + (rate->idx >> 3); mt76_mac_tx_rate_val()
117 r = &dev->hw->wiphy->bands[band]->bitrates[rate->idx]; mt76_mac_tx_rate_val()
347 mt7601u_mac_wcid_setup(struct mt7601u_dev *dev, u8 idx, u8 vif_idx, u8 *mac) mt7601u_mac_wcid_setup() argument
355 mt76_wr(dev, MT_WCID_ATTR(idx), attr); mt7601u_mac_wcid_setup()
360 mt7601u_addr_wr(dev, MT_WCID_ADDR(idx), zmac); mt7601u_mac_wcid_setup()
391 u8 idx = MT76_GET(MT_RXWI_RATE_MCS, rate); mt76_mac_process_rate() local
395 if (WARN_ON(idx >= 8)) mt76_mac_process_rate()
396 idx = 0; mt76_mac_process_rate()
397 idx += 4; mt76_mac_process_rate()
399 status->rate_idx = idx; mt76_mac_process_rate()
402 if (idx >= 8) { mt76_mac_process_rate()
403 idx -= 8; mt76_mac_process_rate()
407 if (WARN_ON(idx >= 4)) mt76_mac_process_rate()
408 idx = 0; mt76_mac_process_rate()
410 status->rate_idx = idx; mt76_mac_process_rate()
417 status->rate_idx = idx; mt76_mac_process_rate()
514 int mt76_mac_wcid_set_key(struct mt7601u_dev *dev, u8 idx, mt76_mac_wcid_set_key() argument
526 trace_set_key(dev, idx); mt76_mac_wcid_set_key()
528 mt7601u_wr_copy(dev, MT_WCID_KEY(idx), key_data, sizeof(key_data)); mt76_mac_wcid_set_key()
541 mt7601u_wr_copy(dev, MT_WCID_IV(idx), iv_data, sizeof(iv_data)); mt76_mac_wcid_set_key()
543 val = mt7601u_rr(dev, MT_WCID_ATTR(idx)); mt76_mac_wcid_set_key()
550 mt7601u_wr(dev, MT_WCID_ATTR(idx), val); mt76_mac_wcid_set_key()
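Note: mt76_wcid_alloc() in main.c above allocates a station ID by scanning an array of bitmap words with ffs(), claiming the first clear bit, and flattening (word, bit) into one index capped at the hardware's 119-WCID limit. A userspace sketch using fixed 32-bit words in place of the kernel's BITS_PER_LONG bitmap:

    #include <stdio.h>
    #include <strings.h>    /* ffs() */

    #define N_WORDS 4       /* 4 * 32 = 128 candidate WCIDs */

    static unsigned int wcid_mask[N_WORDS];

    static int wcid_alloc(void)
    {
            int i, idx;

            for (i = 0; i < N_WORDS; i++) {
                    idx = ffs((int)~wcid_mask[i]);  /* 1-based, 0 if word full */
                    if (!idx)
                            continue;
                    idx--;
                    wcid_mask[i] |= 1U << idx;
                    idx = i * 32 + idx;
                    return idx > 119 ? -1 : idx;    /* -ENOSPC in the driver */
            }
            return -1;
    }

    int main(void)
    {
            printf("%d %d\n", wcid_alloc(), wcid_alloc());  /* 0 1 */
            return 0;
    }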
/linux-4.4.14/arch/mips/mm/
H A Dhighmem.c48 int idx, type; kmap_atomic() local
56 idx = type + KM_TYPE_NR*smp_processor_id(); kmap_atomic()
57 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic()
59 BUG_ON(!pte_none(*(kmap_pte - idx))); kmap_atomic()
61 set_pte(kmap_pte-idx, mk_pte(page, PAGE_KERNEL)); kmap_atomic()
82 int idx = type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic() local
84 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); __kunmap_atomic()
90 pte_clear(&init_mm, vaddr, kmap_pte-idx); __kunmap_atomic()
107 int idx, type; kmap_atomic_pfn() local
113 idx = type + KM_TYPE_NR*smp_processor_id(); kmap_atomic_pfn()
114 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic_pfn()
115 set_pte(kmap_pte-idx, pfn_pte(pfn, PAGE_KERNEL)); kmap_atomic_pfn()
H A Dtlb-r3k.c104 int idx; local_flush_tlb_range() local
109 idx = read_c0_index(); local_flush_tlb_range()
112 if (idx < 0) /* BARRIER */ local_flush_tlb_range()
141 int idx; local_flush_tlb_kernel_range() local
146 idx = read_c0_index(); local_flush_tlb_kernel_range()
149 if (idx < 0) /* BARRIER */ local_flush_tlb_kernel_range()
166 int oldpid, newpid, idx; local_flush_tlb_page() local
178 idx = read_c0_index(); local_flush_tlb_page()
181 if (idx < 0) /* BARRIER */ local_flush_tlb_page()
194 int idx, pid; __update_tlb() local
216 idx = read_c0_index(); __update_tlb()
219 if (idx < 0) { /* BARRIER */ __update_tlb()
H A Dtlb-r8k.c91 signed long idx; local_flush_tlb_range() local
97 idx = read_c0_tlbset(); local_flush_tlb_range()
98 if (idx < 0) local_flush_tlb_range()
101 write_c0_entryhi(CKSEG0 + (idx << (PAGE_SHIFT + 1))); local_flush_tlb_range()
131 signed long idx; local_flush_tlb_kernel_range() local
137 idx = read_c0_tlbset(); local_flush_tlb_kernel_range()
138 if (idx < 0) local_flush_tlb_kernel_range()
141 write_c0_entryhi(CKSEG0 + (idx << (PAGE_SHIFT + 1))); local_flush_tlb_kernel_range()
153 signed long idx; local_flush_tlb_page() local
165 idx = read_c0_tlbset(); local_flush_tlb_page()
166 if (idx < 0) local_flush_tlb_page()
170 write_c0_entryhi(CKSEG0 + (idx << (PAGE_SHIFT + 1))); local_flush_tlb_page()
H A Dtlb-r4k.c138 int idx; local_flush_tlb_range() local
145 idx = read_c0_index(); local_flush_tlb_range()
148 if (idx < 0) local_flush_tlb_range()
151 write_c0_entryhi(UNIQUE_ENTRYHI(idx)); local_flush_tlb_range()
184 int idx; local_flush_tlb_kernel_range() local
191 idx = read_c0_index(); local_flush_tlb_kernel_range()
194 if (idx < 0) local_flush_tlb_kernel_range()
197 write_c0_entryhi(UNIQUE_ENTRYHI(idx)); local_flush_tlb_kernel_range()
217 int oldpid, newpid, idx; local_flush_tlb_page() local
228 idx = read_c0_index(); local_flush_tlb_page()
231 if (idx < 0) local_flush_tlb_page()
234 write_c0_entryhi(UNIQUE_ENTRYHI(idx)); local_flush_tlb_page()
254 int oldpid, idx; local_flush_tlb_one() local
264 idx = read_c0_index(); local_flush_tlb_one()
267 if (idx >= 0) { local_flush_tlb_one()
269 write_c0_entryhi(UNIQUE_ENTRYHI(idx)); local_flush_tlb_one()
292 int idx, pid; __update_tlb() local
312 idx = read_c0_index(); __update_tlb()
324 if (idx < 0) __update_tlb()
352 if (idx < 0) __update_tlb()
/linux-4.4.14/arch/arm/kernel/
H A Dperf_regs.c9 u64 perf_reg_value(struct pt_regs *regs, int idx) perf_reg_value() argument
11 if (WARN_ON_ONCE((u32)idx >= PERF_REG_ARM_MAX)) perf_reg_value()
14 return regs->uregs[idx]; perf_reg_value()
H A Dperf_event_xscale.c151 int idx; xscale1pmu_handle_irq() local
173 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { xscale1pmu_handle_irq()
174 struct perf_event *event = cpuc->events[idx]; xscale1pmu_handle_irq()
180 if (!xscale1_pmnc_counter_has_overflowed(pmnc, idx)) xscale1pmu_handle_irq()
210 int idx = hwc->idx; xscale1pmu_enable_event() local
212 switch (idx) { xscale1pmu_enable_event()
228 WARN_ONCE(1, "invalid counter number (%d)\n", idx); xscale1pmu_enable_event()
246 int idx = hwc->idx; xscale1pmu_disable_event() local
248 switch (idx) { xscale1pmu_disable_event()
262 WARN_ONCE(1, "invalid counter number (%d)\n", idx); xscale1pmu_disable_event()
322 int counter = hwc->idx; xscale1pmu_read_counter()
343 int counter = hwc->idx; xscale1pmu_write_counter()
498 int idx; xscale2pmu_handle_irq() local
514 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { xscale2pmu_handle_irq()
515 struct perf_event *event = cpuc->events[idx]; xscale2pmu_handle_irq()
521 if (!xscale2_pmnc_counter_has_overflowed(of_flags, idx)) xscale2pmu_handle_irq()
551 int idx = hwc->idx; xscale2pmu_enable_event() local
556 switch (idx) { xscale2pmu_enable_event()
581 WARN_ONCE(1, "invalid counter number (%d)\n", idx); xscale2pmu_enable_event()
597 int idx = hwc->idx; xscale2pmu_disable_event() local
602 switch (idx) { xscale2pmu_disable_event()
632 WARN_ONCE(1, "invalid counter number (%d)\n", idx); xscale2pmu_disable_event()
647 int idx = xscale1pmu_get_event_idx(cpuc, event); xscale2pmu_get_event_idx() local
648 if (idx >= 0) xscale2pmu_get_event_idx()
652 idx = XSCALE_COUNTER3; xscale2pmu_get_event_idx()
654 idx = XSCALE_COUNTER2; xscale2pmu_get_event_idx()
656 return idx; xscale2pmu_get_event_idx()
686 int counter = hwc->idx; xscale2pmu_read_counter()
713 int counter = hwc->idx; xscale2pmu_write_counter()
H A Dperf_event_v7.c622 static inline int armv7_pmnc_counter_valid(struct arm_pmu *cpu_pmu, int idx) armv7_pmnc_counter_valid() argument
624 return idx >= ARMV7_IDX_CYCLE_COUNTER && armv7_pmnc_counter_valid()
625 idx <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); armv7_pmnc_counter_valid()
628 static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx) armv7_pmnc_counter_has_overflowed() argument
630 return pmnc & BIT(ARMV7_IDX_TO_COUNTER(idx)); armv7_pmnc_counter_has_overflowed()
633 static inline void armv7_pmnc_select_counter(int idx) armv7_pmnc_select_counter() argument
635 u32 counter = ARMV7_IDX_TO_COUNTER(idx); armv7_pmnc_select_counter()
644 int idx = hwc->idx; armv7pmu_read_counter() local
647 if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) { armv7pmu_read_counter()
649 smp_processor_id(), idx); armv7pmu_read_counter()
650 } else if (idx == ARMV7_IDX_CYCLE_COUNTER) { armv7pmu_read_counter()
653 armv7_pmnc_select_counter(idx); armv7pmu_read_counter()
664 int idx = hwc->idx; armv7pmu_write_counter() local
666 if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) { armv7pmu_write_counter()
668 smp_processor_id(), idx); armv7pmu_write_counter()
669 } else if (idx == ARMV7_IDX_CYCLE_COUNTER) { armv7pmu_write_counter()
672 armv7_pmnc_select_counter(idx); armv7pmu_write_counter()
677 static inline void armv7_pmnc_write_evtsel(int idx, u32 val) armv7_pmnc_write_evtsel() argument
679 armv7_pmnc_select_counter(idx); armv7_pmnc_write_evtsel()
684 static inline void armv7_pmnc_enable_counter(int idx) armv7_pmnc_enable_counter() argument
686 u32 counter = ARMV7_IDX_TO_COUNTER(idx); armv7_pmnc_enable_counter()
690 static inline void armv7_pmnc_disable_counter(int idx) armv7_pmnc_disable_counter() argument
692 u32 counter = ARMV7_IDX_TO_COUNTER(idx); armv7_pmnc_disable_counter()
696 static inline void armv7_pmnc_enable_intens(int idx) armv7_pmnc_enable_intens() argument
698 u32 counter = ARMV7_IDX_TO_COUNTER(idx); armv7_pmnc_enable_intens()
702 static inline void armv7_pmnc_disable_intens(int idx) armv7_pmnc_disable_intens() argument
704 u32 counter = ARMV7_IDX_TO_COUNTER(idx); armv7_pmnc_disable_intens()
771 int idx = hwc->idx; armv7pmu_enable_event() local
773 if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) { armv7pmu_enable_event()
775 smp_processor_id(), idx); armv7pmu_enable_event()
788 armv7_pmnc_disable_counter(idx); armv7pmu_enable_event()
795 if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER) armv7pmu_enable_event()
796 armv7_pmnc_write_evtsel(idx, hwc->config_base); armv7pmu_enable_event()
801 armv7_pmnc_enable_intens(idx); armv7pmu_enable_event()
806 armv7_pmnc_enable_counter(idx); armv7pmu_enable_event()
817 int idx = hwc->idx; armv7pmu_disable_event() local
819 if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) { armv7pmu_disable_event()
821 smp_processor_id(), idx); armv7pmu_disable_event()
833 armv7_pmnc_disable_counter(idx); armv7pmu_disable_event()
838 armv7_pmnc_disable_intens(idx); armv7pmu_disable_event()
850 int idx; armv7pmu_handle_irq() local
868 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { armv7pmu_handle_irq()
869 struct perf_event *event = cpuc->events[idx]; armv7pmu_handle_irq()
880 if (!armv7_pmnc_counter_has_overflowed(pmnc, idx)) armv7pmu_handle_irq()
930 int idx; armv7pmu_get_event_idx() local
947 for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) { armv7pmu_get_event_idx()
948 if (!test_and_set_bit(idx, cpuc->used_mask)) armv7pmu_get_event_idx()
949 return idx; armv7pmu_get_event_idx()
985 u32 idx, nb_cnt = cpu_pmu->num_events; armv7pmu_reset() local
988 for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) { armv7pmu_reset()
989 armv7_pmnc_disable_counter(idx); armv7pmu_reset()
990 armv7_pmnc_disable_intens(idx); armv7pmu_reset()
1270 static void krait_evt_setup(int idx, u32 config_base) krait_evt_setup() argument
1292 armv7_pmnc_write_evtsel(idx, val); krait_evt_setup()
1352 int idx = hwc->idx; krait_pmu_disable_event() local
1360 armv7_pmnc_disable_counter(idx); krait_pmu_disable_event()
1369 armv7_pmnc_disable_intens(idx); krait_pmu_disable_event()
1378 int idx = hwc->idx; krait_pmu_enable_event() local
1389 armv7_pmnc_disable_counter(idx); krait_pmu_enable_event()
1397 krait_evt_setup(idx, hwc->config_base); krait_pmu_enable_event()
1399 armv7_pmnc_write_evtsel(idx, hwc->config_base); krait_pmu_enable_event()
1402 armv7_pmnc_enable_intens(idx); krait_pmu_enable_event()
1405 armv7_pmnc_enable_counter(idx); krait_pmu_enable_event()
1414 u32 idx, nb_cnt = cpu_pmu->num_events; krait_pmu_reset() local
1428 for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) { krait_pmu_reset()
1429 armv7_pmnc_select_counter(idx); krait_pmu_reset()
1464 int idx; krait_pmu_get_event_idx() local
1485 idx = armv7pmu_get_event_idx(cpuc, event); krait_pmu_get_event_idx()
1486 if (idx < 0 && bit >= 0) krait_pmu_get_event_idx()
1489 return idx; krait_pmu_get_event_idx()
1616 static void scorpion_evt_setup(int idx, u32 config_base) scorpion_evt_setup() argument
1638 armv7_pmnc_write_evtsel(idx, val); scorpion_evt_setup()
1684 int idx = hwc->idx; scorpion_pmu_disable_event() local
1692 armv7_pmnc_disable_counter(idx); scorpion_pmu_disable_event()
1701 armv7_pmnc_disable_intens(idx); scorpion_pmu_disable_event()
1710 int idx = hwc->idx; scorpion_pmu_enable_event() local
1721 armv7_pmnc_disable_counter(idx); scorpion_pmu_enable_event()
1729 scorpion_evt_setup(idx, hwc->config_base); scorpion_pmu_enable_event()
1730 else if (idx != ARMV7_IDX_CYCLE_COUNTER) scorpion_pmu_enable_event()
1731 armv7_pmnc_write_evtsel(idx, hwc->config_base); scorpion_pmu_enable_event()
1734 armv7_pmnc_enable_intens(idx); scorpion_pmu_enable_event()
1737 armv7_pmnc_enable_counter(idx); scorpion_pmu_enable_event()
1746 u32 idx, nb_cnt = cpu_pmu->num_events; scorpion_pmu_reset() local
1761 for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) { scorpion_pmu_reset()
1762 armv7_pmnc_select_counter(idx); scorpion_pmu_reset()
1796 int idx; scorpion_pmu_get_event_idx() local
1814 idx = armv7pmu_get_event_idx(cpuc, event); scorpion_pmu_get_event_idx()
1815 if (idx < 0 && bit >= 0) scorpion_pmu_get_event_idx()
1818 return idx; scorpion_pmu_get_event_idx()
H A Dperf_event_v6.c238 int counter = hwc->idx; armv6pmu_read_counter()
256 int counter = hwc->idx; armv6pmu_write_counter()
274 int idx = hwc->idx; armv6pmu_enable_event() local
276 if (ARMV6_CYCLE_COUNTER == idx) { armv6pmu_enable_event()
279 } else if (ARMV6_COUNTER0 == idx) { armv6pmu_enable_event()
283 } else if (ARMV6_COUNTER1 == idx) { armv6pmu_enable_event()
288 WARN_ONCE(1, "invalid counter number (%d)\n", idx); armv6pmu_enable_event()
313 int idx; armv6pmu_handle_irq() local
327 for (idx = 0; idx < cpu_pmu->num_events; ++idx) { armv6pmu_handle_irq()
328 struct perf_event *event = cpuc->events[idx]; armv6pmu_handle_irq()
339 if (!armv6_pmcr_counter_has_overflowed(pmcr, idx)) armv6pmu_handle_irq()
421 int idx = hwc->idx; armv6pmu_disable_event() local
423 if (ARMV6_CYCLE_COUNTER == idx) { armv6pmu_disable_event()
426 } else if (ARMV6_COUNTER0 == idx) { armv6pmu_disable_event()
429 } else if (ARMV6_COUNTER1 == idx) { armv6pmu_disable_event()
433 WARN_ONCE(1, "invalid counter number (%d)\n", idx); armv6pmu_disable_event()
456 int idx = hwc->idx; armv6mpcore_pmu_disable_event() local
458 if (ARMV6_CYCLE_COUNTER == idx) { armv6mpcore_pmu_disable_event()
460 } else if (ARMV6_COUNTER0 == idx) { armv6mpcore_pmu_disable_event()
462 } else if (ARMV6_COUNTER1 == idx) { armv6mpcore_pmu_disable_event()
465 WARN_ONCE(1, "invalid counter number (%d)\n", idx); armv6mpcore_pmu_disable_event()
/linux-4.4.14/drivers/media/dvb-core/
H A Ddvb_ringbuffer.c208 ssize_t dvb_ringbuffer_pkt_read_user(struct dvb_ringbuffer *rbuf, size_t idx, dvb_ringbuffer_pkt_read_user() argument
215 pktlen = rbuf->data[idx] << 8; dvb_ringbuffer_pkt_read_user()
216 pktlen |= rbuf->data[(idx + 1) % rbuf->size]; dvb_ringbuffer_pkt_read_user()
220 idx = (idx + DVB_RINGBUFFER_PKTHDRSIZE + offset) % rbuf->size; dvb_ringbuffer_pkt_read_user()
222 split = ((idx + len) > rbuf->size) ? rbuf->size - idx : 0; dvb_ringbuffer_pkt_read_user()
224 if (copy_to_user(buf, rbuf->data+idx, split)) dvb_ringbuffer_pkt_read_user()
228 idx = 0; dvb_ringbuffer_pkt_read_user()
230 if (copy_to_user(buf, rbuf->data+idx, todo)) dvb_ringbuffer_pkt_read_user()
236 ssize_t dvb_ringbuffer_pkt_read(struct dvb_ringbuffer *rbuf, size_t idx, dvb_ringbuffer_pkt_read() argument
243 pktlen = rbuf->data[idx] << 8; dvb_ringbuffer_pkt_read()
244 pktlen |= rbuf->data[(idx + 1) % rbuf->size]; dvb_ringbuffer_pkt_read()
248 idx = (idx + DVB_RINGBUFFER_PKTHDRSIZE + offset) % rbuf->size; dvb_ringbuffer_pkt_read()
250 split = ((idx + len) > rbuf->size) ? rbuf->size - idx : 0; dvb_ringbuffer_pkt_read()
252 memcpy(buf, rbuf->data+idx, split); dvb_ringbuffer_pkt_read()
255 idx = 0; dvb_ringbuffer_pkt_read()
257 memcpy(buf, rbuf->data+idx, todo); dvb_ringbuffer_pkt_read()
261 void dvb_ringbuffer_pkt_dispose(struct dvb_ringbuffer *rbuf, size_t idx) dvb_ringbuffer_pkt_dispose() argument
265 rbuf->data[(idx + 2) % rbuf->size] = PKT_DISPOSED; dvb_ringbuffer_pkt_dispose()
280 ssize_t dvb_ringbuffer_pkt_next(struct dvb_ringbuffer *rbuf, size_t idx, size_t* pktlen) dvb_ringbuffer_pkt_next() argument
286 if (idx == -1) { dvb_ringbuffer_pkt_next()
287 idx = rbuf->pread; dvb_ringbuffer_pkt_next()
289 curpktlen = rbuf->data[idx] << 8; dvb_ringbuffer_pkt_next()
290 curpktlen |= rbuf->data[(idx + 1) % rbuf->size]; dvb_ringbuffer_pkt_next()
291 idx = (idx + curpktlen + DVB_RINGBUFFER_PKTHDRSIZE) % rbuf->size; dvb_ringbuffer_pkt_next()
294 consumed = (idx - rbuf->pread) % rbuf->size; dvb_ringbuffer_pkt_next()
298 curpktlen = rbuf->data[idx] << 8; dvb_ringbuffer_pkt_next()
299 curpktlen |= rbuf->data[(idx + 1) % rbuf->size]; dvb_ringbuffer_pkt_next()
300 curpktstatus = rbuf->data[(idx + 2) % rbuf->size]; dvb_ringbuffer_pkt_next()
304 return idx; dvb_ringbuffer_pkt_next()
308 idx = (idx + curpktlen + DVB_RINGBUFFER_PKTHDRSIZE) % rbuf->size; dvb_ringbuffer_pkt_next()
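Note: dvb_ringbuffer_pkt_next() and friends above frame packets as a 2-byte big-endian length plus a 1-byte status, with every index taken modulo the buffer size so headers and payloads may wrap. A sketch of just that framing arithmetic (the buffer contents are fabricated):

    #include <stdint.h>
    #include <stdio.h>

    #define HDR 3           /* DVB_RINGBUFFER_PKTHDRSIZE */

    static uint8_t rb[16];

    static size_t pkt_len(size_t idx, size_t size)
    {
            return ((size_t)rb[idx] << 8) | rb[(idx + 1) % size];
    }

    static size_t pkt_next(size_t idx, size_t size)
    {
            return (idx + HDR + pkt_len(idx, size)) % size;
    }

    int main(void)
    {
            rb[0] = 0; rb[1] = 5;   /* packet 0: 5 payload bytes */
            rb[8] = 0; rb[9] = 2;   /* packet 1 begins at 0 + 3 + 5 = 8 */
            printf("next=%zu len=%zu\n", pkt_next(0, 16), pkt_len(8, 16));
            return 0;
    }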
/linux-4.4.14/arch/powerpc/mm/
H A Dhighmem.c35 int idx, type; kmap_atomic_prot() local
43 idx = type + KM_TYPE_NR*smp_processor_id(); kmap_atomic_prot()
44 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic_prot()
46 BUG_ON(!pte_none(*(kmap_pte-idx))); kmap_atomic_prot()
48 __set_pte_at(&init_mm, vaddr, kmap_pte-idx, mk_pte(page, prot), 1); kmap_atomic_prot()
70 unsigned int idx; __kunmap_atomic() local
72 idx = type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic()
73 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); __kunmap_atomic()
79 pte_clear(&init_mm, vaddr, kmap_pte-idx); __kunmap_atomic()
/linux-4.4.14/arch/sh/kernel/cpu/sh2a/
H A Dclock-sh7201.c39 int idx = (__raw_readw(FREQCR) & 0x0007); module_clk_recalc() local
40 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
49 int idx = (__raw_readw(FREQCR) & 0x0007); bus_clk_recalc() local
50 return clk->parent->rate / pfc_divisors[idx]; bus_clk_recalc()
59 int idx = ((__raw_readw(FREQCR) >> 4) & 0x0007); cpu_clk_recalc() local
60 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
74 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
83 if (idx < ARRAY_SIZE(sh7201_clk_ops)) arch_init_clk_ops()
84 *ops = sh7201_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7203.c41 int idx = (__raw_readw(FREQCR) & 0x0007); module_clk_recalc() local
42 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
51 int idx = (__raw_readw(FREQCR) & 0x0007); bus_clk_recalc() local
52 return clk->parent->rate / pfc_divisors[idx-2]; bus_clk_recalc()
70 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
79 if (idx < ARRAY_SIZE(sh7203_clk_ops)) arch_init_clk_ops()
80 *ops = sh7203_clk_ops[idx]; arch_init_clk_ops()
H A Dclock-sh7206.c38 int idx = (__raw_readw(FREQCR) & 0x0007); module_clk_recalc() local
39 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
57 int idx = (__raw_readw(FREQCR) & 0x0007); cpu_clk_recalc() local
58 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
72 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
81 if (idx < ARRAY_SIZE(sh7206_clk_ops)) arch_init_clk_ops()
82 *ops = sh7206_clk_ops[idx]; arch_init_clk_ops()
/linux-4.4.14/arch/sh/kernel/cpu/sh4/
H A Dclock-sh4.c40 int idx = (__raw_readw(FRQCR) & 0x0007); module_clk_recalc() local
41 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
50 int idx = (__raw_readw(FRQCR) >> 3) & 0x0007; bus_clk_recalc() local
51 return clk->parent->rate / bfc_divisors[idx]; bus_clk_recalc()
60 int idx = (__raw_readw(FRQCR) >> 6) & 0x0007; cpu_clk_recalc() local
61 return clk->parent->rate / ifc_divisors[idx]; cpu_clk_recalc()
75 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
77 if (idx < ARRAY_SIZE(sh4_clk_ops)) arch_init_clk_ops()
78 *ops = sh4_clk_ops[idx]; arch_init_clk_ops()
H A Dperf_event.c205 static u64 sh7750_pmu_read(int idx) sh7750_pmu_read() argument
207 return (u64)((u64)(__raw_readl(PMCTRH(idx)) & 0xffff) << 32) | sh7750_pmu_read()
208 __raw_readl(PMCTRL(idx)); sh7750_pmu_read()
211 static void sh7750_pmu_disable(struct hw_perf_event *hwc, int idx) sh7750_pmu_disable() argument
215 tmp = __raw_readw(PMCR(idx)); sh7750_pmu_disable()
217 __raw_writew(tmp, PMCR(idx)); sh7750_pmu_disable()
220 static void sh7750_pmu_enable(struct hw_perf_event *hwc, int idx) sh7750_pmu_enable() argument
222 __raw_writew(__raw_readw(PMCR(idx)) | PMCR_PMCLR, PMCR(idx)); sh7750_pmu_enable()
223 __raw_writew(hwc->config | PMCR_PMEN | PMCR_PMST, PMCR(idx)); sh7750_pmu_enable()
/linux-4.4.14/arch/microblaze/mm/
H A Dhighmem.c38 int idx, type; kmap_atomic_prot() local
47 idx = type + KM_TYPE_NR*smp_processor_id(); kmap_atomic_prot()
48 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic_prot()
50 BUG_ON(!pte_none(*(kmap_pte-idx))); kmap_atomic_prot()
52 set_pte_at(&init_mm, vaddr, kmap_pte-idx, mk_pte(page, prot)); kmap_atomic_prot()
73 unsigned int idx; __kunmap_atomic() local
75 idx = type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic()
76 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); __kunmap_atomic()
82 pte_clear(&init_mm, vaddr, kmap_pte-idx); __kunmap_atomic()
/linux-4.4.14/drivers/irqchip/
H A Dirq-bcm7120-l2.c64 unsigned int idx; bcm7120_l2_intc_irq_handle() local
68 for (idx = 0; idx < b->n_words; idx++) { bcm7120_l2_intc_irq_handle()
69 int base = idx * IRQS_PER_WORD; bcm7120_l2_intc_irq_handle()
76 pending = irq_reg_readl(gc, b->stat_offset[idx]) & bcm7120_l2_intc_irq_handle()
78 data->irq_map_mask[idx]; bcm7120_l2_intc_irq_handle()
118 unsigned int idx; bcm7120_l2_intc_init_one() local
135 for (idx = 0; idx < data->n_words; idx++) { bcm7120_l2_intc_init_one()
137 l1_data->irq_map_mask[idx] |= bcm7120_l2_intc_init_one()
139 irq * data->n_words + idx); bcm7120_l2_intc_init_one()
141 l1_data->irq_map_mask[idx] = 0xffffffff; bcm7120_l2_intc_init_one()
143 valid_mask[idx] |= l1_data->irq_map_mask[idx]; bcm7120_l2_intc_init_one()
229 unsigned int idx, irq, flags; bcm7120_l2_intc_probe() local
254 for (idx = 0; idx < data->n_words; idx++) { bcm7120_l2_intc_probe()
255 __raw_writel(data->irq_fwd_mask[idx], bcm7120_l2_intc_probe()
256 data->pair_base[idx] + bcm7120_l2_intc_probe()
257 data->en_offset[idx]); bcm7120_l2_intc_probe()
290 for (idx = 0; idx < data->n_words; idx++) { bcm7120_l2_intc_probe()
291 irq = idx * IRQS_PER_WORD; bcm7120_l2_intc_probe()
294 gc->unused = 0xffffffff & ~valid_mask[idx]; bcm7120_l2_intc_probe()
298 gc->reg_base = data->pair_base[idx]; bcm7120_l2_intc_probe()
299 ct->regs.mask = data->en_offset[idx]; bcm7120_l2_intc_probe()
334 for (idx = 0; idx < MAX_MAPPINGS; idx++) { bcm7120_l2_intc_probe()
335 if (data->map_base[idx]) bcm7120_l2_intc_probe()
336 iounmap(data->map_base[idx]); bcm7120_l2_intc_probe()
H A Dirq-bcm7038-l1.c123 unsigned int idx; bcm7038_l1_irq_handle() local
133 for (idx = 0; idx < intc->n_words; idx++) { bcm7038_l1_irq_handle()
134 int base = idx * IRQS_PER_WORD; bcm7038_l1_irq_handle()
139 pending = l1_readl(cpu->map_base + reg_status(intc, idx)) & bcm7038_l1_irq_handle()
140 ~cpu->mask_cache[idx]; bcm7038_l1_irq_handle()
220 unsigned int idx, bcm7038_l1_init_one()
228 if (of_address_to_resource(dn, idx, &res)) bcm7038_l1_init_one()
240 cpu = intc->cpus[idx] = kzalloc(sizeof(*cpu) + n_words * sizeof(u32), bcm7038_l1_init_one()
254 parent_irq = irq_of_parse_and_map(dn, idx); bcm7038_l1_init_one()
289 int idx, ret; bcm7038_l1_of_init() local
296 for_each_possible_cpu(idx) { for_each_possible_cpu()
297 ret = bcm7038_l1_init_one(dn, idx, intc); for_each_possible_cpu()
299 if (idx) for_each_possible_cpu()
320 for_each_possible_cpu(idx) { for_each_possible_cpu()
321 struct bcm7038_l1_cpu *cpu = intc->cpus[idx]; for_each_possible_cpu()
219 bcm7038_l1_init_one(struct device_node *dn, unsigned int idx, struct bcm7038_l1_chip *intc) bcm7038_l1_init_one() argument
/linux-4.4.14/arch/arm/mm/
H A Dhighmem.c21 static inline void set_fixmap_pte(int idx, pte_t pte) set_fixmap_pte() argument
23 unsigned long vaddr = __fix_to_virt(idx); set_fixmap_pte()
57 unsigned int idx; kmap_atomic() local
82 idx = FIX_KMAP_BEGIN + type + KM_TYPE_NR * smp_processor_id(); kmap_atomic()
83 vaddr = __fix_to_virt(idx); kmap_atomic()
96 set_fixmap_pte(idx, mk_pte(page, kmap_prot)); kmap_atomic()
105 int idx, type; __kunmap_atomic() local
109 idx = FIX_KMAP_BEGIN + type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic()
114 BUG_ON(vaddr != __fix_to_virt(idx)); __kunmap_atomic()
115 set_fixmap_pte(idx, __pte(0)); __kunmap_atomic()
117 (void) idx; /* to kill a warning */ __kunmap_atomic()
132 int idx, type; kmap_atomic_pfn() local
141 idx = FIX_KMAP_BEGIN + type + KM_TYPE_NR * smp_processor_id(); kmap_atomic_pfn()
142 vaddr = __fix_to_virt(idx); kmap_atomic_pfn()
146 set_fixmap_pte(idx, pfn_pte(pfn, kmap_prot)); kmap_atomic_pfn()
/linux-4.4.14/arch/x86/um/
H A Dtls_32.c66 int idx; get_free_idx() local
71 for (idx = 0; idx < GDT_ENTRY_TLS_ENTRIES; idx++) get_free_idx()
72 if (!t->arch.tls_array[idx].present) get_free_idx()
73 return idx + GDT_ENTRY_TLS_MIN; get_free_idx()
95 int idx; load_TLS() local
97 for (idx = GDT_ENTRY_TLS_MIN; idx < GDT_ENTRY_TLS_MAX; idx++) { load_TLS()
99 &to->thread.arch.tls_array[idx - GDT_ENTRY_TLS_MIN]; load_TLS()
108 curr->tls.entry_number = idx; load_TLS()
204 int idx, int flushed) set_tls_entry()
208 if (idx < GDT_ENTRY_TLS_MIN || idx > GDT_ENTRY_TLS_MAX) set_tls_entry()
211 t->arch.tls_array[idx - GDT_ENTRY_TLS_MIN].tls = *info; set_tls_entry()
212 t->arch.tls_array[idx - GDT_ENTRY_TLS_MIN].present = 1; set_tls_entry()
213 t->arch.tls_array[idx - GDT_ENTRY_TLS_MIN].flushed = flushed; set_tls_entry()
221 int idx, ret = -EFAULT; arch_copy_tls() local
232 idx = info.entry_number; arch_copy_tls()
234 ret = set_tls_entry(new, &info, idx, 0); arch_copy_tls()
241 int idx) get_tls_entry()
248 if (idx < GDT_ENTRY_TLS_MIN || idx > GDT_ENTRY_TLS_MAX) get_tls_entry()
251 if (!t->arch.tls_array[idx - GDT_ENTRY_TLS_MIN].present) get_tls_entry()
254 *info = t->arch.tls_array[idx - GDT_ENTRY_TLS_MIN].tls; get_tls_entry()
262 !t->arch.tls_array[idx - GDT_ENTRY_TLS_MIN].flushed)) { get_tls_entry()
275 info->entry_number = idx; get_tls_entry()
282 int idx, ret; SYSCALL_DEFINE1() local
290 idx = info.entry_number; SYSCALL_DEFINE1()
292 if (idx == -1) { SYSCALL_DEFINE1()
293 idx = get_free_idx(current); SYSCALL_DEFINE1()
294 if (idx < 0) SYSCALL_DEFINE1()
295 return idx; SYSCALL_DEFINE1()
296 info.entry_number = idx; SYSCALL_DEFINE1()
298 if (put_user(idx, &user_desc->entry_number)) SYSCALL_DEFINE1()
305 return set_tls_entry(current, &info, idx, 1); SYSCALL_DEFINE1()
313 int ptrace_set_thread_area(struct task_struct *child, int idx, ptrace_set_thread_area() argument
324 return set_tls_entry(child, &info, idx, 0); ptrace_set_thread_area()
330 int idx, ret; SYSCALL_DEFINE1() local
335 if (get_user(idx, &user_desc->entry_number)) SYSCALL_DEFINE1()
338 ret = get_tls_entry(current, &info, idx); SYSCALL_DEFINE1()
352 int ptrace_get_thread_area(struct task_struct *child, int idx, ptrace_get_thread_area() argument
361 ret = get_tls_entry(child, &info, idx); ptrace_get_thread_area()
203 set_tls_entry(struct task_struct* task, struct user_desc *info, int idx, int flushed) set_tls_entry() argument
240 get_tls_entry(struct task_struct *task, struct user_desc *info, int idx) get_tls_entry() argument
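
Both this file and the native x86 tls.c further down use the same convention: entry numbers visible to userspace start at GDT_ENTRY_TLS_MIN, while the backing array is zero-based, so the free-slot scan adds the offset on the way out and every lookup subtracts it on the way in. A small standalone model of that scan and bounds check, with constants borrowed from x86 purely for illustration:

#include <stdbool.h>
#include <stdio.h>

#define GDT_ENTRY_TLS_MIN     6
#define GDT_ENTRY_TLS_ENTRIES 3
#define GDT_ENTRY_TLS_MAX (GDT_ENTRY_TLS_MIN + GDT_ENTRY_TLS_ENTRIES - 1)

static bool present[GDT_ENTRY_TLS_ENTRIES];

/* scan the zero-based array, return a user-visible entry number */
static int get_free_idx(void)
{
	for (int idx = 0; idx < GDT_ENTRY_TLS_ENTRIES; idx++)
		if (!present[idx])
			return idx + GDT_ENTRY_TLS_MIN;
	return -1;	/* stands in for -ESRCH */
}

/* validate the user-visible range, then index with the offset removed */
static int set_entry(int idx)
{
	if (idx < GDT_ENTRY_TLS_MIN || idx > GDT_ENTRY_TLS_MAX)
		return -1;	/* stands in for -EINVAL */
	present[idx - GDT_ENTRY_TLS_MIN] = true;
	return 0;
}

int main(void)
{
	int idx = get_free_idx();

	printf("first free entry: %d (%s)\n", idx,
	       set_entry(idx) ? "rejected" : "stored");
	return 0;
}
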
/linux-4.4.14/drivers/staging/lustre/lustre/obdclass/
lprocfs_counters.c:44 void lprocfs_counter_add(struct lprocfs_stats *stats, int idx, long amount) lprocfs_counter_add() argument
54 LASSERTF(0 <= idx && idx < stats->ls_num, lprocfs_counter_add()
55 "idx %d, ls_num %hu\n", idx, stats->ls_num); lprocfs_counter_add()
63 header = &stats->ls_cnt_header[idx]; lprocfs_counter_add()
64 percpu_cntr = lprocfs_stats_counter_get(stats, smp_id, idx); lprocfs_counter_add()
92 void lprocfs_counter_sub(struct lprocfs_stats *stats, int idx, long amount) lprocfs_counter_sub() argument
102 LASSERTF(0 <= idx && idx < stats->ls_num, lprocfs_counter_sub()
103 "idx %d, ls_num %hu\n", idx, stats->ls_num); lprocfs_counter_sub()
111 header = &stats->ls_cnt_header[idx]; lprocfs_counter_sub()
112 percpu_cntr = lprocfs_stats_counter_get(stats, smp_id, idx); lprocfs_counter_sub()
/linux-4.4.14/sound/isa/gus/
gus_mem_proc.c:61 int idx; snd_gf1_mem_proc_init() local
66 for (idx = 0; idx < 4; idx++) { snd_gf1_mem_proc_init()
67 if (gus->gf1.mem_alloc.banks_8[idx].size > 0) { snd_gf1_mem_proc_init()
72 sprintf(name, "gus-ram-%i", idx); snd_gf1_mem_proc_init()
78 priv->address = gus->gf1.mem_alloc.banks_8[idx].address; snd_gf1_mem_proc_init()
79 priv->size = entry->size = gus->gf1.mem_alloc.banks_8[idx].size; snd_gf1_mem_proc_init()
83 for (idx = 0; idx < 4; idx++) { snd_gf1_mem_proc_init()
84 if (gus->gf1.rom_present & (1 << idx)) { snd_gf1_mem_proc_init()
90 sprintf(name, "gus-rom-%i", idx); snd_gf1_mem_proc_init()
96 priv->address = idx * 4096 * 1024; snd_gf1_mem_proc_init()
gus_mixer.c:158 unsigned int idx, max; snd_gf1_new_mixer() local
179 for (idx = 0; idx < max; idx++) { snd_gf1_new_mixer()
180 if ((err = snd_ctl_add(card, snd_ctl_new1(&snd_gf1_controls[idx], gus))) < 0) snd_gf1_new_mixer()
184 for (idx = 0; idx < ARRAY_SIZE(snd_ics_controls); idx++) { snd_gf1_new_mixer()
185 if ((err = snd_ctl_add(card, snd_ctl_new1(&snd_ics_controls[idx], gus))) < 0) snd_gf1_new_mixer()
/linux-4.4.14/arch/x86/kernel/cpu/
perf_event_intel_uncore_nhmex.c:243 if (hwc->idx >= UNCORE_PMC_IDX_FIXED) nhmex_uncore_msr_enable_event()
364 reg1->idx = 0; nhmex_bbox_hw_config()
376 if (reg1->idx != EXTRA_REG_NONE) { nhmex_bbox_msr_enable_event()
449 reg1->idx = 0; nhmex_sbox_hw_config()
461 if (reg1->idx != EXTRA_REG_NONE) { nhmex_sbox_msr_enable_event()
546 static bool nhmex_mbox_get_shared_reg(struct intel_uncore_box *box, int idx, u64 config) nhmex_mbox_get_shared_reg() argument
553 if (idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) { nhmex_mbox_get_shared_reg()
554 er = &box->shared_regs[idx]; nhmex_mbox_get_shared_reg()
570 idx -= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC; nhmex_mbox_get_shared_reg()
571 if (WARN_ON_ONCE(idx >= 4)) nhmex_mbox_get_shared_reg()
583 if (__BITS_VALUE(atomic_read(&er->ref), idx, 8)) { nhmex_mbox_get_shared_reg()
585 mask |= NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx); nhmex_mbox_get_shared_reg()
587 mask |= WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx); nhmex_mbox_get_shared_reg()
591 atomic_add(1 << (idx * 8), &er->ref); nhmex_mbox_get_shared_reg()
594 NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx); nhmex_mbox_get_shared_reg()
597 WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx); nhmex_mbox_get_shared_reg()
607 static void nhmex_mbox_put_shared_reg(struct intel_uncore_box *box, int idx) nhmex_mbox_put_shared_reg() argument
611 if (idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) { nhmex_mbox_put_shared_reg()
612 er = &box->shared_regs[idx]; nhmex_mbox_put_shared_reg()
617 idx -= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC; nhmex_mbox_put_shared_reg()
619 atomic_sub(1 << (idx * 8), &er->ref); nhmex_mbox_put_shared_reg()
626 u64 idx, orig_idx = __BITS_VALUE(reg1->idx, 0, 8); nhmex_mbox_alter_er() local
630 idx = orig_idx - EXTRA_REG_NHMEX_M_ZDP_CTL_FVC; nhmex_mbox_alter_er()
632 config &= NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx); nhmex_mbox_alter_er()
634 config &= WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx); nhmex_mbox_alter_er()
636 idx = new_idx - orig_idx; nhmex_mbox_alter_er()
637 config <<= 3 * idx; nhmex_mbox_alter_er() local
639 idx = orig_idx - new_idx; nhmex_mbox_alter_er()
640 config >>= 3 * idx; nhmex_mbox_alter_er() local
652 hwc->config += idx << NHMEX_M_PMON_CTL_INC_SEL_SHIFT; nhmex_mbox_alter_er()
654 hwc->config -= idx << NHMEX_M_PMON_CTL_INC_SEL_SHIFT; nhmex_mbox_alter_er()
656 reg1->idx = ~0xff | new_idx; nhmex_mbox_alter_er()
666 int i, idx[2], alloc = 0; nhmex_mbox_get_constraint() local
669 idx[0] = __BITS_VALUE(reg1->idx, 0, 8); nhmex_mbox_get_constraint()
670 idx[1] = __BITS_VALUE(reg1->idx, 1, 8); nhmex_mbox_get_constraint()
674 idx[i] = 0xff; nhmex_mbox_get_constraint()
676 if (idx[i] == 0xff) nhmex_mbox_get_constraint()
679 if (!nhmex_mbox_get_shared_reg(box, idx[i], nhmex_mbox_get_constraint()
686 if (reg2->idx != EXTRA_REG_NONE && nhmex_mbox_get_constraint()
688 !nhmex_mbox_get_shared_reg(box, reg2->idx, reg2->config)) nhmex_mbox_get_constraint()
698 if (idx[0] != 0xff && idx[0] != __BITS_VALUE(reg1->idx, 0, 8)) nhmex_mbox_get_constraint()
699 nhmex_mbox_alter_er(event, idx[0], true); nhmex_mbox_get_constraint()
701 if (reg2->idx != EXTRA_REG_NONE) nhmex_mbox_get_constraint()
706 if (idx[0] != 0xff && !(alloc & 0x1) && nhmex_mbox_get_constraint()
707 idx[0] >= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) { nhmex_mbox_get_constraint()
714 BUG_ON(__BITS_VALUE(reg1->idx, 1, 8) != 0xff); nhmex_mbox_get_constraint()
715 idx[0] -= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC; nhmex_mbox_get_constraint()
716 idx[0] = (idx[0] + 1) % 4; nhmex_mbox_get_constraint()
717 idx[0] += EXTRA_REG_NHMEX_M_ZDP_CTL_FVC; nhmex_mbox_get_constraint()
718 if (idx[0] != __BITS_VALUE(reg1->idx, 0, 8)) { nhmex_mbox_get_constraint()
719 config1 = nhmex_mbox_alter_er(event, idx[0], false); nhmex_mbox_get_constraint()
725 nhmex_mbox_put_shared_reg(box, idx[0]); nhmex_mbox_get_constraint()
727 nhmex_mbox_put_shared_reg(box, idx[1]); nhmex_mbox_get_constraint()
740 nhmex_mbox_put_shared_reg(box, __BITS_VALUE(reg1->idx, 0, 8)); nhmex_mbox_put_constraint()
742 nhmex_mbox_put_shared_reg(box, __BITS_VALUE(reg1->idx, 1, 8)); nhmex_mbox_put_constraint()
746 nhmex_mbox_put_shared_reg(box, reg2->idx); nhmex_mbox_put_constraint()
753 if (er->idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) nhmex_mbox_extra_reg_idx()
754 return er->idx; nhmex_mbox_extra_reg_idx()
755 return er->idx + (er->event >> NHMEX_M_PMON_CTL_INC_SEL_SHIFT) - 0xd; nhmex_mbox_extra_reg_idx()
778 if (WARN_ON_ONCE(msr >= 0xffff || er->idx >= 0xff)) nhmex_mbox_hw_config()
782 if (er->idx == EXTRA_REG_NHMEX_M_PLD) nhmex_mbox_hw_config()
787 reg1->idx &= ~(0xff << (reg_idx * 8)); nhmex_mbox_hw_config()
789 reg1->idx |= nhmex_mbox_extra_reg_idx(er) << (reg_idx * 8); nhmex_mbox_hw_config()
799 reg2->idx = EXTRA_REG_NHMEX_M_FILTER; nhmex_mbox_hw_config()
812 static u64 nhmex_mbox_shared_reg_config(struct intel_uncore_box *box, int idx) nhmex_mbox_shared_reg_config() argument
818 if (idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) nhmex_mbox_shared_reg_config()
819 return box->shared_regs[idx].config; nhmex_mbox_shared_reg_config()
833 int idx; nhmex_mbox_msr_enable_event() local
835 idx = __BITS_VALUE(reg1->idx, 0, 8); nhmex_mbox_msr_enable_event()
836 if (idx != 0xff) nhmex_mbox_msr_enable_event()
838 nhmex_mbox_shared_reg_config(box, idx)); nhmex_mbox_msr_enable_event()
839 idx = __BITS_VALUE(reg1->idx, 1, 8); nhmex_mbox_msr_enable_event()
840 if (idx != 0xff) nhmex_mbox_msr_enable_event()
842 nhmex_mbox_shared_reg_config(box, idx)); nhmex_mbox_msr_enable_event()
844 if (reg2->idx != EXTRA_REG_NONE) { nhmex_mbox_msr_enable_event()
943 if (reg1->idx % 2) { nhmex_rbox_alter_er()
944 reg1->idx--; nhmex_rbox_alter_er()
947 reg1->idx++; nhmex_rbox_alter_er()
952 switch (reg1->idx % 6) { nhmex_rbox_alter_er()
978 int idx, er_idx; nhmex_rbox_get_constraint() local
985 idx = reg1->idx % 6; nhmex_rbox_get_constraint()
988 er_idx = idx; nhmex_rbox_get_constraint()
992 er_idx += (reg1->idx / 6) * 5; nhmex_rbox_get_constraint()
996 if (idx < 2) { nhmex_rbox_get_constraint()
1002 } else if (idx == 2 || idx == 3) { nhmex_rbox_get_constraint()
1007 u64 mask = 0xff << ((idx - 2) * 8); nhmex_rbox_get_constraint()
1008 if (!__BITS_VALUE(atomic_read(&er->ref), idx - 2, 8) || nhmex_rbox_get_constraint()
1010 atomic_add(1 << ((idx - 2) * 8), &er->ref); nhmex_rbox_get_constraint()
1036 idx ^= 1; nhmex_rbox_get_constraint()
1037 if (idx != reg1->idx % 6) { nhmex_rbox_get_constraint()
1038 if (idx == 2) nhmex_rbox_get_constraint()
1040 else if (idx == 3) nhmex_rbox_get_constraint()
1046 if (idx != reg1->idx % 6) nhmex_rbox_get_constraint()
1059 int idx, er_idx; nhmex_rbox_put_constraint() local
1064 idx = reg1->idx % 6; nhmex_rbox_put_constraint()
1065 er_idx = idx; nhmex_rbox_put_constraint()
1068 er_idx += (reg1->idx / 6) * 5; nhmex_rbox_put_constraint()
1071 if (idx == 2 || idx == 3) nhmex_rbox_put_constraint()
1072 atomic_sub(1 << ((idx - 2) * 8), &er->ref); nhmex_rbox_put_constraint()
1084 int idx; nhmex_rbox_hw_config() local
1086 idx = (event->hw.config & NHMEX_R_PMON_CTL_EV_SEL_MASK) >> nhmex_rbox_hw_config()
1088 if (idx >= 0x18) nhmex_rbox_hw_config()
1091 reg1->idx = idx; nhmex_rbox_hw_config()
1094 switch (idx % 6) { nhmex_rbox_hw_config()
1109 int idx, port; nhmex_rbox_msr_enable_event() local
1111 idx = reg1->idx; nhmex_rbox_msr_enable_event()
1112 port = idx / 6 + box->pmu->pmu_idx * 4; nhmex_rbox_msr_enable_event()
1114 switch (idx % 6) { nhmex_rbox_msr_enable_event()
1124 uncore_shared_reg_config(box, 2 + (idx / 6) * 5)); nhmex_rbox_msr_enable_event()
perf_event_intel_uncore.h:16 #define UNCORE_PCI_DEV_DATA(type, idx) ((type << 8) | idx)
165 unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx) uncore_pci_event_ctl() argument
167 return idx * 4 + box->pmu->type->event_ctl; uncore_pci_event_ctl()
171 unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx) uncore_pci_perf_ctr() argument
173 return idx * 8 + box->pmu->type->perf_ctr; uncore_pci_perf_ctr()
204 unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx) uncore_msr_event_ctl() argument
207 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + uncore_msr_event_ctl()
212 unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx) uncore_msr_perf_ctr() argument
215 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + uncore_msr_perf_ctr()
238 unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx) uncore_event_ctl() argument
241 return uncore_pci_event_ctl(box, idx); uncore_event_ctl()
243 return uncore_msr_event_ctl(box, idx); uncore_event_ctl()
247 unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx) uncore_perf_ctr() argument
250 return uncore_pci_perf_ctr(box, idx); uncore_perf_ctr()
252 return uncore_msr_perf_ctr(box, idx); uncore_perf_ctr()
324 u64 uncore_shared_reg_config(struct intel_uncore_box *box, int idx);
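
These helpers all reduce a counter index to a register address with a fixed stride from a per-type base: 4 bytes per control register in PCI config space, 8 per counter, and a doubled MSR stride when control and counter registers are interleaved in pairs. A hedged sketch of that arithmetic, with the base values invented for the demo:

#include <stdbool.h>
#include <stdio.h>

struct uncore_type {
	unsigned event_ctl;	/* base of the control registers */
	bool     pair_ctr_ctl;	/* ctl/ctr interleaved in pairs? */
};

/* MSR space: unit stride, doubled when ctl/ctr alternate */
static unsigned msr_event_ctl(const struct uncore_type *t, int idx)
{
	return t->event_ctl + (t->pair_ctr_ctl ? 2 * idx : idx);
}

/* PCI config space: one 4-byte control register per counter */
static unsigned pci_event_ctl(const struct uncore_type *t, int idx)
{
	return idx * 4 + t->event_ctl;
}

int main(void)
{
	struct uncore_type paired = { 0xd00, true };	/* assumed base */

	for (int idx = 0; idx < 3; idx++)
		printf("idx %d -> msr %#x, pci %#x\n", idx,
		       msr_event_ctl(&paired, idx),
		       pci_event_ctl(&paired, idx));
	return 0;
}
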
perf_event_intel_cstate.c:112 bool (*test)(int idx);
133 bool test_core(int idx) test_core() argument
147 if (idx == PERF_CSTATE_CORE_C3_RES || test_core()
148 idx == PERF_CSTATE_CORE_C6_RES) test_core()
169 if (idx == PERF_CSTATE_CORE_C3_RES || test_core()
170 idx == PERF_CSTATE_CORE_C6_RES || test_core()
171 idx == PERF_CSTATE_CORE_C7_RES) test_core()
177 if (idx == PERF_CSTATE_CORE_C1_RES || test_core()
178 idx == PERF_CSTATE_CORE_C6_RES) test_core()
260 bool test_pkg(int idx) test_pkg() argument
274 if (idx == PERF_CSTATE_CORE_C3_RES || test_pkg()
275 idx == PERF_CSTATE_CORE_C6_RES || test_pkg()
276 idx == PERF_CSTATE_CORE_C7_RES) test_pkg()
296 if (idx == PERF_CSTATE_PKG_C2_RES || test_pkg()
297 idx == PERF_CSTATE_PKG_C3_RES || test_pkg()
298 idx == PERF_CSTATE_PKG_C6_RES || test_pkg()
299 idx == PERF_CSTATE_PKG_C7_RES) test_pkg()
305 if (idx == PERF_CSTATE_CORE_C6_RES) test_pkg()
309 if (idx == PERF_CSTATE_PKG_C2_RES || test_pkg()
310 idx == PERF_CSTATE_PKG_C3_RES || test_pkg()
311 idx == PERF_CSTATE_PKG_C6_RES || test_pkg()
312 idx == PERF_CSTATE_PKG_C7_RES || test_pkg()
313 idx == PERF_CSTATE_PKG_C8_RES || test_pkg()
314 idx == PERF_CSTATE_PKG_C9_RES || test_pkg()
315 idx == PERF_CSTATE_PKG_C10_RES) test_pkg()
420 event->hw.idx = -1; cstate_pmu_event_init()
/linux-4.4.14/kernel/sched/
cpudeadline.c:41 swap(cp->elements[cpu_a].idx, cp->elements[cpu_b].idx); cpudl_exchange()
44 static void cpudl_heapify(struct cpudl *cp, int idx) cpudl_heapify() argument
50 l = left_child(idx); cpudl_heapify()
51 r = right_child(idx); cpudl_heapify()
52 largest = idx; cpudl_heapify()
54 if ((l < cp->size) && dl_time_before(cp->elements[idx].dl, cpudl_heapify()
60 if (largest == idx) cpudl_heapify()
63 /* Push idx down the heap one level and bump one up */ cpudl_heapify()
64 cpudl_exchange(cp, largest, idx); cpudl_heapify()
65 idx = largest; cpudl_heapify()
69 static void cpudl_change_key(struct cpudl *cp, int idx, u64 new_dl) cpudl_change_key() argument
71 WARN_ON(idx == IDX_INVALID || !cpu_present(idx)); cpudl_change_key()
73 if (dl_time_before(new_dl, cp->elements[idx].dl)) { cpudl_change_key()
74 cp->elements[idx].dl = new_dl; cpudl_change_key()
75 cpudl_heapify(cp, idx); cpudl_change_key()
77 cp->elements[idx].dl = new_dl; cpudl_change_key()
78 while (idx > 0 && dl_time_before(cp->elements[parent(idx)].dl, cpudl_change_key()
79 cp->elements[idx].dl)) { cpudl_change_key()
80 cpudl_exchange(cp, idx, parent(idx)); cpudl_change_key()
81 idx = parent(idx); cpudl_change_key()
140 old_idx = cp->elements[cpu].idx; cpudl_set()
155 cp->elements[new_cpu].idx = old_idx; cpudl_set()
156 cp->elements[cpu].idx = IDX_INVALID; cpudl_set()
173 cp->elements[cpu].idx = cp->size - 1; cpudl_set()
228 cp->elements[i].idx = IDX_INVALID; cpudl_init()
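
cpudl_heapify() above is an ordinary array-heap sift-down: parent(), left_child() and right_child() are pure index arithmetic, and cpudl_change_key() walks idx toward the root with parent(idx) when the key grows. A toy version over plain ints; the deadline comparison and the cpu-to-idx reverse map kept by cpudl_exchange() are dropped:

#include <stdio.h>

#define parent(i)      (((i) - 1) >> 1)
#define left_child(i)  ((i) * 2 + 1)
#define right_child(i) ((i) * 2 + 2)

static void heapify(int *a, int size, int idx)
{
	for (;;) {
		int l = left_child(idx), r = right_child(idx), largest = idx;

		if (l < size && a[l] > a[largest])
			largest = l;
		if (r < size && a[r] > a[largest])
			largest = r;
		if (largest == idx)
			break;
		/* push idx down one level and bump the larger child up */
		int tmp = a[idx]; a[idx] = a[largest]; a[largest] = tmp;
		idx = largest;
	}
}

int main(void)
{
	int a[] = { 1, 9, 3, 7, 5 };

	heapify(a, 5, 0);
	printf("root after sift-down: %d\n", a[0]);	/* prints 9 */
	return 0;
}
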
/linux-4.4.14/arch/blackfin/kernel/cplb-mpu/
cplbmgr.c:103 int idx; dcplb_miss() local
127 int idx = page >> 5; dcplb_miss() local
130 if (mask[idx] & bit) dcplb_miss()
149 int idx = page >> 5; dcplb_miss() local
152 if (mask[idx] & bit) dcplb_miss()
156 if (mask[idx] & bit) dcplb_miss()
160 idx = evict_one_dcplb(cpu); dcplb_miss()
163 dcplb_tbl[cpu][idx].addr = addr; dcplb_miss()
164 dcplb_tbl[cpu][idx].data = d_data; dcplb_miss()
167 bfin_write32(DCPLB_DATA0 + idx * 4, d_data); dcplb_miss()
168 bfin_write32(DCPLB_ADDR0 + idx * 4, addr); dcplb_miss()
178 int idx; icplb_miss() local
195 for (idx = first_switched_icplb; idx < MAX_CPLBS; idx++) { icplb_miss()
196 if (icplb_tbl[cpu][idx].data & CPLB_VALID) { icplb_miss()
197 unsigned long this_addr = icplb_tbl[cpu][idx].addr; icplb_miss()
227 int idx = page >> 5; icplb_miss() local
231 if (mask[idx] & bit) icplb_miss()
258 int idx = page >> 5; icplb_miss() local
262 if (mask[idx] & bit) icplb_miss()
267 idx = evict_one_icplb(cpu); icplb_miss()
269 icplb_tbl[cpu][idx].addr = addr; icplb_miss()
270 icplb_tbl[cpu][idx].data = i_data; icplb_miss()
273 bfin_write32(ICPLB_DATA0 + idx * 4, i_data); icplb_miss()
274 bfin_write32(ICPLB_ADDR0 + idx * 4, addr); icplb_miss()
287 int idx = faulting_cplb_index(status); dcplb_protection_fault() local
288 unsigned long data = dcplb_tbl[cpu][idx].data; dcplb_protection_fault()
292 dcplb_tbl[cpu][idx].data = data; dcplb_protection_fault()
293 bfin_write32(DCPLB_DATA0 + idx * 4, data); dcplb_protection_fault()
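
The mask[idx] & bit tests above index a per-page permission bitmap: the word is page >> 5 and the bit within it is page & 31, i.e. 32 pages per 32-bit word. The same logic, standalone:

#include <stdint.h>
#include <stdio.h>

static int test_page(const uint32_t *mask, unsigned long page)
{
	unsigned long idx = page >> 5;		/* 32 pages per word */
	uint32_t bit = 1u << (page & 31);	/* bit within the word */

	return (mask[idx] & bit) != 0;
}

static void set_page(uint32_t *mask, unsigned long page)
{
	mask[page >> 5] |= 1u << (page & 31);
}

int main(void)
{
	uint32_t mask[4] = { 0 };	/* covers 128 pages */

	set_page(mask, 37);
	printf("page 37: %d, page 38: %d\n",
	       test_page(mask, 37), test_page(mask, 38));
	return 0;
}
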
/linux-4.4.14/mm/
hugetlb_cgroup.c:61 int idx; hugetlb_cgroup_have_usage() local
63 for (idx = 0; idx < hugetlb_max_hstate; idx++) { hugetlb_cgroup_have_usage()
64 if (page_counter_read(&h_cg->hugepage[idx])) hugetlb_cgroup_have_usage()
75 int idx; hugetlb_cgroup_css_alloc() local
82 for (idx = 0; idx < HUGE_MAX_HSTATE; idx++) hugetlb_cgroup_css_alloc()
83 page_counter_init(&h_cgroup->hugepage[idx], hugetlb_cgroup_css_alloc()
84 &parent_h_cgroup->hugepage[idx]); hugetlb_cgroup_css_alloc()
87 for (idx = 0; idx < HUGE_MAX_HSTATE; idx++) hugetlb_cgroup_css_alloc()
88 page_counter_init(&h_cgroup->hugepage[idx], NULL); hugetlb_cgroup_css_alloc()
109 static void hugetlb_cgroup_move_parent(int idx, struct hugetlb_cgroup *h_cg, hugetlb_cgroup_move_parent() argument
130 page_counter_charge(&parent->hugepage[idx], nr_pages); hugetlb_cgroup_move_parent()
132 counter = &h_cg->hugepage[idx]; hugetlb_cgroup_move_parent()
150 int idx = 0; hugetlb_cgroup_css_offline() local
156 hugetlb_cgroup_move_parent(idx, h_cg, page); for_each_hstate()
159 idx++; for_each_hstate()
165 int hugetlb_cgroup_charge_cgroup(int idx, unsigned long nr_pages, hugetlb_cgroup_charge_cgroup() argument
178 if (huge_page_order(&hstates[idx]) < HUGETLB_CGROUP_MIN_ORDER) hugetlb_cgroup_charge_cgroup()
189 if (!page_counter_try_charge(&h_cg->hugepage[idx], nr_pages, &counter)) hugetlb_cgroup_charge_cgroup()
198 void hugetlb_cgroup_commit_charge(int idx, unsigned long nr_pages, hugetlb_cgroup_commit_charge() argument
212 void hugetlb_cgroup_uncharge_page(int idx, unsigned long nr_pages, hugetlb_cgroup_uncharge_page() argument
224 page_counter_uncharge(&h_cg->hugepage[idx], nr_pages); hugetlb_cgroup_uncharge_page()
228 void hugetlb_cgroup_uncharge_cgroup(int idx, unsigned long nr_pages, hugetlb_cgroup_uncharge_cgroup() argument
234 if (huge_page_order(&hstates[idx]) < HUGETLB_CGROUP_MIN_ORDER) hugetlb_cgroup_uncharge_cgroup()
237 page_counter_uncharge(&h_cg->hugepage[idx], nr_pages); hugetlb_cgroup_uncharge_cgroup()
275 int ret, idx; hugetlb_cgroup_write() local
287 idx = MEMFILE_IDX(of_cft(of)->private); hugetlb_cgroup_write()
292 ret = page_counter_limit(&h_cg->hugepage[idx], nr_pages); hugetlb_cgroup_write()
336 static void __init __hugetlb_cgroup_file_init(int idx) __hugetlb_cgroup_file_init() argument
340 struct hstate *h = &hstates[idx]; __hugetlb_cgroup_file_init()
348 cft->private = MEMFILE_PRIVATE(idx, RES_LIMIT); __hugetlb_cgroup_file_init()
355 cft->private = MEMFILE_PRIVATE(idx, RES_USAGE); __hugetlb_cgroup_file_init()
361 cft->private = MEMFILE_PRIVATE(idx, RES_MAX_USAGE); __hugetlb_cgroup_file_init()
368 cft->private = MEMFILE_PRIVATE(idx, RES_FAILCNT); __hugetlb_cgroup_file_init()
early_ioremap.c:49 static inline void __init __late_set_fixmap(enum fixed_addresses idx, __late_set_fixmap() argument
57 static inline void __init __late_clear_fixmap(enum fixed_addresses idx) __late_clear_fixmap() argument
103 enum fixed_addresses idx; __early_ioremap() local
143 idx = FIX_BTMAP_BEGIN - NR_FIX_BTMAPS*slot; __early_ioremap()
146 __late_set_fixmap(idx, phys_addr, prot); __early_ioremap()
148 __early_set_fixmap(idx, phys_addr, prot); __early_ioremap()
150 --idx; __early_ioremap()
165 enum fixed_addresses idx; early_iounmap() local
195 idx = FIX_BTMAP_BEGIN - NR_FIX_BTMAPS*slot; early_iounmap()
198 __late_clear_fixmap(idx); early_iounmap()
200 __early_set_fixmap(idx, 0, FIXMAP_PAGE_CLEAR); early_iounmap()
201 --idx; early_iounmap()
swap_cgroup.c:42 unsigned long idx, max; swap_cgroup_prepare() local
46 for (idx = 0; idx < ctrl->length; idx++) { swap_cgroup_prepare()
50 ctrl->map[idx] = page; swap_cgroup_prepare()
54 max = idx; swap_cgroup_prepare()
55 for (idx = 0; idx < max; idx++) swap_cgroup_prepare()
56 __free_page(ctrl->map[idx]); swap_cgroup_prepare()
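
swap_cgroup_prepare() is the standard partial-failure unwind: when allocation fails at some idx, record max = idx and free exactly the entries [0, max) that already succeeded. A userspace equivalent with malloc() standing in for alloc_page():

#include <stdio.h>
#include <stdlib.h>

static int prepare(void **map, unsigned long length)
{
	unsigned long idx, max;

	for (idx = 0; idx < length; idx++) {
		map[idx] = malloc(4096);
		if (!map[idx])
			goto not_enough_page;
	}
	return 0;

not_enough_page:
	max = idx;			/* first index that failed */
	for (idx = 0; idx < max; idx++)	/* free only what succeeded */
		free(map[idx]);
	return -1;
}

int main(void)
{
	void *map[8];

	if (prepare(map, 8) == 0) {
		puts("all 8 pages allocated");
		for (int i = 0; i < 8; i++)
			free(map[i]);
	}
	return 0;
}
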
/linux-4.4.14/arch/arm/mach-omap2/
cm2xxx_3xxx.h:53 static inline u32 omap2_cm_read_mod_reg(s16 module, u16 idx) omap2_cm_read_mod_reg() argument
55 return readl_relaxed(cm_base + module + idx); omap2_cm_read_mod_reg()
58 static inline void omap2_cm_write_mod_reg(u32 val, s16 module, u16 idx) omap2_cm_write_mod_reg() argument
60 writel_relaxed(val, cm_base + module + idx); omap2_cm_write_mod_reg()
65 s16 idx) omap2_cm_rmw_mod_reg_bits()
69 v = omap2_cm_read_mod_reg(module, idx); omap2_cm_rmw_mod_reg_bits()
72 omap2_cm_write_mod_reg(v, module, idx); omap2_cm_rmw_mod_reg_bits()
78 static inline u32 omap2_cm_read_mod_bits_shift(s16 domain, s16 idx, u32 mask) omap2_cm_read_mod_bits_shift() argument
82 v = omap2_cm_read_mod_reg(domain, idx); omap2_cm_read_mod_bits_shift()
89 static inline u32 omap2_cm_set_mod_reg_bits(u32 bits, s16 module, s16 idx) omap2_cm_set_mod_reg_bits() argument
91 return omap2_cm_rmw_mod_reg_bits(bits, bits, module, idx); omap2_cm_set_mod_reg_bits()
94 static inline u32 omap2_cm_clear_mod_reg_bits(u32 bits, s16 module, s16 idx) omap2_cm_clear_mod_reg_bits() argument
96 return omap2_cm_rmw_mod_reg_bits(bits, 0x0, module, idx); omap2_cm_clear_mod_reg_bits()
64 omap2_cm_rmw_mod_reg_bits(u32 mask, u32 bits, s16 module, s16 idx) omap2_cm_rmw_mod_reg_bits() argument
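
omap2_cm_rmw_mod_reg_bits() above is a textbook read-modify-write, and the set/clear helpers are just the rmw with bits == mask and bits == 0 respectively. The same shape, modeled on a plain array rather than a memory-mapped register file:

#include <stdint.h>
#include <stdio.h>

static uint32_t regs[4];	/* stand-in for cm_base + module + idx */

static uint32_t rmw_reg_bits(uint32_t mask, uint32_t bits, unsigned idx)
{
	uint32_t v = regs[idx];

	v &= ~mask;	/* clear the field */
	v |= bits;	/* set the new value */
	regs[idx] = v;
	return v;
}

#define set_reg_bits(b, idx)   rmw_reg_bits((b), (b), (idx))
#define clear_reg_bits(b, idx) rmw_reg_bits((b), 0x0, (idx))

int main(void)
{
	regs[1] = 0xff00ff00;
	set_reg_bits(0x0000000f, 1);
	clear_reg_bits(0x0f000000, 1);
	printf("reg1 = %#x\n", regs[1]);	/* prints 0xf000ff0f */
	return 0;
}
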
prcm_mpu_44xx_54xx.h:29 extern u32 omap4_prcm_mpu_read_inst_reg(s16 inst, u16 idx);
30 extern void omap4_prcm_mpu_write_inst_reg(u32 val, s16 inst, u16 idx);
32 s16 idx);
prminst44xx.h:23 extern u32 omap4_prminst_read_inst_reg(u8 part, s16 inst, u16 idx);
24 extern void omap4_prminst_write_inst_reg(u32 val, u8 part, s16 inst, u16 idx);
26 s16 inst, u16 idx);
/linux-4.4.14/net/ipv6/netfilter/
ip6t_NPT.c:46 unsigned int i, idx; ip6t_npt_map_pfx() local
57 idx = i / 32; ip6t_npt_map_pfx()
58 addr->s6_addr32[idx] &= mask; ip6t_npt_map_pfx()
59 addr->s6_addr32[idx] |= ~mask & npt->dst_pfx.in6.s6_addr32[idx]; ip6t_npt_map_pfx()
63 idx = 3; ip6t_npt_map_pfx()
65 for (idx = 4; idx < ARRAY_SIZE(addr->s6_addr16); idx++) { ip6t_npt_map_pfx()
66 if ((__force __sum16)addr->s6_addr16[idx] != ip6t_npt_map_pfx()
70 if (idx == ARRAY_SIZE(addr->s6_addr16)) ip6t_npt_map_pfx()
74 sum = ~csum_fold(csum_add(csum_unfold((__force __sum16)addr->s6_addr16[idx]), ip6t_npt_map_pfx()
78 *(__force __sum16 *)&addr->s6_addr16[idx] = sum; ip6t_npt_map_pfx()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/clk/
gk104.c:145 read_clk(struct gk104_clk *clk, int idx) read_clk() argument
148 u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4)); read_clk()
151 if (idx < 7) { read_clk()
153 if (ssel & (1 << idx)) { read_clk()
154 sclk = read_pll(clk, 0x137000 + (idx * 0x20)); read_clk()
157 sclk = read_div(clk, idx, 0x137160, 0x1371d0); read_clk()
161 u32 ssrc = nvkm_rd32(device, 0x137160 + (idx * 0x04)); read_clk()
163 sclk = read_div(clk, idx, 0x137160, 0x1371d0); read_clk()
172 sclk = read_div(clk, idx, 0x137160, 0x1371d0); read_clk()
223 calc_div(struct gk104_clk *clk, int idx, u32 ref, u32 freq, u32 *ddiv) calc_div() argument
234 calc_src(struct gk104_clk *clk, int idx, u32 freq, u32 *dsrc, u32 *ddiv) calc_src() argument
256 sclk = read_vco(clk, 0x137160 + (idx * 4)); calc_src()
257 if (idx < 7) calc_src()
258 sclk = calc_div(clk, idx, sclk, freq, ddiv); calc_src()
263 calc_pll(struct gk104_clk *clk, int idx, u32 freq, u32 *coef) calc_pll() argument
270 ret = nvbios_pll_parse(bios, 0x137000 + (idx * 0x20), &limits); calc_pll()
274 limits.refclk = read_div(clk, idx, 0x137120, 0x137140); calc_pll()
288 struct nvkm_cstate *cstate, int idx, int dom) calc_clk()
290 struct gk104_clk_info *info = &clk->eng[idx]; calc_clk()
300 clk0 = calc_src(clk, idx, freq, &src0, &div0); calc_clk()
301 clk0 = calc_div(clk, idx, clk0, freq, &div1D); calc_clk()
304 if (clk0 != freq && (0x0000ff87 & (1 << idx))) { calc_clk()
305 if (idx <= 7) calc_clk()
306 clk1 = calc_pll(clk, idx, freq, &info->coef); calc_clk()
309 clk1 = calc_div(clk, idx, clk1, freq, &div1P); calc_clk()
330 info->ssel = (1 << idx); calc_clk()
357 gk104_clk_prog_0(struct gk104_clk *clk, int idx) gk104_clk_prog_0() argument
359 struct gk104_clk_info *info = &clk->eng[idx]; gk104_clk_prog_0()
362 nvkm_mask(device, 0x1371d0 + (idx * 0x04), 0x8000003f, info->ddiv); gk104_clk_prog_0()
363 nvkm_wr32(device, 0x137160 + (idx * 0x04), info->dsrc); gk104_clk_prog_0()
368 gk104_clk_prog_1_0(struct gk104_clk *clk, int idx) gk104_clk_prog_1_0() argument
371 nvkm_mask(device, 0x137100, (1 << idx), 0x00000000); gk104_clk_prog_1_0()
373 if (!(nvkm_rd32(device, 0x137100) & (1 << idx))) gk104_clk_prog_1_0()
379 gk104_clk_prog_1_1(struct gk104_clk *clk, int idx) gk104_clk_prog_1_1() argument
382 nvkm_mask(device, 0x137160 + (idx * 0x04), 0x00000100, 0x00000000); gk104_clk_prog_1_1()
386 gk104_clk_prog_2(struct gk104_clk *clk, int idx) gk104_clk_prog_2() argument
388 struct gk104_clk_info *info = &clk->eng[idx]; gk104_clk_prog_2()
390 const u32 addr = 0x137000 + (idx * 0x20); gk104_clk_prog_2()
405 gk104_clk_prog_3(struct gk104_clk *clk, int idx) gk104_clk_prog_3() argument
407 struct gk104_clk_info *info = &clk->eng[idx]; gk104_clk_prog_3()
410 nvkm_mask(device, 0x137250 + (idx * 0x04), 0x00003f00, info->mdiv); gk104_clk_prog_3()
412 nvkm_mask(device, 0x137250 + (idx * 0x04), 0x0000003f, info->mdiv); gk104_clk_prog_3()
416 gk104_clk_prog_4_0(struct gk104_clk *clk, int idx) gk104_clk_prog_4_0() argument
418 struct gk104_clk_info *info = &clk->eng[idx]; gk104_clk_prog_4_0()
421 nvkm_mask(device, 0x137100, (1 << idx), info->ssel); gk104_clk_prog_4_0()
423 u32 tmp = nvkm_rd32(device, 0x137100) & (1 << idx); gk104_clk_prog_4_0()
431 gk104_clk_prog_4_1(struct gk104_clk *clk, int idx) gk104_clk_prog_4_1() argument
433 struct gk104_clk_info *info = &clk->eng[idx]; gk104_clk_prog_4_1()
436 nvkm_mask(device, 0x137160 + (idx * 0x04), 0x40000000, 0x40000000); gk104_clk_prog_4_1()
437 nvkm_mask(device, 0x137160 + (idx * 0x04), 0x00000100, 0x00000100); gk104_clk_prog_4_1()
287 calc_clk(struct gk104_clk *clk, struct nvkm_cstate *cstate, int idx, int dom) calc_clk() argument
gf100.c:125 read_clk(struct gf100_clk *clk, int idx) read_clk() argument
128 u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4)); read_clk()
132 if (ssel & (1 << idx)) { read_clk()
133 if (idx < 7) read_clk()
134 sclk = read_pll(clk, 0x137000 + (idx * 0x20)); read_clk()
139 sclk = read_div(clk, idx, 0x137160, 0x1371d0); read_clk()
202 calc_div(struct gf100_clk *clk, int idx, u32 ref, u32 freq, u32 *ddiv) calc_div() argument
213 calc_src(struct gf100_clk *clk, int idx, u32 freq, u32 *dsrc, u32 *ddiv) calc_src() argument
235 sclk = read_vco(clk, 0x137160 + (idx * 4)); calc_src()
236 if (idx < 7) calc_src()
237 sclk = calc_div(clk, idx, sclk, freq, ddiv); calc_src()
242 calc_pll(struct gf100_clk *clk, int idx, u32 freq, u32 *coef) calc_pll() argument
249 ret = nvbios_pll_parse(bios, 0x137000 + (idx * 0x20), &limits); calc_pll()
253 limits.refclk = read_div(clk, idx, 0x137120, 0x137140); calc_pll()
266 calc_clk(struct gf100_clk *clk, struct nvkm_cstate *cstate, int idx, int dom) calc_clk() argument
268 struct gf100_clk_info *info = &clk->eng[idx]; calc_clk()
278 clk0 = calc_src(clk, idx, freq, &src0, &div0); calc_clk()
279 clk0 = calc_div(clk, idx, clk0, freq, &div1D); calc_clk()
282 if (clk0 != freq && (0x00004387 & (1 << idx))) { calc_clk()
283 if (idx <= 7) calc_clk()
284 clk1 = calc_pll(clk, idx, freq, &info->coef); calc_clk()
287 clk1 = calc_div(clk, idx, clk1, freq, &div1P); calc_clk()
309 info->ssel = (1 << idx); calc_clk()
336 gf100_clk_prog_0(struct gf100_clk *clk, int idx) gf100_clk_prog_0() argument
338 struct gf100_clk_info *info = &clk->eng[idx]; gf100_clk_prog_0()
340 if (idx < 7 && !info->ssel) { gf100_clk_prog_0()
341 nvkm_mask(device, 0x1371d0 + (idx * 0x04), 0x80003f3f, info->ddiv); gf100_clk_prog_0()
342 nvkm_wr32(device, 0x137160 + (idx * 0x04), info->dsrc); gf100_clk_prog_0()
347 gf100_clk_prog_1(struct gf100_clk *clk, int idx) gf100_clk_prog_1() argument
350 nvkm_mask(device, 0x137100, (1 << idx), 0x00000000); gf100_clk_prog_1()
352 if (!(nvkm_rd32(device, 0x137100) & (1 << idx))) gf100_clk_prog_1()
358 gf100_clk_prog_2(struct gf100_clk *clk, int idx) gf100_clk_prog_2() argument
360 struct gf100_clk_info *info = &clk->eng[idx]; gf100_clk_prog_2()
362 const u32 addr = 0x137000 + (idx * 0x20); gf100_clk_prog_2()
363 if (idx <= 7) { gf100_clk_prog_2()
379 gf100_clk_prog_3(struct gf100_clk *clk, int idx) gf100_clk_prog_3() argument
381 struct gf100_clk_info *info = &clk->eng[idx]; gf100_clk_prog_3()
384 nvkm_mask(device, 0x137100, (1 << idx), info->ssel); gf100_clk_prog_3()
386 u32 tmp = nvkm_rd32(device, 0x137100) & (1 << idx); gf100_clk_prog_3()
394 gf100_clk_prog_4(struct gf100_clk *clk, int idx) gf100_clk_prog_4() argument
396 struct gf100_clk_info *info = &clk->eng[idx]; gf100_clk_prog_4()
398 nvkm_mask(device, 0x137250 + (idx * 0x04), 0x00003f3f, info->mdiv); gf100_clk_prog_4()
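
Throughout gk104.c and gf100.c a clock domain's registers are found by stride: small per-domain words at base + idx * 0x04, and a 0x20-byte PLL window per domain at 0x137000 + idx * 0x20. A sketch that just prints the computed addresses; the register offsets are copied from the excerpts, everything else is assumed:

#include <stdint.h>
#include <stdio.h>

static inline uint32_t dsrc_reg(int idx) { return 0x137160 + idx * 0x04; }
static inline uint32_t ddiv_reg(int idx) { return 0x1371d0 + idx * 0x04; }
static inline uint32_t pll_base(int idx) { return 0x137000 + idx * 0x20; }

int main(void)
{
	for (int idx = 0; idx < 3; idx++)
		printf("dom %d: dsrc %#x ddiv %#x pll %#x\n",
		       idx, dsrc_reg(idx), ddiv_reg(idx), pll_base(idx));
	return 0;
}
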
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bios/
disp.c:59 nvbios_disp_entry(struct nvkm_bios *bios, u8 idx, u8 *ver, u8 *len, u8 *sub) nvbios_disp_entry() argument
63 if (data && idx < cnt) nvbios_disp_entry()
64 return data + hdr + (idx * *len); nvbios_disp_entry()
70 nvbios_disp_parse(struct nvkm_bios *bios, u8 idx, u8 *ver, u8 *len, u8 *sub, nvbios_disp_parse() argument
73 u16 data = nvbios_disp_entry(bios, idx, ver, len, sub); nvbios_disp_parse()
82 nvbios_outp_entry(struct nvkm_bios *bios, u8 idx, nvbios_outp_entry() argument
86 u16 data = nvbios_disp_parse(bios, idx, ver, len, hdr, &info); nvbios_outp_entry()
96 nvbios_outp_parse(struct nvkm_bios *bios, u8 idx, nvbios_outp_parse() argument
99 u16 data = nvbios_outp_entry(bios, idx, ver, hdr, cnt, len); nvbios_outp_parse()
119 u16 data, idx = 0; nvbios_outp_match() local
120 while ((data = nvbios_outp_parse(bios, idx++, ver, hdr, cnt, len, info)) || *ver) { nvbios_outp_match()
130 nvbios_ocfg_entry(struct nvkm_bios *bios, u16 outp, u8 idx, nvbios_ocfg_entry() argument
133 if (idx < *cnt) nvbios_ocfg_entry()
134 return outp + *hdr + (idx * *len); nvbios_ocfg_entry()
139 nvbios_ocfg_parse(struct nvkm_bios *bios, u16 outp, u8 idx, nvbios_ocfg_parse() argument
142 u16 data = nvbios_ocfg_entry(bios, outp, idx, ver, hdr, cnt, len); nvbios_ocfg_parse()
155 u16 data, idx = 0; nvbios_ocfg_match() local
156 while ((data = nvbios_ocfg_parse(bios, outp, idx++, ver, hdr, cnt, len, info))) { nvbios_ocfg_match()
P0260.c:58 nvbios_P0260Ee(struct nvkm_bios *bios, int idx, u8 *ver, u8 *len) nvbios_P0260Ee() argument
62 if (data && idx < cnt) nvbios_P0260Ee()
63 return data + hdr + (idx * *len); nvbios_P0260Ee()
68 nvbios_P0260Ep(struct nvkm_bios *bios, int idx, u8 *ver, u8 *len, nvbios_P0260Ep() argument
71 u32 data = nvbios_P0260Ee(bios, idx, ver, len); nvbios_P0260Ep()
84 nvbios_P0260Xe(struct nvkm_bios *bios, int idx, u8 *ver, u8 *xsz) nvbios_P0260Xe() argument
88 if (data && idx < xnr) nvbios_P0260Xe()
89 return data + hdr + (cnt * len) + (idx * *xsz); nvbios_P0260Xe()
94 nvbios_P0260Xp(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr, nvbios_P0260Xp() argument
97 u32 data = nvbios_P0260Xe(bios, idx, ver, hdr); nvbios_P0260Xp()
boost.c:59 nvbios_boostEe(struct nvkm_bios *bios, int idx, nvbios_boostEe() argument
64 if (data && idx < *cnt) { nvbios_boostEe()
65 data = data + *hdr + (idx * (*len + (snr * ssz))); nvbios_boostEe()
75 nvbios_boostEp(struct nvkm_bios *bios, int idx, nvbios_boostEp() argument
78 u16 data = nvbios_boostEe(bios, idx, ver, hdr, cnt, len); nvbios_boostEp()
92 u32 data, idx = 0; nvbios_boostEm() local
93 while ((data = nvbios_boostEp(bios, idx++, ver, hdr, cnt, len, info))) { nvbios_boostEm()
101 nvbios_boostSe(struct nvkm_bios *bios, int idx, nvbios_boostSe() argument
104 if (data && idx < cnt) { nvbios_boostSe()
105 data = data + *hdr + (idx * len); nvbios_boostSe()
113 nvbios_boostSp(struct nvkm_bios *bios, int idx, nvbios_boostSp() argument
117 data = nvbios_boostSe(bios, idx, data, ver, hdr, cnt, len); nvbios_boostSp()
cstep.c:59 nvbios_cstepEe(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr) nvbios_cstepEe() argument
63 if (data && idx < cnt) { nvbios_cstepEe()
64 data = data + *hdr + (idx * len); nvbios_cstepEe()
72 nvbios_cstepEp(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr, nvbios_cstepEp() argument
75 u16 data = nvbios_cstepEe(bios, idx, ver, hdr); nvbios_cstepEp()
88 u32 data, idx = 0; nvbios_cstepEm() local
89 while ((data = nvbios_cstepEp(bios, idx++, ver, hdr, info))) { nvbios_cstepEm()
97 nvbios_cstepXe(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr) nvbios_cstepXe() argument
101 if (data && idx < xnr) { nvbios_cstepXe()
102 data = data + *hdr + (cnt * len) + (idx * xsz); nvbios_cstepXe()
110 nvbios_cstepXp(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr, nvbios_cstepXp() argument
113 u16 data = nvbios_cstepXe(bios, idx, ver, hdr); nvbios_cstepXp()
dp.c:58 nvbios_dpout_entry(struct nvkm_bios *bios, u8 idx, nvbios_dpout_entry() argument
62 if (data && idx < *cnt) { nvbios_dpout_entry()
63 u16 outp = nvbios_rd16(bios, data + *hdr + idx * *len); nvbios_dpout_entry()
87 nvbios_dpout_parse(struct nvkm_bios *bios, u8 idx, nvbios_dpout_parse() argument
91 u16 data = nvbios_dpout_entry(bios, idx, ver, hdr, cnt, len); nvbios_dpout_parse()
133 u16 data, idx = 0; nvbios_dpout_match() local
134 while ((data = nvbios_dpout_parse(bios, idx++, ver, hdr, cnt, len, info)) || *ver) { nvbios_dpout_match()
144 nvbios_dpcfg_entry(struct nvkm_bios *bios, u16 outp, u8 idx, nvbios_dpcfg_entry() argument
155 if (idx < *cnt) nvbios_dpcfg_entry()
156 return outp + *hdr + (idx * *len); nvbios_dpcfg_entry()
162 nvbios_dpcfg_parse(struct nvkm_bios *bios, u16 outp, u8 idx, nvbios_dpcfg_parse() argument
166 u16 data = nvbios_dpcfg_entry(bios, outp, idx, ver, hdr, cnt, len); nvbios_dpcfg_parse()
196 u8 idx = 0xff; nvbios_dpcfg_match() local
201 idx = (pc * 10) + vsoff[vs] + pe; nvbios_dpcfg_match()
203 idx += nvbios_rd08(bios, outp + 0x11) * 40; nvbios_dpcfg_match()
205 while ((data = nvbios_dpcfg_entry(bios, outp, ++idx, nvbios_dpcfg_match()
213 return nvbios_dpcfg_parse(bios, outp, idx, ver, hdr, cnt, len, info); nvbios_dpcfg_match()
M0205.c:75 nvbios_M0205Ee(struct nvkm_bios *bios, int idx, nvbios_M0205Ee() argument
80 if (data && idx < *cnt) { nvbios_M0205Ee()
81 data = data + *hdr + idx * (*len + (snr * ssz)); nvbios_M0205Ee()
91 nvbios_M0205Ep(struct nvkm_bios *bios, int idx, nvbios_M0205Ep() argument
95 u32 data = nvbios_M0205Ee(bios, idx, ver, hdr, cnt, len); nvbios_M0205Ep()
108 nvbios_M0205Se(struct nvkm_bios *bios, int ent, int idx, u8 *ver, u8 *hdr) nvbios_M0205Se() argument
113 if (data && idx < cnt) { nvbios_M0205Se()
114 data = data + *hdr + idx * len; nvbios_M0205Se()
122 nvbios_M0205Sp(struct nvkm_bios *bios, int ent, int idx, u8 *ver, u8 *hdr, nvbios_M0205Sp() argument
125 u32 data = nvbios_M0205Se(bios, ent, idx, ver, hdr); nvbios_M0205Sp()
M0209.c:58 nvbios_M0209Ee(struct nvkm_bios *bios, int idx, nvbios_M0209Ee() argument
63 if (data && idx < *cnt) { nvbios_M0209Ee()
64 data = data + *hdr + idx * (*len + (snr * ssz)); nvbios_M0209Ee()
74 nvbios_M0209Ep(struct nvkm_bios *bios, int idx, nvbios_M0209Ep() argument
77 u32 data = nvbios_M0209Ee(bios, idx, ver, hdr, cnt, len); nvbios_M0209Ep()
95 nvbios_M0209Se(struct nvkm_bios *bios, int ent, int idx, u8 *ver, u8 *hdr) nvbios_M0209Se() argument
100 if (data && idx < cnt) { nvbios_M0209Se()
101 data = data + *hdr + idx * len; nvbios_M0209Se()
109 nvbios_M0209Sp(struct nvkm_bios *bios, int ent, int idx, u8 *ver, u8 *hdr, nvbios_M0209Sp() argument
116 u32 i, data = nvbios_M0209Se(bios, ent, idx, ver, hdr); nvbios_M0209Sp()
pmu.c:33 int idx = 0; weirdo_pointer() local
34 if (nvbios_image(bios, idx++, &image)) { weirdo_pointer()
36 while (nvbios_image(bios, idx++, &image)) { weirdo_pointer()
65 nvbios_pmuEe(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr) nvbios_pmuEe() argument
69 if (data && idx < cnt) { nvbios_pmuEe()
70 data = data + *hdr + (idx * len); nvbios_pmuEe()
78 nvbios_pmuEp(struct nvkm_bios *bios, int idx, u8 *ver, u8 *hdr, nvbios_pmuEp() argument
81 u32 data = nvbios_pmuEe(bios, idx, ver, hdr); nvbios_pmuEp()
96 u8 ver, hdr, idx = 0; nvbios_pmuRm() local
99 while ((data = nvbios_pmuEp(bios, idx++, &ver, &hdr, &pmuE))) { nvbios_pmuRm()
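
All the nvbios_*Ee() accessors above share one shape: a VBIOS table is a header of *hdr bytes followed by cnt entries of len bytes each, so entry idx sits at data + hdr + idx * len, with 0 meaning "not found"; the *Em()/match loops then just iterate idx until the accessor returns 0. A self-contained version of that addressing and loop:

#include <stdint.h>
#include <stdio.h>

static uint32_t table_entry(uint32_t data, uint8_t hdr,
			    uint8_t cnt, uint8_t len, uint8_t idx)
{
	if (data && idx < cnt)
		return data + hdr + idx * len;
	return 0;	/* past the end, or no table at all */
}

int main(void)
{
	/* table at offset 0x100: 4-byte header, 3 entries of 8 bytes */
	uint32_t data = 0x100, off;
	uint8_t idx = 0;

	while ((off = table_entry(data, 4, 3, 8, idx++)))
		printf("entry %u at %#x\n", idx - 1, off);
	return 0;
}
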
/linux-4.4.14/lib/
find_bit.c:84 unsigned long idx; find_first_bit() local
86 for (idx = 0; idx * BITS_PER_LONG < size; idx++) { find_first_bit()
87 if (addr[idx]) find_first_bit()
88 return min(idx * BITS_PER_LONG + __ffs(addr[idx]), size); find_first_bit()
102 unsigned long idx; find_first_zero_bit() local
104 for (idx = 0; idx * BITS_PER_LONG < size; idx++) { find_first_zero_bit()
105 if (addr[idx] != ~0UL) find_first_zero_bit()
106 return min(idx * BITS_PER_LONG + ffz(addr[idx]), size); find_first_zero_bit()
119 unsigned long idx = (size-1) / BITS_PER_LONG; find_last_bit() local
122 val &= addr[idx]; find_last_bit()
124 return idx * BITS_PER_LONG + __fls(val); find_last_bit()
127 } while (idx--); find_last_bit()
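
find_first_bit() above skips whole zero words and only does bit work on the first non-zero one; the min() clamps the result when the final word extends past size. A userspace sketch, with the GCC/Clang __builtin_ctzl() standing in for the kernel's __ffs():

#include <stdio.h>

#define BITS_PER_LONG (8 * (int)sizeof(unsigned long))

static unsigned long my_find_first_bit(const unsigned long *addr,
				       unsigned long size)
{
	unsigned long idx;

	for (idx = 0; idx * BITS_PER_LONG < size; idx++) {
		if (addr[idx]) {
			unsigned long bit = idx * BITS_PER_LONG +
					    __builtin_ctzl(addr[idx]);
			return bit < size ? bit : size;	/* the min() clamp */
		}
	}
	return size;	/* no bit set */
}

int main(void)
{
	unsigned long map[2] = { 0, 1ul << 5 };

	/* first set bit is bit 5 of the second word */
	printf("first bit: %lu\n", my_find_first_bit(map, 128));
	return 0;
}
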
/linux-4.4.14/drivers/media/usb/pvrusb2/
pvrusb2-std.c:123 unsigned int idx; find_std_name() local
125 for (idx = 0; idx < arrSize; idx++) { find_std_name()
126 p = arrPtr + idx; find_std_name()
283 unsigned int idx; match_std() local
284 for (idx = 0; idx < ARRAY_SIZE(generic_standards); idx++) { match_std()
285 if (generic_standards[idx].id & id) { match_std()
286 return generic_standards + idx; match_std()
295 int idx; pvr2_std_fill() local
299 idx = std->index; pvr2_std_fill()
301 std->index = idx; pvr2_std_fill()
305 pvr2_trace(PVR2_TRACE_STD,"Set up standard idx=%u name=%s", pvr2_std_fill()
323 unsigned int idx,bcnt,idx2; pvr2_std_create_enum() local
374 for (idx = 0; idx < std_cnt; idx++) pvr2_std_create_enum()
375 stddefs[idx].index = idx; pvr2_std_create_enum()
377 idx = 0; pvr2_std_create_enum()
380 for (idx2 = 0; (idx2 < ARRAY_SIZE(std_mixes)) && (idx < std_cnt); pvr2_std_create_enum()
383 if (pvr2_std_fill(stddefs+idx,std_mixes[idx2])) idx++; pvr2_std_create_enum()
386 for (idmsk = 1, cmsk = id; cmsk && (idx < std_cnt); idmsk <<= 1) { pvr2_std_create_enum()
389 if (!pvr2_std_fill(stddefs+idx,idmsk)) continue; pvr2_std_create_enum()
390 idx++; pvr2_std_create_enum()
pvrusb2-ioread.c:55 unsigned int idx; pvr2_ioread_init() local
60 for (idx = 0; idx < BUFFER_COUNT; idx++) { pvr2_ioread_init()
61 cp->buffer_storage[idx] = kmalloc(BUFFER_SIZE,GFP_KERNEL); pvr2_ioread_init()
62 if (!(cp->buffer_storage[idx])) break; pvr2_ioread_init()
65 if (idx < BUFFER_COUNT) { pvr2_ioread_init()
67 for (idx = 0; idx < BUFFER_COUNT; idx++) { pvr2_ioread_init()
68 if (!(cp->buffer_storage[idx])) continue; pvr2_ioread_init()
69 kfree(cp->buffer_storage[idx]); pvr2_ioread_init()
78 unsigned int idx; pvr2_ioread_done() local
81 for (idx = 0; idx < BUFFER_COUNT; idx++) { pvr2_ioread_done()
82 if (!(cp->buffer_storage[idx])) continue; pvr2_ioread_done()
83 kfree(cp->buffer_storage[idx]); pvr2_ioread_done()
205 unsigned int idx; pvr2_ioread_setup() local
231 for (idx = 0; idx < BUFFER_COUNT; idx++) { pvr2_ioread_setup()
232 bp = pvr2_stream_get_buffer(sp,idx); pvr2_ioread_setup()
234 cp->buffer_storage[idx], pvr2_ioread_setup()
315 unsigned int idx; pvr2_ioread_filter() local
330 for (idx = cp->c_data_offs; idx < cp->c_data_len; idx++) { pvr2_ioread_filter()
332 if (cp->c_data_ptr[idx] == pvr2_ioread_filter()
343 cp->c_data_offs += idx; pvr2_ioread_filter()
344 cp->sync_trashed_count += idx; pvr2_ioread_filter()
pvrusb2-encoder.c:44 unsigned int idx,addr; pvr2_encoder_write_words() local
65 for (idx = 0; idx < chunkCnt; idx++) { pvr2_encoder_write_words()
66 addr = idx + offs; pvr2_encoder_write_words()
70 PVR2_DECOMPOSE_LE(hdw->cmd_buffer, bAddr,data[idx]); pvr2_encoder_write_words()
90 unsigned int idx; pvr2_encoder_read_words() local
124 for (idx = 0; idx < chunkCnt; idx++) { pvr2_encoder_read_words()
125 data[idx] = PVR2_COMPOSE_LE(hdw->cmd_buffer,idx*4); pvr2_encoder_read_words()
151 unsigned int idx; pvr2_encoder_cmd() local
223 for (idx = 0; idx < arg_cnt_send; idx++) { pvr2_encoder_cmd()
224 wrData[idx+4] = argp[idx]; pvr2_encoder_cmd()
226 for (; idx < ARRAY_SIZE(wrData) - 4; idx++) { pvr2_encoder_cmd()
227 wrData[idx+4] = 0; pvr2_encoder_cmd()
230 ret = pvr2_encoder_write_words(hdw,MBOX_BASE,wrData,idx); pvr2_encoder_cmd()
263 for (idx = 4; idx < arg_cnt_send; idx++) { pvr2_encoder_cmd()
267 idx-3,wrData[idx]); pvr2_encoder_cmd()
304 for (idx = 0; idx < arg_cnt_recv; idx++) { pvr2_encoder_cmd()
305 argp[idx] = rdData[idx+4]; pvr2_encoder_cmd()
322 unsigned int idx; pvr2_encoder_vcmd() local
336 for (idx = 0; idx < args; idx++) { pvr2_encoder_vcmd()
337 data[idx] = va_arg(vl, u32); pvr2_encoder_vcmd()
/linux-4.4.14/arch/x86/kernel/
tls.c:22 int idx; get_free_idx() local
24 for (idx = 0; idx < GDT_ENTRY_TLS_ENTRIES; idx++) get_free_idx()
25 if (desc_empty(&t->tls_array[idx])) get_free_idx()
26 return idx + GDT_ENTRY_TLS_MIN; get_free_idx()
82 static void set_tls_desc(struct task_struct *p, int idx, set_tls_desc() argument
86 struct desc_struct *desc = &t->tls_array[idx - GDT_ENTRY_TLS_MIN]; set_tls_desc()
112 int do_set_thread_area(struct task_struct *p, int idx, do_set_thread_area() argument
124 if (idx == -1) do_set_thread_area()
125 idx = info.entry_number; do_set_thread_area()
131 if (idx == -1 && can_allocate) { do_set_thread_area()
132 idx = get_free_idx(); do_set_thread_area()
133 if (idx < 0) do_set_thread_area()
134 return idx; do_set_thread_area()
135 if (put_user(idx, &u_info->entry_number)) do_set_thread_area()
139 if (idx < GDT_ENTRY_TLS_MIN || idx > GDT_ENTRY_TLS_MAX) do_set_thread_area()
142 set_tls_desc(p, idx, &info, 1); do_set_thread_area()
157 static void fill_user_desc(struct user_desc *info, int idx, fill_user_desc() argument
162 info->entry_number = idx; fill_user_desc()
176 int do_get_thread_area(struct task_struct *p, int idx, do_get_thread_area() argument
181 if (idx == -1 && get_user(idx, &u_info->entry_number)) do_get_thread_area()
184 if (idx < GDT_ENTRY_TLS_MIN || idx > GDT_ENTRY_TLS_MAX) do_get_thread_area()
187 fill_user_desc(&info, idx, do_get_thread_area()
188 &p->thread.tls_array[idx - GDT_ENTRY_TLS_MIN]); do_get_thread_area()
/linux-4.4.14/arch/xtensa/mm/
highmem.c:42 enum fixed_addresses idx; kmap_atomic() local
50 idx = kmap_idx(kmap_atomic_idx_push(), kmap_atomic()
52 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic()
54 BUG_ON(!pte_none(*(kmap_pte + idx))); kmap_atomic()
56 set_pte(kmap_pte + idx, mk_pte(page, PAGE_KERNEL_EXEC)); kmap_atomic()
66 int idx = kmap_idx(kmap_atomic_idx(), __kunmap_atomic() local
75 pte_clear(&init_mm, kvaddr, kmap_pte + idx); __kunmap_atomic()
/linux-4.4.14/arch/sh/mm/
kmap.c:34 enum fixed_addresses idx; kmap_coherent() local
42 idx = FIX_CMAP_END - kmap_coherent()
46 vaddr = __fix_to_virt(idx); kmap_coherent()
48 BUG_ON(!pte_none(*(kmap_coherent_pte - idx))); kmap_coherent()
49 set_pte(kmap_coherent_pte - idx, mk_pte(page, PAGE_KERNEL)); kmap_coherent()
58 enum fixed_addresses idx = __virt_to_fix(vaddr); kunmap_coherent() local
63 pte_clear(&init_mm, vaddr, kmap_coherent_pte - idx); kunmap_coherent()
ioremap_fixed.c:49 enum fixed_addresses idx0, idx; ioremap_fixed() local
86 idx = idx0; ioremap_fixed()
89 __set_fixmap(idx, phys_addr, prot); ioremap_fixed()
91 idx++; ioremap_fixed()
101 enum fixed_addresses idx; iounmap_fixed() local
123 idx = FIX_IOREMAP_BEGIN + slot + nrpages - 1; iounmap_fixed()
125 __clear_fixmap(idx, __pgprot(_PAGE_WIRED)); iounmap_fixed()
126 --idx; iounmap_fixed()
/linux-4.4.14/sound/
last.c:27 int idx, ok = 0; alsa_sound_last_init() local
30 for (idx = 0; idx < SNDRV_CARDS; idx++) alsa_sound_last_init()
31 if (snd_cards[idx] != NULL) { alsa_sound_last_init()
32 printk(KERN_INFO " #%i: %s\n", idx, snd_cards[idx]->longname); alsa_sound_last_init()
/linux-4.4.14/tools/perf/tests/
evsel-roundtrip-name.c:10 int type, op, err = 0, ret = 0, i, idx; perf_evsel__roundtrip_cache_name_test() local
33 idx = 0; perf_evsel__roundtrip_cache_name_test()
45 if (evsel->idx != idx) perf_evsel__roundtrip_cache_name_test()
48 ++idx; perf_evsel__roundtrip_cache_name_test()
84 if (strcmp(perf_evsel__name(evsel), names[evsel->idx])) { evlist__for_each()
86 pr_debug("%s != %s\n", perf_evsel__name(evsel), names[evsel->idx]); evlist__for_each()
bpf.c:149 static int __test__bpf(int idx) __test__bpf() argument
157 bpf_testcase_table[idx].prog_id, __test__bpf()
161 bpf_testcase_table[idx].msg_compile_fail); __test__bpf()
162 if (idx == 0) __test__bpf()
169 bpf_testcase_table[idx].name); __test__bpf()
176 bpf_testcase_table[idx].target_func, __test__bpf()
177 bpf_testcase_table[idx].expect_result); __test__bpf()
/linux-4.4.14/drivers/input/misc/
ad714x.c:214 static void ad714x_button_state_machine(struct ad714x_chip *ad714x, int idx) ad714x_button_state_machine() argument
216 struct ad714x_button_plat *hw = &ad714x->hw->button[idx]; ad714x_button_state_machine()
217 struct ad714x_button_drv *sw = &ad714x->sw->button[idx]; ad714x_button_state_machine()
223 dev_dbg(ad714x->dev, "button %d touched\n", idx); ad714x_button_state_machine()
233 dev_dbg(ad714x->dev, "button %d released\n", idx); ad714x_button_state_machine()
249 static void ad714x_slider_cal_sensor_val(struct ad714x_chip *ad714x, int idx) ad714x_slider_cal_sensor_val() argument
251 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; ad714x_slider_cal_sensor_val()
267 static void ad714x_slider_cal_highest_stage(struct ad714x_chip *ad714x, int idx) ad714x_slider_cal_highest_stage() argument
269 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; ad714x_slider_cal_highest_stage()
270 struct ad714x_slider_drv *sw = &ad714x->sw->slider[idx]; ad714x_slider_cal_highest_stage()
275 dev_dbg(ad714x->dev, "slider %d highest_stage:%d\n", idx, ad714x_slider_cal_highest_stage()
292 static void ad714x_slider_cal_abs_pos(struct ad714x_chip *ad714x, int idx) ad714x_slider_cal_abs_pos() argument
294 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; ad714x_slider_cal_abs_pos()
295 struct ad714x_slider_drv *sw = &ad714x->sw->slider[idx]; ad714x_slider_cal_abs_pos()
300 dev_dbg(ad714x->dev, "slider %d absolute position:%d\n", idx, ad714x_slider_cal_abs_pos()
314 static void ad714x_slider_cal_flt_pos(struct ad714x_chip *ad714x, int idx) ad714x_slider_cal_flt_pos() argument
316 struct ad714x_slider_drv *sw = &ad714x->sw->slider[idx]; ad714x_slider_cal_flt_pos()
321 dev_dbg(ad714x->dev, "slider %d filter position:%d\n", idx, ad714x_slider_cal_flt_pos()
325 static void ad714x_slider_use_com_int(struct ad714x_chip *ad714x, int idx) ad714x_slider_use_com_int() argument
327 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; ad714x_slider_use_com_int()
332 static void ad714x_slider_use_thr_int(struct ad714x_chip *ad714x, int idx) ad714x_slider_use_thr_int() argument
334 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; ad714x_slider_use_thr_int()
339 static void ad714x_slider_state_machine(struct ad714x_chip *ad714x, int idx) ad714x_slider_state_machine() argument
341 struct ad714x_slider_plat *hw = &ad714x->hw->slider[idx]; ad714x_slider_state_machine()
342 struct ad714x_slider_drv *sw = &ad714x->sw->slider[idx]; ad714x_slider_state_machine()
358 ad714x_slider_use_com_int(ad714x, idx); ad714x_slider_state_machine()
359 dev_dbg(ad714x->dev, "slider %d touched\n", idx); ad714x_slider_state_machine()
365 ad714x_slider_cal_sensor_val(ad714x, idx); ad714x_slider_state_machine()
366 ad714x_slider_cal_highest_stage(ad714x, idx); ad714x_slider_state_machine()
367 ad714x_slider_cal_abs_pos(ad714x, idx); ad714x_slider_state_machine()
376 ad714x_slider_cal_sensor_val(ad714x, idx); ad714x_slider_state_machine()
377 ad714x_slider_cal_highest_stage(ad714x, idx); ad714x_slider_state_machine()
378 ad714x_slider_cal_abs_pos(ad714x, idx); ad714x_slider_state_machine()
379 ad714x_slider_cal_flt_pos(ad714x, idx); ad714x_slider_state_machine()
386 ad714x_slider_use_thr_int(ad714x, idx); ad714x_slider_state_machine()
390 idx); ad714x_slider_state_machine()
408 static void ad714x_wheel_cal_highest_stage(struct ad714x_chip *ad714x, int idx) ad714x_wheel_cal_highest_stage() argument
410 struct ad714x_wheel_plat *hw = &ad714x->hw->wheel[idx]; ad714x_wheel_cal_highest_stage()
411 struct ad714x_wheel_drv *sw = &ad714x->sw->wheel[idx]; ad714x_wheel_cal_highest_stage()
417 dev_dbg(ad714x->dev, "wheel %d highest_stage:%d\n", idx, ad714x_wheel_cal_highest_stage()
421 static void ad714x_wheel_cal_sensor_val(struct ad714x_chip *ad714x, int idx) ad714x_wheel_cal_sensor_val() argument
423 struct ad714x_wheel_plat *hw = &ad714x->hw->wheel[idx]; ad714x_wheel_cal_sensor_val()
450 static void ad714x_wheel_cal_abs_pos(struct ad714x_chip *ad714x, int idx) ad714x_wheel_cal_abs_pos() argument
452 struct ad714x_wheel_plat *hw = &ad714x->hw->wheel[idx]; ad714x_wheel_cal_abs_pos()
453 struct ad714x_wheel_drv *sw = &ad714x->sw->wheel[idx]; ad714x_wheel_cal_abs_pos()
481 static void ad714x_wheel_cal_flt_pos(struct ad714x_chip *ad714x, int idx) ad714x_wheel_cal_flt_pos() argument
483 struct ad714x_wheel_plat *hw = &ad714x->hw->wheel[idx]; ad714x_wheel_cal_flt_pos()
484 struct ad714x_wheel_drv *sw = &ad714x->sw->wheel[idx]; ad714x_wheel_cal_flt_pos()
497 static void ad714x_wheel_use_com_int(struct ad714x_chip *ad714x, int idx) ad714x_wheel_use_com_int() argument
499 struct ad714x_wheel_plat *hw = &ad714x->hw->wheel[idx]; ad714x_wheel_use_com_int()
504 static void ad714x_wheel_use_thr_int(struct ad714x_chip *ad714x, int idx) ad714x_wheel_use_thr_int() argument
506 struct ad714x_wheel_plat *hw = &ad714x->hw->wheel[idx]; ad714x_wheel_use_thr_int()
511 static void ad714x_wheel_state_machine(struct ad714x_chip *ad714x, int idx) ad714x_wheel_state_machine() argument
513 struct ad714x_wheel_plat *hw = &ad714x->hw->wheel[idx]; ad714x_wheel_state_machine()
514 struct ad714x_wheel_drv *sw = &ad714x->sw->wheel[idx]; ad714x_wheel_state_machine()
530 ad714x_wheel_use_com_int(ad714x, idx); ad714x_wheel_state_machine()
531 dev_dbg(ad714x->dev, "wheel %d touched\n", idx); ad714x_wheel_state_machine()
537 ad714x_wheel_cal_sensor_val(ad714x, idx); ad714x_wheel_state_machine()
538 ad714x_wheel_cal_highest_stage(ad714x, idx); ad714x_wheel_state_machine()
539 ad714x_wheel_cal_abs_pos(ad714x, idx); ad714x_wheel_state_machine()
548 ad714x_wheel_cal_sensor_val(ad714x, idx); ad714x_wheel_state_machine()
549 ad714x_wheel_cal_highest_stage(ad714x, idx); ad714x_wheel_state_machine()
550 ad714x_wheel_cal_abs_pos(ad714x, idx); ad714x_wheel_state_machine()
551 ad714x_wheel_cal_flt_pos(ad714x, idx); ad714x_wheel_state_machine()
559 ad714x_wheel_use_thr_int(ad714x, idx); ad714x_wheel_state_machine()
564 idx); ad714x_wheel_state_machine()
575 static void touchpad_cal_sensor_val(struct ad714x_chip *ad714x, int idx) touchpad_cal_sensor_val() argument
577 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; touchpad_cal_sensor_val()
595 static void touchpad_cal_highest_stage(struct ad714x_chip *ad714x, int idx) touchpad_cal_highest_stage() argument
597 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; touchpad_cal_highest_stage()
598 struct ad714x_touchpad_drv *sw = &ad714x->sw->touchpad[idx]; touchpad_cal_highest_stage()
607 idx, sw->x_highest_stage, sw->y_highest_stage); touchpad_cal_highest_stage()
616 static int touchpad_check_second_peak(struct ad714x_chip *ad714x, int idx) touchpad_check_second_peak() argument
618 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; touchpad_check_second_peak()
619 struct ad714x_touchpad_drv *sw = &ad714x->sw->touchpad[idx]; touchpad_check_second_peak()
655 static void touchpad_cal_abs_pos(struct ad714x_chip *ad714x, int idx) touchpad_cal_abs_pos() argument
657 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; touchpad_cal_abs_pos()
658 struct ad714x_touchpad_drv *sw = &ad714x->sw->touchpad[idx]; touchpad_cal_abs_pos()
665 dev_dbg(ad714x->dev, "touchpad %d absolute position:(%d, %d)\n", idx, touchpad_cal_abs_pos()
669 static void touchpad_cal_flt_pos(struct ad714x_chip *ad714x, int idx) touchpad_cal_flt_pos() argument
671 struct ad714x_touchpad_drv *sw = &ad714x->sw->touchpad[idx]; touchpad_cal_flt_pos()
679 idx, sw->x_flt_pos, sw->y_flt_pos); touchpad_cal_flt_pos()
700 static int touchpad_check_endpoint(struct ad714x_chip *ad714x, int idx) touchpad_check_endpoint() argument
702 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; touchpad_check_endpoint()
703 struct ad714x_touchpad_drv *sw = &ad714x->sw->touchpad[idx]; touchpad_check_endpoint()
777 static void touchpad_use_com_int(struct ad714x_chip *ad714x, int idx) touchpad_use_com_int() argument
779 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; touchpad_use_com_int()
784 static void touchpad_use_thr_int(struct ad714x_chip *ad714x, int idx) touchpad_use_thr_int() argument
786 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; touchpad_use_thr_int()
792 static void ad714x_touchpad_state_machine(struct ad714x_chip *ad714x, int idx) ad714x_touchpad_state_machine() argument
794 struct ad714x_touchpad_plat *hw = &ad714x->hw->touchpad[idx]; ad714x_touchpad_state_machine()
795 struct ad714x_touchpad_drv *sw = &ad714x->sw->touchpad[idx]; ad714x_touchpad_state_machine()
814 touchpad_use_com_int(ad714x, idx); ad714x_touchpad_state_machine()
815 dev_dbg(ad714x->dev, "touchpad %d touched\n", idx); ad714x_touchpad_state_machine()
821 touchpad_cal_sensor_val(ad714x, idx); ad714x_touchpad_state_machine()
822 touchpad_cal_highest_stage(ad714x, idx); ad714x_touchpad_state_machine()
823 if ((!touchpad_check_second_peak(ad714x, idx)) && ad714x_touchpad_state_machine()
824 (!touchpad_check_endpoint(ad714x, idx))) { ad714x_touchpad_state_machine()
827 idx); ad714x_touchpad_state_machine()
828 touchpad_cal_abs_pos(ad714x, idx); ad714x_touchpad_state_machine()
839 touchpad_cal_sensor_val(ad714x, idx); ad714x_touchpad_state_machine()
840 touchpad_cal_highest_stage(ad714x, idx); ad714x_touchpad_state_machine()
841 if ((!touchpad_check_second_peak(ad714x, idx)) ad714x_touchpad_state_machine()
842 && (!touchpad_check_endpoint(ad714x, idx))) { ad714x_touchpad_state_machine()
843 touchpad_cal_abs_pos(ad714x, idx); ad714x_touchpad_state_machine()
844 touchpad_cal_flt_pos(ad714x, idx); ad714x_touchpad_state_machine()
856 touchpad_use_thr_int(ad714x, idx); ad714x_touchpad_state_machine()
860 idx); ad714x_touchpad_state_machine()
axp20x-pek.c:40 unsigned int idx; member in struct:axp20x_time
44 { .time = 128, .idx = 0 },
45 { .time = 1000, .idx = 2 },
46 { .time = 3000, .idx = 1 },
47 { .time = 2000, .idx = 3 },
51 { .time = 4000, .idx = 0 },
52 { .time = 6000, .idx = 1 },
53 { .time = 8000, .idx = 2 },
54 { .time = 10000, .idx = 3 },
93 if (val == axp20x_ea->p_time[i].idx) axp20x_show_ext_attr()
108 unsigned int val, idx = 0; axp20x_store_ext_attr() local
128 idx = axp20x_ea->p_time[i].idx; axp20x_store_ext_attr()
135 idx <<= ffs(axp20x_ea->mask) - 1; axp20x_store_ext_attr()
138 axp20x_ea->mask, idx); axp20x_store_ext_attr()
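
The axp20x tables above translate a user-visible press time to a hardware index, then shift the index into the register field with ffs(mask) - 1. A standalone model of that encode step; the table values are copied from the 128/1000/3000/2000 ms excerpt, while the 0xc0 mask is an assumption for the demo:

#include <stdio.h>
#include <strings.h>	/* ffs() */

struct time_map { unsigned time_ms, idx; };

static const struct time_map startup[] = {
	{ 128, 0 }, { 1000, 2 }, { 3000, 1 }, { 2000, 3 },
};

static unsigned encode_time(unsigned val, unsigned mask)
{
	unsigned idx = 0;

	for (unsigned i = 0; i < sizeof(startup) / sizeof(startup[0]); i++)
		if (val == startup[i].time_ms)
			idx = startup[i].idx;
	return idx << (ffs(mask) - 1);	/* move idx into the field */
}

int main(void)
{
	/* 3000 ms -> idx 1 -> 0x40 once shifted into a 0xc0 field */
	printf("3000 ms -> reg bits %#x\n", encode_time(3000, 0xc0));
	return 0;
}
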
/linux-4.4.14/drivers/clk/imx/
clk-pfd.c:23 * @idx: the index of PFD encoded in the register
26 * data encoded, and member idx is used to specify the one. And each
32 u8 idx; member in struct:clk_pfd
45 writel_relaxed(1 << ((pfd->idx + 1) * 8 - 1), pfd->reg + CLR); clk_pfd_enable()
54 writel_relaxed(1 << ((pfd->idx + 1) * 8 - 1), pfd->reg + SET); clk_pfd_disable()
62 u8 frac = (readl_relaxed(pfd->reg) >> (pfd->idx * 8)) & 0x3f; clk_pfd_recalc_rate()
105 writel_relaxed(0x3f << (pfd->idx * 8), pfd->reg + CLR); clk_pfd_set_rate()
106 writel_relaxed(frac << (pfd->idx * 8), pfd->reg + SET); clk_pfd_set_rate()
115 if (readl_relaxed(pfd->reg) & (1 << ((pfd->idx + 1) * 8 - 1))) clk_pfd_is_enabled()
131 void __iomem *reg, u8 idx) imx_clk_pfd()
142 pfd->idx = idx; imx_clk_pfd()
130 imx_clk_pfd(const char *name, const char *parent_name, void __iomem *reg, u8 idx) imx_clk_pfd() argument
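The @idx comments above describe a byte-per-PFD register layout: each PFD owns one byte of the register, with the clock gate in that byte's top bit and the 6-bit fractional divider below it. A minimal C sketch of the arithmetic, derived from the shifts and the 0x3f mask visible above:

static unsigned int pfd_gate_mask(unsigned char idx)
{
        return 1 << ((idx + 1) * 8 - 1);        /* top bit of byte idx */
}

static unsigned char pfd_read_frac(unsigned int regval, unsigned char idx)
{
        return (regval >> (idx * 8)) & 0x3f;    /* low 6 bits of byte idx */
}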
/linux-4.4.14/arch/x86/crypto/sha-mb/
H A Dsha1_mb_mgr_flush_avx2.S69 # idx must be a register not clobbered by sha1_x8_avx2
70 #define idx %r8 define
133 xor idx, idx
136 cmovne one(%rip), idx
139 cmovne two(%rip), idx
142 cmovne three(%rip), idx
145 cmovne four(%rip), idx
148 cmovne five(%rip), idx
151 cmovne six(%rip), idx
154 cmovne seven(%rip), idx
156 # copy idx to empty lanes
159 mov offset(state,idx,8), tmp
187 mov idx, len2
188 and $0xF, idx
204 # state and idx are intact
208 # process completed job "idx"
209 imul $_LANE_DATA_size, idx, lane_data
217 or idx, unused_lanes
220 movl $0xFFFFFFFF, _lens(state, idx, 4)
222 vmovd _args_digest(state , idx, 4) , %xmm0
223 vpinsrd $1, _args_digest+1*32(state, idx, 4), %xmm0, %xmm0
224 vpinsrd $2, _args_digest+2*32(state, idx, 4), %xmm0, %xmm0
225 vpinsrd $3, _args_digest+3*32(state, idx, 4), %xmm0, %xmm0
226 movl _args_digest+4*32(state, idx, 4), tmp2_w
273 test $~0xF, idx
276 # process completed job "idx"
277 imul $_LANE_DATA_size, idx, lane_data
285 or idx, unused_lanes
288 movl $0xFFFFFFFF, _lens(state, idx, 4)
290 vmovd _args_digest(state, idx, 4), %xmm0
291 vpinsrd $1, _args_digest+1*32(state, idx, 4), %xmm0, %xmm0
292 vpinsrd $2, _args_digest+2*32(state, idx, 4), %xmm0, %xmm0
293 vpinsrd $3, _args_digest+3*32(state, idx, 4), %xmm0, %xmm0
294 movl _args_digest+4*32(state, idx, 4), tmp2_w
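The xor/cmovne run above is a branchless lane scan: each (elided) cmpq tests one lane's job pointer and the paired cmovne overwrites idx with that lane number, so afterwards idx names a lane that still holds a job, whose state is then copied into the empty lanes before flushing. A hedged C rendering of the selection; the lane count matches the one..seven constants above, but the field name is an assumption:

static int find_job_lane(void *job_in_lane[8])
{
        int idx = 0;
        int i;

        for (i = 1; i < 8; i++)
                if (job_in_lane[i])
                        idx = i;        /* what each cmovne does without a branch */
        return idx;
}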
/linux-4.4.14/arch/x86/crypto/
H A Daes-i586-asm_32.S89 // idx input register for the round (destroyed)
93 #define do_col(table, a1,a2,a3,a4, idx, tmp) \
94 movzx %l(idx),%tmp; \
96 movzx %h(idx),%tmp; \
97 shr $16,%idx; \
99 movzx %l(idx),%tmp; \
100 movzx %h(idx),%idx; \
102 xor table+3*tlen(,%idx,4),%a4;
105 // NB1: original value of a3 is in idx on exit
107 #define do_fcol(table, a1,a2,a3,a4, idx, tmp, sched) \
109 movzx %l(idx),%tmp; \
113 movzx %h(idx),%tmp; \
114 shr $16,%idx; \
116 movzx %l(idx),%tmp; \
117 movzx %h(idx),%idx; \
118 xor table+3*tlen(,%idx,4),%a4; \
119 mov %a3,%idx; \
124 // NB1: original value of a3 is in idx on exit
126 #define do_icol(table, a1,a2,a3,a4, idx, tmp, sched) \
128 movzx %l(idx),%tmp; \
132 movzx %h(idx),%tmp; \
133 shr $16,%idx; \
135 movzx %l(idx),%tmp; \
136 movzx %h(idx),%idx; \
137 xor table+3*tlen(,%idx,4),%a4; \
138 mov %a3,%idx; \
163 do_fcol(table, r2,r5,r4,r1, r0,r3, arg); /* idx=r0 */ \
164 do_col (table, r4,r1,r2,r5, r0,r3); /* idx=r4 */ \
166 do_col (table, r1,r2,r5,r4, r0,r3); /* idx=r1 */ \
168 do_col (table, r5,r4,r1,r2, r0,r3); /* idx=r5 */
178 do_fcol(table, r0,r5,r4,r1, r2,r3, arg); /* idx=r2 */ \
179 do_col (table, r4,r1,r0,r5, r2,r3); /* idx=r4 */ \
181 do_col (table, r1,r0,r5,r4, r2,r3); /* idx=r1 */ \
183 do_col (table, r5,r4,r1,r0, r2,r3); /* idx=r5 */
198 do_icol(table, r2,r1,r4,r5, r0,r3, arg); /* idx=r0 */ \
199 do_col (table, r4,r5,r2,r1, r0,r3); /* idx=r4 */ \
201 do_col (table, r1,r4,r5,r2, r0,r3); /* idx=r1 */ \
203 do_col (table, r5,r2,r1,r4, r0,r3); /* idx=r5 */
213 do_icol(table, r0,r1,r4,r5, r2,r3, arg); /* idx=r2 */ \
214 do_col (table, r4,r5,r0,r1, r2,r3); /* idx=r4 */ \
216 do_col (table, r1,r4,r5,r0, r2,r3); /* idx=r1 */ \
218 do_col (table, r5,r0,r1,r4, r2,r3); /* idx=r5 */
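do_col above is one table-lookup column of the round: movzx %l(idx)/%h(idx) pull out the low and high bytes, shr $16 exposes the upper half-word, and each byte indexes a different 256-entry table (tlen bytes apart) whose entry is xored into an accumulator. A hedged C sketch of that byte splitting; which accumulator pairs with which table row is abbreviated in the listing, so the pairing below is illustrative:

static void do_col_sketch(const unsigned int t[4 * 256], unsigned int idx,
                          unsigned int *a1, unsigned int *a2,
                          unsigned int *a3, unsigned int *a4)
{
        *a1 ^= t[0 * 256 + (idx & 0xff)];         /* movzx %l(idx) */
        *a2 ^= t[1 * 256 + ((idx >> 8) & 0xff)];  /* movzx %h(idx) */
        idx >>= 16;                               /* shr $16,%idx */
        *a3 ^= t[2 * 256 + (idx & 0xff)];
        *a4 ^= t[3 * 256 + ((idx >> 8) & 0xff)];  /* the table+3*tlen xor */
}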
/linux-4.4.14/include/xen/
H A Dhvm.h38 static inline int hvm_get_parameter(int idx, uint64_t *value) hvm_get_parameter() argument
44 xhv.index = idx; hvm_get_parameter()
48 param_name(idx), idx, r); hvm_get_parameter()
/linux-4.4.14/arch/arc/kernel/
H A Dperf_event.c84 /* read counter #idx; note that counter# != event# on ARC! */ arc_pmu_read_counter()
85 static uint64_t arc_pmu_read_counter(int idx) arc_pmu_read_counter() argument
94 write_aux_reg(ARC_REG_PCT_INDEX, idx); arc_pmu_read_counter()
104 struct hw_perf_event *hwc, int idx) arc_perf_event_update()
107 uint64_t new_raw_count = arc_pmu_read_counter(idx); arc_perf_event_update()
121 arc_perf_event_update(event, &event->hw, event->hw.idx); arc_pmu_read()
219 int idx = hwc->idx; arc_pmu_event_set_period() local
244 write_aux_reg(ARC_REG_PCT_INDEX, idx); arc_pmu_event_set_period()
263 int idx = hwc->idx; arc_pmu_start() local
265 if (WARN_ON_ONCE(idx == -1)) arc_pmu_start()
278 read_aux_reg(ARC_REG_PCT_INT_CTRL) | (1 << idx)); arc_pmu_start()
281 write_aux_reg(ARC_REG_PCT_INDEX, idx); /* counter # */ arc_pmu_start()
288 int idx = hwc->idx; arc_pmu_stop() local
296 write_aux_reg(ARC_REG_PCT_INT_ACT, 1 << idx); arc_pmu_stop()
298 read_aux_reg(ARC_REG_PCT_INT_CTRL) & ~(1 << idx)); arc_pmu_stop()
303 write_aux_reg(ARC_REG_PCT_INDEX, idx); arc_pmu_stop()
313 arc_perf_event_update(event, &event->hw, idx); arc_pmu_stop()
323 __clear_bit(event->hw.idx, pmu_cpu->used_mask); arc_pmu_del()
325 pmu_cpu->act_counter[event->hw.idx] = 0; arc_pmu_del()
335 int idx = hwc->idx; arc_pmu_add() local
337 if (__test_and_set_bit(idx, pmu_cpu->used_mask)) { arc_pmu_add()
338 idx = find_first_zero_bit(pmu_cpu->used_mask, arc_pmu_add()
340 if (idx == arc_pmu->n_counters) arc_pmu_add()
343 __set_bit(idx, pmu_cpu->used_mask); arc_pmu_add()
344 hwc->idx = idx; arc_pmu_add()
347 write_aux_reg(ARC_REG_PCT_INDEX, idx); arc_pmu_add()
349 pmu_cpu->act_counter[idx] = event; arc_pmu_add()
379 int idx; arc_pmu_intr() local
387 for (idx = 0; idx < arc_pmu->n_counters; idx++) { arc_pmu_intr()
388 struct perf_event *event = pmu_cpu->act_counter[idx]; arc_pmu_intr()
391 if (!(active_ints & (1 << idx))) arc_pmu_intr()
395 write_aux_reg(ARC_REG_PCT_INT_ACT, 1 << idx); arc_pmu_intr()
403 read_aux_reg(ARC_REG_PCT_INT_CTRL) | (1 << idx)); arc_pmu_intr()
407 WARN_ON_ONCE(hwc->idx != idx); arc_pmu_intr()
409 arc_perf_event_update(event, &event->hw, event->hw.idx); arc_pmu_intr()
497 pr_debug("mapping perf event %2d to h/w event \'%8s\' (idx %d)\n", arc_pmu_device_probe()
103 arc_perf_event_update(struct perf_event *event, struct hw_perf_event *hwc, int idx) arc_perf_event_update() argument
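As the "counter# != event#" comment warns, the ARC PCT block is index-addressed: a counter number is first latched into ARC_REG_PCT_INDEX, and subsequent PCT register accesses then apply to that counter. A hedged sketch of the resulting read sequence; the COUNTL/COUNTH register names are assumptions, since the actual count reads are elided from this listing:

static unsigned long long pct_read_sketch(int idx)
{
        unsigned int lo, hi;

        write_aux_reg(ARC_REG_PCT_INDEX, idx);   /* select counter #idx */
        lo = read_aux_reg(ARC_REG_PCT_COUNTL);   /* assumed register names */
        hi = read_aux_reg(ARC_REG_PCT_COUNTH);
        return ((unsigned long long)hi << 32) | lo;
}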
/linux-4.4.14/include/linux/
H A Dhugetlb_cgroup.h54 extern int hugetlb_cgroup_charge_cgroup(int idx, unsigned long nr_pages,
56 extern void hugetlb_cgroup_commit_charge(int idx, unsigned long nr_pages,
59 extern void hugetlb_cgroup_uncharge_page(int idx, unsigned long nr_pages,
61 extern void hugetlb_cgroup_uncharge_cgroup(int idx, unsigned long nr_pages,
85 hugetlb_cgroup_charge_cgroup(int idx, unsigned long nr_pages, hugetlb_cgroup_charge_cgroup() argument
92 hugetlb_cgroup_commit_charge(int idx, unsigned long nr_pages, hugetlb_cgroup_commit_charge() argument
100 hugetlb_cgroup_uncharge_page(int idx, unsigned long nr_pages, struct page *page) hugetlb_cgroup_uncharge_page() argument
106 hugetlb_cgroup_uncharge_cgroup(int idx, unsigned long nr_pages, hugetlb_cgroup_uncharge_cgroup() argument
H A Dhtirq.h20 unsigned idx; member in struct:ht_irq_cfg
31 int arch_setup_ht_irq(int idx, int pos, struct pci_dev *dev,
36 int __ht_create_irq(struct pci_dev *dev, int idx, ht_irq_update_t *update);
H A Dperf_regs.h11 u64 perf_reg_value(struct pt_regs *regs, int idx);
18 static inline u64 perf_reg_value(struct pt_regs *regs, int idx) perf_reg_value() argument
/linux-4.4.14/drivers/input/touchscreen/
H A Dad7879-i2c.c33 u8 idx; ad7879_i2c_multi_read() local
37 for (idx = 0; idx < count; ++idx) ad7879_i2c_multi_read()
38 buf[idx] = swab16(buf[idx]); ad7879_i2c_multi_read()
H A Dad7879-spi.c38 u8 idx; ad7879_spi_xfer() local
63 for (idx = 0; idx < count; ++idx) { ad7879_spi_xfer()
65 xfers[idx].rx_buf = &rx_buf[idx]; ad7879_spi_xfer()
67 xfers[idx].tx_buf = &tx_buf[idx]; ad7879_spi_xfer()
68 xfers[idx].len = 2; ad7879_spi_xfer()
69 spi_message_add_tail(&xfers[idx], &msg); ad7879_spi_xfer()
H A Delo.c61 int idx; member in struct:elo
73 elo->data[elo->idx] = data; elo_process_data_10()
75 switch (elo->idx++) { elo_process_data_10()
81 elo->idx = 0; elo_process_data_10()
86 elo->idx = 0; elo_process_data_10()
125 elo->data[elo->idx] = data; elo_process_data_6()
127 switch (elo->idx++) { elo_process_data_6()
131 elo->idx = 0; elo_process_data_6()
136 elo->idx = 0; elo_process_data_6()
141 elo->idx = 0; elo_process_data_6()
146 elo->idx = 0; elo_process_data_6()
156 elo->idx = 0; elo_process_data_6()
164 elo->idx = 0; elo_process_data_6()
174 elo->idx = 0; elo_process_data_6()
183 elo->data[elo->idx] = data; elo_process_data_3()
185 switch (elo->idx++) { elo_process_data_3()
189 elo->idx = 0; elo_process_data_3()
196 elo->idx = 0; elo_process_data_3()
H A Dtsc40.c19 u32 idx; member in struct:tsc_ser
47 ptsc->data[ptsc->idx] = data; tsc_interrupt()
48 switch (ptsc->idx++) { tsc_interrupt()
53 ptsc->idx = 0; tsc_interrupt()
57 ptsc->idx = 0; tsc_interrupt()
66 data, ptsc->idx - 1); tsc_interrupt()
67 ptsc->idx = 0; tsc_interrupt()
73 ptsc->idx = 0; tsc_interrupt()
/linux-4.4.14/sound/pci/hda/
H A Dhda_controller_trace.h21 __field( int, idx )
27 __entry->idx = (dev)->core.index;
31 TP_printk("[%d:%d] cmd=%d", __entry->card, __entry->idx, __entry->cmd)
42 __field( int, idx )
49 __entry->idx = (dev)->core.index;
54 TP_printk("[%d:%d] pos=%u, delay=%u", __entry->card, __entry->idx, __entry->pos, __entry->delay)
/linux-4.4.14/drivers/hwmon/
H A Dmenf21bmc_hwmon.c35 #define IDX_TO_VOLT_MIN_CMD(idx) (0x40 + idx)
36 #define IDX_TO_VOLT_MAX_CMD(idx) (0x50 + idx)
37 #define IDX_TO_VOLT_INP_CMD(idx) (0x60 + idx)
141 #define create_voltage_sysfs(idx) \
142 static SENSOR_DEVICE_ATTR(in##idx##_input, S_IRUGO, \
143 show_in, NULL, idx); \
144 static SENSOR_DEVICE_ATTR(in##idx##_min, S_IRUGO, \
145 show_min, NULL, idx); \
146 static SENSOR_DEVICE_ATTR(in##idx##_max, S_IRUGO, \
147 show_max, NULL, idx); \
148 static SENSOR_DEVICE_ATTR(in##idx##_label, S_IRUGO, \
149 show_label, NULL, idx);
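The IDX_TO_VOLT_*_CMD macros above fold the channel index straight into the BMC command byte, one 16-command bank per attribute; the values follow directly from the additions:

/* channel 2: min limit cmd = 0x42, max limit cmd = 0x52, input cmd = 0x62 */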
/linux-4.4.14/arch/score/mm/
H A Dtlb-score.c99 int idx; local_flush_tlb_range() local
105 idx = tlbpt_get(); local_flush_tlb_range()
108 if (idx < 0) local_flush_tlb_range()
138 long idx; local_flush_tlb_kernel_range() local
143 idx = tlbpt_get(); local_flush_tlb_kernel_range()
144 if (idx < 0) local_flush_tlb_kernel_range()
163 int oldpid, newpid, idx; local_flush_tlb_page() local
173 idx = tlbpt_get(); local_flush_tlb_page()
176 if (idx < 0) /* p_bit(31) - 1: miss, 0: hit*/ local_flush_tlb_page()
193 int oldpid, idx; local_flush_tlb_one() local
201 idx = tlbpt_get(); local_flush_tlb_one()
203 if (idx >= 0) { local_flush_tlb_one()
216 int idx, pid; __update_tlb() local
231 idx = tlbpt_get(); __update_tlb()
234 if (idx < 0) __update_tlb()
/linux-4.4.14/drivers/clk/
H A Dclk-scpi.c76 int idx; __scpi_dvfs_round_rate() local
80 for (idx = 0; idx < clk->info->count; idx++, opp++) { __scpi_dvfs_round_rate()
97 int idx = clk->scpi_ops->dvfs_get_idx(clk->id); scpi_dvfs_recalc_rate() local
100 if (idx < 0) scpi_dvfs_recalc_rate()
103 opp = clk->info->opps + idx; scpi_dvfs_recalc_rate()
117 int idx, max_opp = clk->info->count; __scpi_find_dvfs_index() local
120 for (idx = 0; idx < max_opp; idx++, opp++) __scpi_find_dvfs_index()
122 return idx; __scpi_find_dvfs_index()
191 unsigned int idx = clkspec->args[0], count; scpi_of_clk_src_get() local
195 if (idx == sclk->id) scpi_of_clk_src_get()
206 int idx, count; scpi_clk_add() local
229 for (idx = 0; idx < count; idx++) { scpi_clk_add()
239 idx, &name)) { scpi_clk_add()
245 idx, &val)) { scpi_clk_add()
252 clks[idx] = scpi_clk_ops_init(dev, match, sclk, name); scpi_clk_add()
253 if (IS_ERR_OR_NULL(clks[idx])) scpi_clk_add()
257 clk_data->clk[idx] = sclk; scpi_clk_add()
/linux-4.4.14/arch/sh/kernel/
H A Dperf_event.c180 struct hw_perf_event *hwc, int idx) sh_perf_event_update()
200 new_raw_count = sh_pmu->read(idx); sh_perf_event_update()
224 int idx = hwc->idx; sh_pmu_stop() local
227 sh_pmu->disable(hwc, idx); sh_pmu_stop()
228 cpuc->events[idx] = NULL; sh_pmu_stop()
233 sh_perf_event_update(event, &event->hw, idx); sh_pmu_stop()
242 int idx = hwc->idx; sh_pmu_start() local
244 if (WARN_ON_ONCE(idx == -1)) sh_pmu_start()
250 cpuc->events[idx] = event; sh_pmu_start()
252 sh_pmu->enable(hwc, idx); sh_pmu_start()
260 __clear_bit(event->hw.idx, cpuc->used_mask); sh_pmu_del()
269 int idx = hwc->idx; sh_pmu_add() local
274 if (__test_and_set_bit(idx, cpuc->used_mask)) { sh_pmu_add()
275 idx = find_first_zero_bit(cpuc->used_mask, sh_pmu->num_events); sh_pmu_add()
276 if (idx == sh_pmu->num_events) sh_pmu_add()
279 __set_bit(idx, cpuc->used_mask); sh_pmu_add()
280 hwc->idx = idx; sh_pmu_add()
283 sh_pmu->disable(hwc, idx); sh_pmu_add()
298 sh_perf_event_update(event, &event->hw, event->hw.idx); sh_pmu_read()
179 sh_perf_event_update(struct perf_event *event, struct hw_perf_event *hwc, int idx) sh_perf_event_update() argument
/linux-4.4.14/arch/xtensa/kernel/
H A Dperf_event.c129 static inline uint32_t xtensa_pmu_read_counter(int idx) xtensa_pmu_read_counter() argument
131 return get_er(XTENSA_PMU_PM(idx)); xtensa_pmu_read_counter()
134 static inline void xtensa_pmu_write_counter(int idx, uint32_t v) xtensa_pmu_write_counter() argument
136 set_er(v, XTENSA_PMU_PM(idx)); xtensa_pmu_write_counter()
140 struct hw_perf_event *hwc, int idx) xtensa_perf_event_update()
147 new_raw_count = xtensa_pmu_read_counter(event->hw.idx); xtensa_perf_event_update()
158 struct hw_perf_event *hwc, int idx) xtensa_perf_event_set_period()
185 xtensa_pmu_write_counter(idx, -left); xtensa_perf_event_set_period()
246 int idx = hwc->idx; xtensa_pmu_start() local
248 if (WARN_ON_ONCE(idx == -1)) xtensa_pmu_start()
253 xtensa_perf_event_set_period(event, hwc, idx); xtensa_pmu_start()
258 set_er(hwc->config, XTENSA_PMU_PMCTRL(idx)); xtensa_pmu_start()
264 int idx = hwc->idx; xtensa_pmu_stop() local
267 set_er(0, XTENSA_PMU_PMCTRL(idx)); xtensa_pmu_stop()
268 set_er(get_er(XTENSA_PMU_PMSTAT(idx)), xtensa_pmu_stop()
269 XTENSA_PMU_PMSTAT(idx)); xtensa_pmu_stop()
275 xtensa_perf_event_update(event, &event->hw, idx); xtensa_pmu_stop()
288 int idx = hwc->idx; xtensa_pmu_add() local
290 if (__test_and_set_bit(idx, ev->used_mask)) { xtensa_pmu_add()
291 idx = find_first_zero_bit(ev->used_mask, xtensa_pmu_add()
293 if (idx == XCHAL_NUM_PERF_COUNTERS) xtensa_pmu_add()
296 __set_bit(idx, ev->used_mask); xtensa_pmu_add()
297 hwc->idx = idx; xtensa_pmu_add()
299 ev->event[idx] = event; xtensa_pmu_add()
315 __clear_bit(event->hw.idx, ev->used_mask); xtensa_pmu_del()
321 xtensa_perf_event_update(event, &event->hw, event->hw.idx); xtensa_pmu_read()
139 xtensa_perf_event_update(struct perf_event *event, struct hw_perf_event *hwc, int idx) xtensa_perf_event_update() argument
157 xtensa_perf_event_set_period(struct perf_event *event, struct hw_perf_event *hwc, int idx) xtensa_perf_event_set_period() argument
/linux-4.4.14/arch/blackfin/kernel/
H A Dperf_event.c190 static u64 bfin_pfmon_read(int idx) bfin_pfmon_read() argument
192 return bfin_read32(PFCNTR0 + (idx * 4)); bfin_pfmon_read()
195 static void bfin_pfmon_disable(struct hw_perf_event *hwc, int idx) bfin_pfmon_disable() argument
197 bfin_write_PFCTL(bfin_read_PFCTL() & ~PFCEN(idx, PFCEN_MASK)); bfin_pfmon_disable()
200 static void bfin_pfmon_enable(struct hw_perf_event *hwc, int idx) bfin_pfmon_enable() argument
205 if (idx) { bfin_pfmon_enable()
261 struct hw_perf_event *hwc, int idx) bfin_perf_event_update()
281 new_raw_count = bfin_pfmon_read(idx); bfin_perf_event_update()
305 int idx = hwc->idx; bfin_pmu_stop() local
308 bfin_pfmon_disable(hwc, idx); bfin_pmu_stop()
309 cpuc->events[idx] = NULL; bfin_pmu_stop()
314 bfin_perf_event_update(event, &event->hw, idx); bfin_pmu_stop()
323 int idx = hwc->idx; bfin_pmu_start() local
325 if (WARN_ON_ONCE(idx == -1)) bfin_pmu_start()
331 cpuc->events[idx] = event; bfin_pmu_start()
333 bfin_pfmon_enable(hwc, idx); bfin_pmu_start()
341 __clear_bit(event->hw.idx, cpuc->used_mask); bfin_pmu_del()
350 int idx = hwc->idx; bfin_pmu_add() local
355 if (__test_and_set_bit(idx, cpuc->used_mask)) { bfin_pmu_add()
356 idx = find_first_zero_bit(cpuc->used_mask, MAX_HWEVENTS); bfin_pmu_add()
357 if (idx == MAX_HWEVENTS) bfin_pmu_add()
360 __set_bit(idx, cpuc->used_mask); bfin_pmu_add()
361 hwc->idx = idx; bfin_pmu_add()
364 bfin_pfmon_disable(hwc, idx); bfin_pmu_add()
379 bfin_perf_event_update(event, &event->hw, event->hw.idx); bfin_pmu_read()
434 bfin_pfmon_enable(hwc, hwc->idx); bfin_pmu_enable()
260 bfin_perf_event_update(struct perf_event *event, struct hw_perf_event *hwc, int idx) bfin_perf_event_update() argument
/linux-4.4.14/drivers/pci/
H A Dhtirq.c33 pci_write_config_byte(cfg->dev, cfg->pos + 2, cfg->idx); write_ht_irq_msg()
37 pci_write_config_byte(cfg->dev, cfg->pos + 2, cfg->idx + 1); write_ht_irq_msg()
74 * @idx: Which of the possible irqs to attach to.
79 int __ht_create_irq(struct pci_dev *dev, int idx, ht_irq_update_t *update) __ht_create_irq() argument
89 /* Verify the idx I want to use is in range */ __ht_create_irq()
96 if (idx > max_irq) __ht_create_irq()
99 irq = arch_setup_ht_irq(idx, pos, dev, update); __ht_create_irq()
110 * @idx: Which of the possible irqs to attach to.
117 int ht_create_irq(struct pci_dev *dev, int idx) ht_create_irq() argument
119 return __ht_create_irq(dev, idx, NULL); ht_create_irq()
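Per the @idx kerneldoc above, a caller selects one of the device's possible HT irq messages by index, with ht_create_irq() as the no-callback wrapper around __ht_create_irq(). A hedged usage sketch; ht_destroy_irq() is assumed as the teardown counterpart, as it is not shown in this listing:

int irq = ht_create_irq(pdev, 0);       /* attach to HT irq message 0 */
if (irq < 0)
        return irq;
/* ... later: ht_destroy_irq(irq); */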
/linux-4.4.14/drivers/clk/mxs/
H A Dclk-ref.c22 * @idx: the index of the reference clock within the same register
25 * one register space, and @idx is used to identify them. Each reference
32 u8 idx; member in struct:clk_ref
41 writel_relaxed(1 << ((ref->idx + 1) * 8 - 1), ref->reg + CLR); clk_ref_enable()
50 writel_relaxed(1 << ((ref->idx + 1) * 8 - 1), ref->reg + SET); clk_ref_disable()
58 u8 frac = (readl_relaxed(ref->reg) >> (ref->idx * 8)) & 0x3f; clk_ref_recalc_rate()
96 u8 frac, shift = ref->idx * 8; clk_ref_set_rate()
128 void __iomem *reg, u8 idx) mxs_clk_ref()
145 ref->idx = idx; mxs_clk_ref()
127 mxs_clk_ref(const char *name, const char *parent_name, void __iomem *reg, u8 idx) mxs_clk_ref() argument
/linux-4.4.14/arch/s390/kernel/
H A Dcache.c71 int idx; show_cacheinfo() local
77 for (idx = 0; idx < this_cpu_ci->num_leaves; idx++) { show_cacheinfo()
78 cache = this_cpu_ci->info_list + idx; show_cacheinfo()
79 seq_printf(m, "cache%-11d: ", idx); show_cacheinfo()
162 unsigned int level, idx, pvt; populate_cache_leaves() local
169 for (idx = 0, level = 0; level < this_cpu_ci->num_levels && populate_cache_leaves()
170 idx < this_cpu_ci->num_leaves; idx++, level++) { populate_cache_leaves()
/linux-4.4.14/arch/mips/include/asm/
H A Dmsa.h23 extern void read_msa_wr_b(unsigned idx, union fpureg *to);
24 extern void read_msa_wr_h(unsigned idx, union fpureg *to);
25 extern void read_msa_wr_w(unsigned idx, union fpureg *to);
26 extern void read_msa_wr_d(unsigned idx, union fpureg *to);
30 * @idx: The index of the vector register to read
34 * Read the value of MSA vector register idx into the FPU register
37 static inline void read_msa_wr(unsigned idx, union fpureg *to, read_msa_wr() argument
42 read_msa_wr_b(idx, to); read_msa_wr()
46 read_msa_wr_h(idx, to); read_msa_wr()
50 read_msa_wr_w(idx, to); read_msa_wr()
54 read_msa_wr_d(idx, to); read_msa_wr()
62 extern void write_msa_wr_b(unsigned idx, union fpureg *from);
63 extern void write_msa_wr_h(unsigned idx, union fpureg *from);
64 extern void write_msa_wr_w(unsigned idx, union fpureg *from);
65 extern void write_msa_wr_d(unsigned idx, union fpureg *from);
69 * @idx: The index of the vector register to write
74 * register idx, using the format fmt.
76 static inline void write_msa_wr(unsigned idx, union fpureg *from, write_msa_wr() argument
81 write_msa_wr_b(idx, from); write_msa_wr()
85 write_msa_wr_h(idx, from); write_msa_wr()
89 write_msa_wr_w(idx, from); write_msa_wr()
93 write_msa_wr_d(idx, from); write_msa_wr()
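The kerneldoc above describes a width-dispatched pair: fmt picks which of the _b/_h/_w/_d helpers moves vector register idx. A hedged round-trip sketch; the concrete fmt enumerators are not shown in this listing:

union fpureg tmp;

read_msa_wr(idx, &tmp, fmt);    /* vector reg idx -> tmp, element width fmt */
write_msa_wr(idx, &tmp, fmt);   /* tmp -> vector reg idx */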
H A Dtlb.h21 #define UNIQUE_ENTRYHI(idx) \
22 ((CKSEG0 + ((idx) << (PAGE_SHIFT + 1))) | \
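UNIQUE_ENTRYHI() manufactures a distinct, harmless EntryHi value per TLB index so an invalidated entry can be parked where it never matches: CKSEG0 is an unmapped segment, so no TLB lookup can hit there. A worked instance, assuming PAGE_SHIFT == 12:

/* UNIQUE_ENTRYHI(idx) = CKSEG0 + (idx << 13): one 8 KB-aligned address
 * per TLB index, e.g. idx 3 -> CKSEG0 + 0x6000 */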
/linux-4.4.14/sound/pci/ice1712/
H A Dwtm.c82 int id, idx, change; stac9460_dac_mute_all() local
88 idx = STAC946X_MASTER_VOLUME; stac9460_dac_mute_all()
90 idx = STAC946X_LF_VOLUME - 1 + id; stac9460_dac_mute_all()
91 old = stac9460_get(ice, idx); stac9460_dac_mute_all()
95 stac9460_put(ice, idx, new); stac9460_dac_mute_all()
107 idx = STAC946X_MASTER_VOLUME; stac9460_dac_mute_all()
109 idx = STAC946X_LF_VOLUME - 1 + id; stac9460_dac_mute_all()
110 old = stac9460_2_get(ice, idx); stac9460_dac_mute_all()
114 stac9460_2_put(ice, idx, new); stac9460_dac_mute_all()
133 int idx, id; stac9460_dac_mute_get() local
138 idx = STAC946X_MASTER_VOLUME; stac9460_dac_mute_get()
142 idx = id + STAC946X_LF_VOLUME; stac9460_dac_mute_get()
145 val = stac9460_get(ice, idx); stac9460_dac_mute_get()
147 val = stac9460_2_get(ice, idx - 6); stac9460_dac_mute_get()
159 int id, idx; stac9460_dac_mute_put() local
163 idx = STAC946X_MASTER_VOLUME; stac9460_dac_mute_put()
164 old = stac9460_get(ice, idx); stac9460_dac_mute_put()
169 stac9460_put(ice, idx, new); stac9460_dac_mute_put()
170 stac9460_2_put(ice, idx, new); stac9460_dac_mute_put()
174 idx = id + STAC946X_LF_VOLUME; stac9460_dac_mute_put()
176 old = stac9460_get(ice, idx); stac9460_dac_mute_put()
178 old = stac9460_2_get(ice, idx - 6); stac9460_dac_mute_put()
184 stac9460_put(ice, idx, new); stac9460_dac_mute_put()
186 stac9460_2_put(ice, idx - 6, new); stac9460_dac_mute_put()
209 int idx, id; stac9460_dac_vol_get() local
213 idx = STAC946X_MASTER_VOLUME; stac9460_dac_vol_get()
217 idx = id + STAC946X_LF_VOLUME; stac9460_dac_vol_get()
220 vol = stac9460_get(ice, idx) & 0x7f; stac9460_dac_vol_get()
222 vol = stac9460_2_get(ice, idx - 6) & 0x7f; stac9460_dac_vol_get()
231 int idx, id; stac9460_dac_vol_put() local
236 idx = STAC946X_MASTER_VOLUME; stac9460_dac_vol_put()
238 tmp = stac9460_get(ice, idx); stac9460_dac_vol_put()
242 stac9460_put(ice, idx, (0x7f - nvol) | (tmp & 0x80)); stac9460_dac_vol_put()
243 stac9460_2_put(ice, idx, (0x7f - nvol) | (tmp & 0x80)); stac9460_dac_vol_put()
247 idx = id + STAC946X_LF_VOLUME; stac9460_dac_vol_put()
250 tmp = stac9460_get(ice, idx); stac9460_dac_vol_put()
252 tmp = stac9460_2_get(ice, idx - 6); stac9460_dac_vol_put()
257 stac9460_put(ice, idx, (0x7f - nvol) | stac9460_dac_vol_put()
260 stac9460_2_put(ice, idx-6, (0x7f - nvol) | stac9460_dac_vol_put()
H A Dprodigy192.c90 * idx = STAC9460 volume register number, mute: 0 = mute, 1 = unmute
92 static int stac9460_dac_mute(struct snd_ice1712 *ice, int idx, stac9460_dac_mute() argument
97 old = stac9460_get(ice, idx); stac9460_dac_mute()
101 /* dev_dbg(ice->card->dev, "Volume register 0x%02x: 0x%02x\n", idx, new);*/ stac9460_dac_mute()
102 stac9460_put(ice, idx, new); stac9460_dac_mute()
112 int idx; stac9460_dac_mute_get() local
115 idx = STAC946X_MASTER_VOLUME; stac9460_dac_mute_get()
117 idx = snd_ctl_get_ioffidx(kcontrol, &ucontrol->id) + STAC946X_LF_VOLUME; stac9460_dac_mute_get()
118 val = stac9460_get(ice, idx); stac9460_dac_mute_get()
127 int idx, change; stac9460_dac_mute_put() local
130 idx = STAC946X_MASTER_VOLUME; stac9460_dac_mute_put()
132 idx = snd_ctl_get_ioffidx(kcontrol, &ucontrol->id) + STAC946X_LF_VOLUME; stac9460_dac_mute_put()
136 dev_dbg(ice->card->dev, "Mute put: reg 0x%02x, ctrl value: 0x%02x\n", idx, stac9460_dac_mute_put()
139 change = stac9460_dac_mute(ice, idx, ucontrol->value.integer.value[0]); stac9460_dac_mute_put()
159 int idx; stac9460_dac_vol_get() local
163 idx = STAC946X_MASTER_VOLUME; stac9460_dac_vol_get()
165 idx = snd_ctl_get_ioffidx(kcontrol, &ucontrol->id) + STAC946X_LF_VOLUME; stac9460_dac_vol_get()
166 vol = stac9460_get(ice, idx) & 0x7f; stac9460_dac_vol_get()
175 int idx; stac9460_dac_vol_put() local
180 idx = STAC946X_MASTER_VOLUME; stac9460_dac_vol_put()
182 idx = snd_ctl_get_ioffidx(kcontrol, &ucontrol->id) + STAC946X_LF_VOLUME; stac9460_dac_vol_put()
184 tmp = stac9460_get(ice, idx); stac9460_dac_vol_put()
191 idx, ovol); stac9460_dac_vol_put()
193 stac9460_put(ice, idx, (0x7f - nvol) | (tmp & 0x80)); stac9460_dac_vol_put()
321 int idx; stac9460_set_rate_val() local
340 for (idx = 0; idx < 7 ; ++idx) stac9460_set_rate_val()
341 changed[idx] = stac9460_dac_mute(ice, stac9460_set_rate_val()
342 STAC946X_MASTER_VOLUME + idx, 0); stac9460_set_rate_val()
348 for (idx = 0; idx < 7 ; ++idx) { stac9460_set_rate_val()
349 if (changed[idx]) stac9460_set_rate_val()
350 stac9460_dac_mute(ice, STAC946X_MASTER_VOLUME + idx, 1); stac9460_set_rate_val()
447 unsigned int data, int idx) write_data()
449 for (; idx >= 0; idx--) { write_data()
455 if (data & (1 << idx)) write_data()
472 int idx) read_data()
476 for (; idx >= 0; idx--) { read_data()
483 data |= (1 << idx); read_data()
446 write_data(struct snd_ice1712 *ice, unsigned int gpio, unsigned int data, int idx) write_data() argument
471 read_data(struct snd_ice1712 *ice, unsigned int gpio, int idx) read_data() argument
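The comment above stac9460_dac_mute() fixes the convention: idx names the STAC9460 volume register and the third argument is 0 = mute, 1 = unmute. An illustrative call matching the shown signature; it returns nonzero when the register actually changed:

changed = stac9460_dac_mute(ice, STAC946X_MASTER_VOLUME, 1);    /* unmute master */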
/linux-4.4.14/arch/metag/kernel/perf/
H A Dperf_event.c190 struct hw_perf_event *hwc, int idx) metag_pmu_event_update()
206 new_raw_count = metag_pmu->read(idx); metag_pmu_event_update()
222 struct hw_perf_event *hwc, int idx) metag_pmu_event_set_period()
251 metag_pmu->write(idx, -left & MAX_PERIOD); metag_pmu_event_set_period()
263 int idx = hwc->idx; metag_pmu_start() local
265 if (WARN_ON_ONCE(idx == -1)) metag_pmu_start()
287 metag_pmu_event_set_period(event, hwc, hwc->idx); metag_pmu_start()
288 cpuc->events[idx] = event; metag_pmu_start()
289 metag_pmu->enable(hwc, idx); metag_pmu_start()
301 metag_pmu_event_update(event, hwc, hwc->idx); metag_pmu_stop()
302 metag_pmu->disable(hwc, hwc->idx); metag_pmu_stop()
311 int idx = 0, ret = 0; metag_pmu_add() local
322 idx = METAG_INST_COUNTER; metag_pmu_add()
325 idx = find_first_zero_bit(cpuc->used_mask, metag_pmu_add()
327 if (idx >= METAG_INST_COUNTER) { metag_pmu_add()
332 __set_bit(idx, cpuc->used_mask); metag_pmu_add()
334 hwc->idx = idx; metag_pmu_add()
337 metag_pmu->disable(hwc, idx); metag_pmu_add()
353 int idx = hwc->idx; metag_pmu_del() local
355 WARN_ON(idx < 0); metag_pmu_del()
357 cpuc->events[idx] = NULL; metag_pmu_del()
358 __clear_bit(idx, cpuc->used_mask); metag_pmu_del()
368 if (hwc->idx < 0) metag_pmu_read()
371 metag_pmu_event_update(event, hwc, hwc->idx); metag_pmu_read()
576 hwc->idx = -1; _hw_perf_event_init()
598 static void metag_pmu_enable_counter(struct hw_perf_event *event, int idx) metag_pmu_enable_counter() argument
611 if (METAG_INST_COUNTER == idx) { metag_pmu_enable_counter()
614 config, idx); metag_pmu_enable_counter()
629 perf_addr = (void *)PERF_ICORE(idx); metag_pmu_enable_counter()
633 perf_addr = (void *)PERF_CHAN(idx); metag_pmu_enable_counter()
658 tmp |= metag_in32(PERF_COUNT(idx)) & 0x00ffffff; metag_pmu_enable_counter()
666 metag_out32(tmp, PERF_COUNT(idx)); metag_pmu_enable_counter()
671 static void metag_pmu_disable_counter(struct hw_perf_event *event, int idx) metag_pmu_disable_counter() argument
682 if (METAG_INST_COUNTER == idx) metag_pmu_disable_counter()
698 tmp = metag_in32(PERF_COUNT(idx)); metag_pmu_disable_counter()
700 metag_out32(tmp, PERF_COUNT(idx)); metag_pmu_disable_counter()
705 static u64 metag_pmu_read_counter(int idx) metag_pmu_read_counter() argument
709 if (METAG_INST_COUNTER == idx) { metag_pmu_read_counter()
714 tmp = metag_in32(PERF_COUNT(idx)) & 0x00ffffff; metag_pmu_read_counter()
719 static void metag_pmu_write_counter(int idx, u32 val) metag_pmu_write_counter() argument
729 if (METAG_INST_COUNTER == idx) metag_pmu_write_counter()
739 tmp = metag_in32(PERF_COUNT(idx)) & 0xff000000; metag_pmu_write_counter()
741 metag_out32(val, PERF_COUNT(idx)); metag_pmu_write_counter()
746 static int metag_pmu_event_map(int idx) metag_pmu_event_map() argument
748 return metag_general_events[idx]; metag_pmu_event_map()
753 int idx = (int)dev; metag_pmu_counter_overflow() local
755 struct perf_event *event = cpuhw->events[idx]; metag_pmu_counter_overflow()
768 counter = metag_in32(PERF_COUNT(idx)); metag_pmu_counter_overflow()
769 metag_out32((counter & 0x00ffffff), PERF_COUNT(idx)); metag_pmu_counter_overflow()
773 metag_pmu_event_update(event, hwc, idx); metag_pmu_counter_overflow()
775 metag_pmu_event_set_period(event, hwc, idx); metag_pmu_counter_overflow()
785 (metag_in32(PERF_COUNT(idx)) & 0x00ffffff); metag_pmu_counter_overflow()
786 metag_out32(counter, PERF_COUNT(idx)); metag_pmu_counter_overflow()
189 metag_pmu_event_update(struct perf_event *event, struct hw_perf_event *hwc, int idx) metag_pmu_event_update() argument
221 metag_pmu_event_set_period(struct perf_event *event, struct hw_perf_event *hwc, int idx) metag_pmu_event_set_period() argument
/linux-4.4.14/arch/sparc/mm/
H A Dhighmem.c54 long idx, type; kmap_atomic() local
62 idx = type + KM_TYPE_NR*smp_processor_id(); kmap_atomic()
63 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic()
73 BUG_ON(!pte_none(*(kmap_pte-idx))); kmap_atomic()
75 set_pte(kmap_pte-idx, mk_pte(page, kmap_prot)); kmap_atomic()
102 unsigned long idx; __kunmap_atomic() local
104 idx = type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic()
105 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN+idx)); __kunmap_atomic()
118 pte_clear(&init_mm, vaddr, kmap_pte-idx); __kunmap_atomic()
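kmap_atomic() above carves the fixmap into per-CPU banks: each CPU owns KM_TYPE_NR consecutive slots, so type + KM_TYPE_NR * cpu is collision-free across CPUs and nesting levels; the same arithmetic recurs in the x86 and mn10300 variants below. A worked instance with an illustrative KM_TYPE_NR:

/* with KM_TYPE_NR == 20: CPU 2, type 3 -> idx 43, i.e. the pte at
 * kmap_pte - 43 and the address __fix_to_virt(FIX_KMAP_BEGIN + 43) */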
/linux-4.4.14/arch/x86/mm/
H A Dhighmem_32.c36 int idx, type; kmap_atomic_prot() local
45 idx = type + KM_TYPE_NR*smp_processor_id(); kmap_atomic_prot()
46 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic_prot()
47 BUG_ON(!pte_none(*(kmap_pte-idx))); kmap_atomic_prot()
48 set_pte(kmap_pte-idx, mk_pte(page, prot)); kmap_atomic_prot()
77 int idx, type; __kunmap_atomic() local
80 idx = type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic()
83 WARN_ON_ONCE(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); __kunmap_atomic()
91 kpte_clear_flush(kmap_pte-idx, vaddr); __kunmap_atomic()
H A Diomap_32.c60 int idx, type; kmap_atomic_prot_pfn() local
66 idx = type + KM_TYPE_NR * smp_processor_id(); kmap_atomic_prot_pfn()
67 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic_prot_pfn()
68 set_pte(kmap_pte - idx, pfn_pte(pfn, prot)); kmap_atomic_prot_pfn()
102 int idx, type; iounmap_atomic() local
105 idx = type + KM_TYPE_NR * smp_processor_id(); iounmap_atomic()
108 WARN_ON_ONCE(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); iounmap_atomic()
116 kpte_clear_flush(kmap_pte-idx, vaddr); iounmap_atomic()
/linux-4.4.14/arch/mn10300/include/asm/
H A Dhighmem.h76 int idx, type; kmap_atomic() local
84 idx = type + KM_TYPE_NR * smp_processor_id(); kmap_atomic()
85 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); kmap_atomic()
87 if (!pte_none(*(kmap_pte - idx))) kmap_atomic()
90 set_pte(kmap_pte - idx, mk_pte(page, kmap_prot)); kmap_atomic()
110 unsigned int idx; __kunmap_atomic() local
111 idx = type + KM_TYPE_NR * smp_processor_id(); __kunmap_atomic()
113 if (vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)) __kunmap_atomic()
120 pte_clear(kmap_pte - idx); __kunmap_atomic()
/linux-4.4.14/drivers/net/ethernet/ibm/emac/
H A Drgmii.c39 #define RGMII_FER_MASK(idx) (0x7 << ((idx) * 4))
40 #define RGMII_FER_RTBI(idx) (0x4 << ((idx) * 4))
41 #define RGMII_FER_RGMII(idx) (0x5 << ((idx) * 4))
42 #define RGMII_FER_TBI(idx) (0x6 << ((idx) * 4))
43 #define RGMII_FER_GMII(idx) (0x7 << ((idx) * 4))
44 #define RGMII_FER_MII(idx) RGMII_FER_GMII(idx)
47 #define RGMII_SSR_MASK(idx) (0x7 << ((idx) * 8))
48 #define RGMII_SSR_10(idx) (0x1 << ((idx) * 8))
49 #define RGMII_SSR_100(idx) (0x2 << ((idx) * 8))
50 #define RGMII_SSR_1000(idx) (0x4 << ((idx) * 8))
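The macros above pack one port's mode into a 4-bit FER field and its speed into an 8-bit SSR field, both selected by idx; worked values, straight from the shifts:

/* port 1: RGMII_FER_MASK(1) = 0x70,  RGMII_FER_RGMII(1) = 0x50,
 *         RGMII_SSR_MASK(1) = 0x700, RGMII_SSR_1000(1)  = 0x400 */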
/linux-4.4.14/arch/x86/kvm/
H A Dpmu_intel.c88 static unsigned intel_find_fixed_event(int idx) intel_find_fixed_event() argument
90 if (idx >= ARRAY_SIZE(fixed_pmc_events)) intel_find_fixed_event()
93 return intel_arch_events[fixed_pmc_events[idx]].event_type; intel_find_fixed_event()
101 return test_bit(pmc->idx, (unsigned long *)&pmu->global_ctrl); intel_pmc_is_enabled()
110 u32 idx = pmc_idx - INTEL_PMC_IDX_FIXED; intel_pmc_idx_to_pmc() local
112 return get_fixed_pmc(pmu, idx + MSR_CORE_PERF_FIXED_CTR0); intel_pmc_idx_to_pmc()
116 /* returns 0 if idx's corresponding MSR exists; otherwise returns 1. */ intel_is_valid_msr_idx()
117 static int intel_is_valid_msr_idx(struct kvm_vcpu *vcpu, unsigned idx) intel_is_valid_msr_idx() argument
120 bool fixed = idx & (1u << 30); intel_is_valid_msr_idx()
122 idx &= ~(3u << 30); intel_is_valid_msr_idx()
124 return (!fixed && idx >= pmu->nr_arch_gp_counters) || intel_is_valid_msr_idx()
125 (fixed && idx >= pmu->nr_arch_fixed_counters); intel_is_valid_msr_idx()
129 unsigned idx) intel_msr_idx_to_pmc()
132 bool fixed = idx & (1u << 30); intel_msr_idx_to_pmc()
135 idx &= ~(3u << 30); intel_msr_idx_to_pmc()
136 if (!fixed && idx >= pmu->nr_arch_gp_counters) intel_msr_idx_to_pmc()
138 if (fixed && idx >= pmu->nr_arch_fixed_counters) intel_msr_idx_to_pmc()
142 return &counters[idx]; intel_msr_idx_to_pmc()
316 pmu->gp_counters[i].idx = i; intel_pmu_init()
322 pmu->fixed_counters[i].idx = i + INTEL_PMC_IDX_FIXED; intel_pmu_init()
128 intel_msr_idx_to_pmc(struct kvm_vcpu *vcpu, unsigned idx) intel_msr_idx_to_pmc() argument
H A Dpmu.c36 * 2. MSR Index (named idx): This is normally used by the RDPMC instruction. kvm_pmu_rdpmc()
39 * that it also supports fixed counters. idx can be used as an index to kvm_pmu_rdpmc()
42 * code. Each pmc, stored in the kvm_pmc.idx field, is unique across kvm_pmu_rdpmc()
65 if (!test_and_set_bit(pmc->idx, kvm_perf_overflow()
67 __set_bit(pmc->idx, (unsigned long *)&pmu->global_status); kvm_perf_overflow()
79 if (!test_and_set_bit(pmc->idx, kvm_perf_overflow_intr()
81 __set_bit(pmc->idx, (unsigned long *)&pmu->global_status); kvm_perf_overflow_intr()
133 clear_bit(pmc->idx, (unsigned long*)&pmc_to_pmu(pmc)->reprogram_pmi); pmc_reprogram_counter()
178 void reprogram_fixed_counter(struct kvm_pmc *pmc, u8 ctrl, int idx) reprogram_fixed_counter() argument
189 kvm_x86_ops->pmu_ops->find_fixed_event(idx), reprogram_fixed_counter()
206 int idx = pmc_idx - INTEL_PMC_IDX_FIXED; reprogram_counter() local
207 u8 ctrl = fixed_ctrl_field(pmu->fixed_ctr_ctrl, idx); reprogram_counter()
209 reprogram_fixed_counter(pmc, ctrl, idx); reprogram_counter()
234 /* check if idx is a valid index to access PMU */ kvm_pmu_is_valid_msr_idx()
235 int kvm_pmu_is_valid_msr_idx(struct kvm_vcpu *vcpu, unsigned idx) kvm_pmu_is_valid_msr_idx() argument
237 return kvm_x86_ops->pmu_ops->is_valid_msr_idx(vcpu, idx); kvm_pmu_is_valid_msr_idx()
240 int kvm_pmu_rdpmc(struct kvm_vcpu *vcpu, unsigned idx, u64 *data) kvm_pmu_rdpmc() argument
242 bool fast_mode = idx & (1u << 31); kvm_pmu_rdpmc()
246 pmc = kvm_x86_ops->pmu_ops->msr_idx_to_pmc(vcpu, idx); kvm_pmu_rdpmc()
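The comment block and kvm_pmu_rdpmc() above fix the ECX encoding a guest passes to RDPMC: bit 31 requests fast mode, bit 30 selects the fixed-counter bank, and the low bits index within the bank. A minimal C sketch of exactly the tests visible in this listing:

static void decode_rdpmc_ecx(unsigned int ecx)
{
        int fast_mode = !!(ecx & (1u << 31));   /* as in kvm_pmu_rdpmc() */
        int fixed = !!(ecx & (1u << 30));       /* as in *_is_valid_msr_idx() */
        unsigned int idx = ecx & ~(3u << 30);   /* index within the bank */

        /* fixed ? fixed_counters[idx] : gp_counters[idx] */
        (void)fast_mode; (void)fixed; (void)idx;
}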
H A Dpmu_amd.c52 static unsigned amd_find_fixed_event(int idx) amd_find_fixed_event() argument
70 /* returns 0 if idx's corresponding MSR exists; otherwise returns 1. */ amd_is_valid_msr_idx()
71 static int amd_is_valid_msr_idx(struct kvm_vcpu *vcpu, unsigned idx) amd_is_valid_msr_idx() argument
75 idx &= ~(3u << 30); amd_is_valid_msr_idx()
77 return (idx >= pmu->nr_arch_gp_counters); amd_is_valid_msr_idx()
80 /* idx is the ECX register of RDPMC instruction */ amd_msr_idx_to_pmc()
81 static struct kvm_pmc *amd_msr_idx_to_pmc(struct kvm_vcpu *vcpu, unsigned idx) amd_msr_idx_to_pmc() argument
86 idx &= ~(3u << 30); amd_msr_idx_to_pmc()
87 if (idx >= pmu->nr_arch_gp_counters) amd_msr_idx_to_pmc()
91 return &counters[idx]; amd_msr_idx_to_pmc()
175 pmu->gp_counters[i].idx = i; amd_pmu_init()
/linux-4.4.14/arch/sh/kernel/cpu/sh2/
H A Dclock-sh7619.c37 int idx = (__raw_readw(FREQCR) & 0x0007); module_clk_recalc() local
38 return clk->parent->rate / pfc_divisors[idx]; module_clk_recalc()
65 void __init arch_init_clk_ops(struct sh_clk_ops **ops, int idx) arch_init_clk_ops() argument
75 if (idx < ARRAY_SIZE(sh7619_clk_ops)) arch_init_clk_ops()
76 *ops = sh7619_clk_ops[idx]; arch_init_clk_ops()
/linux-4.4.14/drivers/gpu/drm/
H A Ddrm_dp_mst_topology.c145 int idx = 0; drm_dp_encode_sideband_msg_hdr() local
148 buf[idx++] = ((hdr->lct & 0xf) << 4) | (hdr->lcr & 0xf); drm_dp_encode_sideband_msg_hdr()
150 buf[idx++] = hdr->rad[i]; drm_dp_encode_sideband_msg_hdr()
151 buf[idx++] = (hdr->broadcast << 7) | (hdr->path_msg << 6) | drm_dp_encode_sideband_msg_hdr()
153 buf[idx++] = (hdr->somt << 7) | (hdr->eomt << 6) | (hdr->seqno << 4); drm_dp_encode_sideband_msg_hdr()
155 crc4 = drm_dp_msg_header_crc4(buf, (idx * 2) - 1); drm_dp_encode_sideband_msg_hdr()
156 buf[idx - 1] |= (crc4 & 0xf); drm_dp_encode_sideband_msg_hdr()
158 *len = idx; drm_dp_encode_sideband_msg_hdr()
167 u8 idx; drm_dp_decode_sideband_msg_hdr() local
183 idx = 1; drm_dp_decode_sideband_msg_hdr()
185 hdr->rad[i] = buf[idx++]; drm_dp_decode_sideband_msg_hdr()
186 hdr->broadcast = (buf[idx] >> 7) & 0x1; drm_dp_decode_sideband_msg_hdr()
187 hdr->path_msg = (buf[idx] >> 6) & 0x1; drm_dp_decode_sideband_msg_hdr()
188 hdr->msg_len = buf[idx] & 0x3f; drm_dp_decode_sideband_msg_hdr()
189 idx++; drm_dp_decode_sideband_msg_hdr()
190 hdr->somt = (buf[idx] >> 7) & 0x1; drm_dp_decode_sideband_msg_hdr()
191 hdr->eomt = (buf[idx] >> 6) & 0x1; drm_dp_decode_sideband_msg_hdr()
192 hdr->seqno = (buf[idx] >> 4) & 0x1; drm_dp_decode_sideband_msg_hdr()
193 idx++; drm_dp_decode_sideband_msg_hdr()
194 *hdrlen = idx; drm_dp_decode_sideband_msg_hdr()
201 int idx = 0; drm_dp_encode_sideband_req() local
204 buf[idx++] = req->req_type & 0x7f; drm_dp_encode_sideband_req()
208 buf[idx] = (req->u.port_num.port_number & 0xf) << 4; drm_dp_encode_sideband_req()
209 idx++; drm_dp_encode_sideband_req()
212 buf[idx] = (req->u.allocate_payload.port_number & 0xf) << 4 | drm_dp_encode_sideband_req()
214 idx++; drm_dp_encode_sideband_req()
215 buf[idx] = (req->u.allocate_payload.vcpi & 0x7f); drm_dp_encode_sideband_req()
216 idx++; drm_dp_encode_sideband_req()
217 buf[idx] = (req->u.allocate_payload.pbn >> 8); drm_dp_encode_sideband_req()
218 idx++; drm_dp_encode_sideband_req()
219 buf[idx] = (req->u.allocate_payload.pbn & 0xff); drm_dp_encode_sideband_req()
220 idx++; drm_dp_encode_sideband_req()
222 buf[idx] = ((req->u.allocate_payload.sdp_stream_sink[i * 2] & 0xf) << 4) | drm_dp_encode_sideband_req()
224 idx++; drm_dp_encode_sideband_req()
228 buf[idx] = (req->u.allocate_payload.sdp_stream_sink[i] & 0xf) << 4; drm_dp_encode_sideband_req()
229 idx++; drm_dp_encode_sideband_req()
233 buf[idx] = (req->u.query_payload.port_number & 0xf) << 4; drm_dp_encode_sideband_req()
234 idx++; drm_dp_encode_sideband_req()
235 buf[idx] = (req->u.query_payload.vcpi & 0x7f); drm_dp_encode_sideband_req()
236 idx++; drm_dp_encode_sideband_req()
239 buf[idx] = (req->u.dpcd_read.port_number & 0xf) << 4; drm_dp_encode_sideband_req()
240 buf[idx] |= ((req->u.dpcd_read.dpcd_address & 0xf0000) >> 16) & 0xf; drm_dp_encode_sideband_req()
241 idx++; drm_dp_encode_sideband_req()
242 buf[idx] = (req->u.dpcd_read.dpcd_address & 0xff00) >> 8; drm_dp_encode_sideband_req()
243 idx++; drm_dp_encode_sideband_req()
244 buf[idx] = (req->u.dpcd_read.dpcd_address & 0xff); drm_dp_encode_sideband_req()
245 idx++; drm_dp_encode_sideband_req()
246 buf[idx] = (req->u.dpcd_read.num_bytes); drm_dp_encode_sideband_req()
247 idx++; drm_dp_encode_sideband_req()
251 buf[idx] = (req->u.dpcd_write.port_number & 0xf) << 4; drm_dp_encode_sideband_req()
252 buf[idx] |= ((req->u.dpcd_write.dpcd_address & 0xf0000) >> 16) & 0xf; drm_dp_encode_sideband_req()
253 idx++; drm_dp_encode_sideband_req()
254 buf[idx] = (req->u.dpcd_write.dpcd_address & 0xff00) >> 8; drm_dp_encode_sideband_req()
255 idx++; drm_dp_encode_sideband_req()
256 buf[idx] = (req->u.dpcd_write.dpcd_address & 0xff); drm_dp_encode_sideband_req()
257 idx++; drm_dp_encode_sideband_req()
258 buf[idx] = (req->u.dpcd_write.num_bytes); drm_dp_encode_sideband_req()
259 idx++; drm_dp_encode_sideband_req()
260 memcpy(&buf[idx], req->u.dpcd_write.bytes, req->u.dpcd_write.num_bytes); drm_dp_encode_sideband_req()
261 idx += req->u.dpcd_write.num_bytes; drm_dp_encode_sideband_req()
264 buf[idx] = (req->u.i2c_read.port_number & 0xf) << 4; drm_dp_encode_sideband_req()
265 buf[idx] |= (req->u.i2c_read.num_transactions & 0x3); drm_dp_encode_sideband_req()
266 idx++; drm_dp_encode_sideband_req()
268 buf[idx] = req->u.i2c_read.transactions[i].i2c_dev_id & 0x7f; drm_dp_encode_sideband_req()
269 idx++; drm_dp_encode_sideband_req()
270 buf[idx] = req->u.i2c_read.transactions[i].num_bytes; drm_dp_encode_sideband_req()
271 idx++; drm_dp_encode_sideband_req()
272 memcpy(&buf[idx], req->u.i2c_read.transactions[i].bytes, req->u.i2c_read.transactions[i].num_bytes); drm_dp_encode_sideband_req()
273 idx += req->u.i2c_read.transactions[i].num_bytes; drm_dp_encode_sideband_req()
275 buf[idx] = (req->u.i2c_read.transactions[i].no_stop_bit & 0x1) << 5; drm_dp_encode_sideband_req()
276 buf[idx] |= (req->u.i2c_read.transactions[i].i2c_transaction_delay & 0xf); drm_dp_encode_sideband_req()
277 idx++; drm_dp_encode_sideband_req()
279 buf[idx] = (req->u.i2c_read.read_i2c_device_id) & 0x7f; drm_dp_encode_sideband_req()
280 idx++; drm_dp_encode_sideband_req()
281 buf[idx] = (req->u.i2c_read.num_bytes_read); drm_dp_encode_sideband_req()
282 idx++; drm_dp_encode_sideband_req()
286 buf[idx] = (req->u.i2c_write.port_number & 0xf) << 4; drm_dp_encode_sideband_req()
287 idx++; drm_dp_encode_sideband_req()
288 buf[idx] = (req->u.i2c_write.write_i2c_device_id) & 0x7f; drm_dp_encode_sideband_req()
289 idx++; drm_dp_encode_sideband_req()
290 buf[idx] = (req->u.i2c_write.num_bytes); drm_dp_encode_sideband_req()
291 idx++; drm_dp_encode_sideband_req()
292 memcpy(&buf[idx], req->u.i2c_write.bytes, req->u.i2c_write.num_bytes); drm_dp_encode_sideband_req()
293 idx += req->u.i2c_write.num_bytes; drm_dp_encode_sideband_req()
296 raw->cur_len = idx; drm_dp_encode_sideband_req()
309 int idx = 0; drm_dp_encode_sideband_reply() local
312 buf[idx++] = (rep->reply_type & 0x1) << 7 | (rep->req_type & 0x7f); drm_dp_encode_sideband_reply()
314 raw->cur_len = idx; drm_dp_encode_sideband_reply()
369 int idx = 1; drm_dp_sideband_parse_link_address() local
371 memcpy(repmsg->u.link_addr.guid, &raw->msg[idx], 16); drm_dp_sideband_parse_link_address()
372 idx += 16; drm_dp_sideband_parse_link_address()
373 repmsg->u.link_addr.nports = raw->msg[idx] & 0xf; drm_dp_sideband_parse_link_address()
374 idx++; drm_dp_sideband_parse_link_address()
375 if (idx > raw->curlen) drm_dp_sideband_parse_link_address()
378 if (raw->msg[idx] & 0x80) drm_dp_sideband_parse_link_address()
381 repmsg->u.link_addr.ports[i].peer_device_type = (raw->msg[idx] >> 4) & 0x7; drm_dp_sideband_parse_link_address()
382 repmsg->u.link_addr.ports[i].port_number = (raw->msg[idx] & 0xf); drm_dp_sideband_parse_link_address()
384 idx++; drm_dp_sideband_parse_link_address()
385 if (idx > raw->curlen) drm_dp_sideband_parse_link_address()
387 repmsg->u.link_addr.ports[i].mcs = (raw->msg[idx] >> 7) & 0x1; drm_dp_sideband_parse_link_address()
388 repmsg->u.link_addr.ports[i].ddps = (raw->msg[idx] >> 6) & 0x1; drm_dp_sideband_parse_link_address()
390 repmsg->u.link_addr.ports[i].legacy_device_plug_status = (raw->msg[idx] >> 5) & 0x1; drm_dp_sideband_parse_link_address()
391 idx++; drm_dp_sideband_parse_link_address()
392 if (idx > raw->curlen) drm_dp_sideband_parse_link_address()
395 repmsg->u.link_addr.ports[i].dpcd_revision = (raw->msg[idx]); drm_dp_sideband_parse_link_address()
396 idx++; drm_dp_sideband_parse_link_address()
397 if (idx > raw->curlen) drm_dp_sideband_parse_link_address()
399 memcpy(repmsg->u.link_addr.ports[i].peer_guid, &raw->msg[idx], 16); drm_dp_sideband_parse_link_address()
400 idx += 16; drm_dp_sideband_parse_link_address()
401 if (idx > raw->curlen) drm_dp_sideband_parse_link_address()
403 repmsg->u.link_addr.ports[i].num_sdp_streams = (raw->msg[idx] >> 4) & 0xf; drm_dp_sideband_parse_link_address()
404 repmsg->u.link_addr.ports[i].num_sdp_stream_sinks = (raw->msg[idx] & 0xf); drm_dp_sideband_parse_link_address()
405 idx++; drm_dp_sideband_parse_link_address()
408 if (idx > raw->curlen) drm_dp_sideband_parse_link_address()
414 DRM_DEBUG_KMS("link address reply parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_link_address()
421 int idx = 1; drm_dp_sideband_parse_remote_dpcd_read() local
422 repmsg->u.remote_dpcd_read_ack.port_number = raw->msg[idx] & 0xf; drm_dp_sideband_parse_remote_dpcd_read()
423 idx++; drm_dp_sideband_parse_remote_dpcd_read()
424 if (idx > raw->curlen) drm_dp_sideband_parse_remote_dpcd_read()
426 repmsg->u.remote_dpcd_read_ack.num_bytes = raw->msg[idx]; drm_dp_sideband_parse_remote_dpcd_read()
427 if (idx > raw->curlen) drm_dp_sideband_parse_remote_dpcd_read()
430 memcpy(repmsg->u.remote_dpcd_read_ack.bytes, &raw->msg[idx], repmsg->u.remote_dpcd_read_ack.num_bytes); drm_dp_sideband_parse_remote_dpcd_read()
433 DRM_DEBUG_KMS("link address reply parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_remote_dpcd_read()
440 int idx = 1; drm_dp_sideband_parse_remote_dpcd_write() local
441 repmsg->u.remote_dpcd_write_ack.port_number = raw->msg[idx] & 0xf; drm_dp_sideband_parse_remote_dpcd_write()
442 idx++; drm_dp_sideband_parse_remote_dpcd_write()
443 if (idx > raw->curlen) drm_dp_sideband_parse_remote_dpcd_write()
447 DRM_DEBUG_KMS("parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_remote_dpcd_write()
454 int idx = 1; drm_dp_sideband_parse_remote_i2c_read_ack() local
456 repmsg->u.remote_i2c_read_ack.port_number = (raw->msg[idx] & 0xf); drm_dp_sideband_parse_remote_i2c_read_ack()
457 idx++; drm_dp_sideband_parse_remote_i2c_read_ack()
458 if (idx > raw->curlen) drm_dp_sideband_parse_remote_i2c_read_ack()
460 repmsg->u.remote_i2c_read_ack.num_bytes = raw->msg[idx]; drm_dp_sideband_parse_remote_i2c_read_ack()
461 idx++; drm_dp_sideband_parse_remote_i2c_read_ack()
463 memcpy(repmsg->u.remote_i2c_read_ack.bytes, &raw->msg[idx], repmsg->u.remote_i2c_read_ack.num_bytes); drm_dp_sideband_parse_remote_i2c_read_ack()
466 DRM_DEBUG_KMS("remote i2c reply parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_remote_i2c_read_ack()
473 int idx = 1; drm_dp_sideband_parse_enum_path_resources_ack() local
474 repmsg->u.path_resources.port_number = (raw->msg[idx] >> 4) & 0xf; drm_dp_sideband_parse_enum_path_resources_ack()
475 idx++; drm_dp_sideband_parse_enum_path_resources_ack()
476 if (idx > raw->curlen) drm_dp_sideband_parse_enum_path_resources_ack()
478 repmsg->u.path_resources.full_payload_bw_number = (raw->msg[idx] << 8) | (raw->msg[idx+1]); drm_dp_sideband_parse_enum_path_resources_ack()
479 idx += 2; drm_dp_sideband_parse_enum_path_resources_ack()
480 if (idx > raw->curlen) drm_dp_sideband_parse_enum_path_resources_ack()
482 repmsg->u.path_resources.avail_payload_bw_number = (raw->msg[idx] << 8) | (raw->msg[idx+1]); drm_dp_sideband_parse_enum_path_resources_ack()
483 idx += 2; drm_dp_sideband_parse_enum_path_resources_ack()
484 if (idx > raw->curlen) drm_dp_sideband_parse_enum_path_resources_ack()
488 DRM_DEBUG_KMS("enum resource parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_enum_path_resources_ack()
495 int idx = 1; drm_dp_sideband_parse_allocate_payload_ack() local
496 repmsg->u.allocate_payload.port_number = (raw->msg[idx] >> 4) & 0xf; drm_dp_sideband_parse_allocate_payload_ack()
497 idx++; drm_dp_sideband_parse_allocate_payload_ack()
498 if (idx > raw->curlen) drm_dp_sideband_parse_allocate_payload_ack()
500 repmsg->u.allocate_payload.vcpi = raw->msg[idx]; drm_dp_sideband_parse_allocate_payload_ack()
501 idx++; drm_dp_sideband_parse_allocate_payload_ack()
502 if (idx > raw->curlen) drm_dp_sideband_parse_allocate_payload_ack()
504 repmsg->u.allocate_payload.allocated_pbn = (raw->msg[idx] << 8) | (raw->msg[idx+1]); drm_dp_sideband_parse_allocate_payload_ack()
505 idx += 2; drm_dp_sideband_parse_allocate_payload_ack()
506 if (idx > raw->curlen) drm_dp_sideband_parse_allocate_payload_ack()
510 DRM_DEBUG_KMS("allocate payload parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_allocate_payload_ack()
517 int idx = 1; drm_dp_sideband_parse_query_payload_ack() local
518 repmsg->u.query_payload.port_number = (raw->msg[idx] >> 4) & 0xf; drm_dp_sideband_parse_query_payload_ack()
519 idx++; drm_dp_sideband_parse_query_payload_ack()
520 if (idx > raw->curlen) drm_dp_sideband_parse_query_payload_ack()
522 repmsg->u.query_payload.allocated_pbn = (raw->msg[idx] << 8) | (raw->msg[idx + 1]); drm_dp_sideband_parse_query_payload_ack()
523 idx += 2; drm_dp_sideband_parse_query_payload_ack()
524 if (idx > raw->curlen) drm_dp_sideband_parse_query_payload_ack()
528 DRM_DEBUG_KMS("query payload parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_query_payload_ack()
570 int idx = 1; drm_dp_sideband_parse_connection_status_notify() local
572 msg->u.conn_stat.port_number = (raw->msg[idx] & 0xf0) >> 4; drm_dp_sideband_parse_connection_status_notify()
573 idx++; drm_dp_sideband_parse_connection_status_notify()
574 if (idx > raw->curlen) drm_dp_sideband_parse_connection_status_notify()
577 memcpy(msg->u.conn_stat.guid, &raw->msg[idx], 16); drm_dp_sideband_parse_connection_status_notify()
578 idx += 16; drm_dp_sideband_parse_connection_status_notify()
579 if (idx > raw->curlen) drm_dp_sideband_parse_connection_status_notify()
582 msg->u.conn_stat.legacy_device_plug_status = (raw->msg[idx] >> 6) & 0x1; drm_dp_sideband_parse_connection_status_notify()
583 msg->u.conn_stat.displayport_device_plug_status = (raw->msg[idx] >> 5) & 0x1; drm_dp_sideband_parse_connection_status_notify()
584 msg->u.conn_stat.message_capability_status = (raw->msg[idx] >> 4) & 0x1; drm_dp_sideband_parse_connection_status_notify()
585 msg->u.conn_stat.input_port = (raw->msg[idx] >> 3) & 0x1; drm_dp_sideband_parse_connection_status_notify()
586 msg->u.conn_stat.peer_device_type = (raw->msg[idx] & 0x7); drm_dp_sideband_parse_connection_status_notify()
587 idx++; drm_dp_sideband_parse_connection_status_notify()
590 DRM_DEBUG_KMS("connection status reply parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_connection_status_notify()
597 int idx = 1; drm_dp_sideband_parse_resource_status_notify() local
599 msg->u.resource_stat.port_number = (raw->msg[idx] & 0xf0) >> 4; drm_dp_sideband_parse_resource_status_notify()
600 idx++; drm_dp_sideband_parse_resource_status_notify()
601 if (idx > raw->curlen) drm_dp_sideband_parse_resource_status_notify()
604 memcpy(msg->u.resource_stat.guid, &raw->msg[idx], 16); drm_dp_sideband_parse_resource_status_notify()
605 idx += 16; drm_dp_sideband_parse_resource_status_notify()
606 if (idx > raw->curlen) drm_dp_sideband_parse_resource_status_notify()
609 msg->u.resource_stat.available_pbn = (raw->msg[idx] << 8) | (raw->msg[idx + 1]); drm_dp_sideband_parse_resource_status_notify()
610 idx++; drm_dp_sideband_parse_resource_status_notify()
613 DRM_DEBUG_KMS("resource status reply parse length fail %d %d\n", idx, raw->curlen); drm_dp_sideband_parse_resource_status_notify()
1001 int idx = (parent_lct - 1) / 2; drm_dp_calculate_rad() local
1003 memcpy(rad, port->parent->rad, idx + 1); drm_dp_calculate_rad()
1008 rad[idx] |= port->port_num << shift; drm_dp_calculate_rad()
1433 int len, space, idx, tosend; process_single_tx_qlock() local
1463 drm_dp_encode_sideband_msg_hdr(&hdr, chunk, &idx); process_single_tx_qlock()
1464 memcpy(&chunk[idx], &txmsg->msg[txmsg->cur_offset], tosend); process_single_tx_qlock()
1466 drm_dp_crc_sideband_chunk_req(&chunk[idx], tosend); process_single_tx_qlock()
1467 idx += tosend + 1; process_single_tx_qlock()
1469 ret = drm_dp_send_sideband_msg(mgr, up, chunk, idx); process_single_tx_qlock()
/linux-4.4.14/drivers/clk/samsung/
H A Dclk.c109 unsigned int idx, ret; samsung_clk_register_alias() local
116 for (idx = 0; idx < nr_clk; idx++, list++) { samsung_clk_register_alias()
119 idx); samsung_clk_register_alias()
143 unsigned int idx, ret; samsung_clk_register_fixed_rate() local
145 for (idx = 0; idx < nr_clk; idx++, list++) { samsung_clk_register_fixed_rate()
172 unsigned int idx; samsung_clk_register_fixed_factor() local
174 for (idx = 0; idx < nr_clk; idx++, list++) { samsung_clk_register_fixed_factor()
193 unsigned int idx, ret; samsung_clk_register_mux() local
195 for (idx = 0; idx < nr_clk; idx++, list++) { samsung_clk_register_mux()
225 unsigned int idx, ret; samsung_clk_register_div() local
227 for (idx = 0; idx < nr_clk; idx++, list++) { samsung_clk_register_div()
264 unsigned int idx, ret; samsung_clk_register_gate() local
266 for (idx = 0; idx < nr_clk; idx++, list++) { samsung_clk_register_gate()
/linux-4.4.14/sound/pci/emu10k1/
H A Demuproc.c190 int idx; snd_emu10k1_proc_read() local
199 for (idx = 0; idx < NUM_G; idx++) { snd_emu10k1_proc_read()
201 snd_emu10k1_ptr_read(emu, A_FXRT1, idx) : snd_emu10k1_proc_read()
202 snd_emu10k1_ptr_read(emu, FXRT, idx); snd_emu10k1_proc_read()
204 snd_emu10k1_ptr_read(emu, A_FXRT2, idx) : snd_emu10k1_proc_read()
208 idx, snd_emu10k1_proc_read()
220 idx, snd_emu10k1_proc_read()
228 for (idx = 0; idx < nefx; idx++) { snd_emu10k1_proc_read()
229 if (emu->efx_voices_mask[idx/32] & (1 << (idx%32))) snd_emu10k1_proc_read()
230 snd_iprintf(buffer, " Output %02i [%s]\n", idx, outputs[idx]); snd_emu10k1_proc_read()
233 for (idx = 0; idx < (emu->audigy ? 64 : 32); idx++) snd_emu10k1_proc_read()
234 snd_iprintf(buffer, " Output %02i [%s]\n", idx, outputs[idx]); snd_emu10k1_proc_read()
344 unsigned int idx; snd_emu10k1_fx8010_read() local
360 for (idx = 0; idx < ((pos & 3) + count + 3) >> 2; idx++) { snd_emu10k1_fx8010_read()
362 val = snd_emu10k1_ptr_read(emu, offset + idx + (pos >> 2), 0); snd_emu10k1_fx8010_read()
365 val |= snd_emu10k1_ptr_read(emu, 0x100 + idx + (pos >> 2), 0) << 20; snd_emu10k1_fx8010_read()
367 tmp[idx] = val; snd_emu10k1_fx8010_read()
382 int idx; snd_emu10k1_proc_voices_read() local
385 for (idx = 0; idx < NUM_G; idx++) { snd_emu10k1_proc_voices_read()
386 voice = &emu->voices[idx]; snd_emu10k1_proc_voices_read()
388 idx, snd_emu10k1_proc_voices_read()
/linux-4.4.14/arch/frv/mb93090-mb00/
H A Dpci-frv.c86 int idx; pcibios_allocate_bus_resources() local
93 for (idx = PCI_BRIDGE_RESOURCES; idx < PCI_NUM_RESOURCES; idx++) { pcibios_allocate_bus_resources()
94 r = &dev->resource[idx]; pcibios_allocate_bus_resources()
97 pci_claim_bridge_resource(dev, idx); pcibios_allocate_bus_resources()
107 int idx, disabled; pcibios_allocate_resources() local
113 for(idx = 0; idx < 6; idx++) { for_each_pci_dev()
114 r = &dev->resource[idx]; for_each_pci_dev()
126 if (pci_claim_resource(dev, idx) < 0) { for_each_pci_dev()
150 int idx; pcibios_assign_resources() local
160 for(idx=0; idx<6; idx++) { for_each_pci_dev()
161 r = &dev->resource[idx]; for_each_pci_dev()
166 if ((class == PCI_CLASS_STORAGE_IDE && idx < 4) || for_each_pci_dev()
176 pci_assign_resource(dev, idx); for_each_pci_dev()
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/subdev/
H A Dgpio.h29 int nvkm_gpio_find(struct nvkm_gpio *, int idx, u8 tag, u8 line,
31 int nvkm_gpio_set(struct nvkm_gpio *, int idx, u8 tag, u8 line, int state);
32 int nvkm_gpio_get(struct nvkm_gpio *, int idx, u8 tag, u8 line);
/linux-4.4.14/include/net/
H A Dnetprio_cgroup.h33 u32 idx; task_netprioidx() local
37 idx = css->cgroup->id; task_netprioidx()
39 return idx; task_netprioidx()
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
H A Datom.c177 uint32_t idx, val = 0xCDCDCDCD, align, arg; atom_get_src_int() local
183 idx = U16(*ptr); atom_get_src_int()
186 DEBUG("REG[0x%04X]", idx); atom_get_src_int()
187 idx += gctx->reg_block; atom_get_src_int()
190 val = gctx->card->reg_read(gctx->card, idx); atom_get_src_int()
214 idx, 0); atom_get_src_int()
218 idx = U8(*ptr); atom_get_src_int()
222 val = get_unaligned_le32((u32 *)&ctx->ps[idx]); atom_get_src_int()
224 DEBUG("PS[0x%02X,0x%04X]", idx, val); atom_get_src_int()
227 idx = U8(*ptr); atom_get_src_int()
230 DEBUG("WS[0x%02X]", idx); atom_get_src_int()
231 switch (idx) { atom_get_src_int()
260 val = ctx->ws[idx]; atom_get_src_int()
264 idx = U16(*ptr); atom_get_src_int()
268 DEBUG("ID[0x%04X+%04X]", idx, gctx->data_block); atom_get_src_int()
270 DEBUG("ID[0x%04X]", idx); atom_get_src_int()
272 val = U32(idx + gctx->data_block); atom_get_src_int()
275 idx = U8(*ptr); atom_get_src_int()
277 if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) { atom_get_src_int()
279 gctx->fb_base + (idx * 4), gctx->scratch_size_bytes); atom_get_src_int()
282 val = gctx->scratch[(gctx->fb_base / 4) + idx]; atom_get_src_int()
284 DEBUG("FB[0x%02X]", idx); atom_get_src_int()
314 idx = U8(*ptr); atom_get_src_int()
317 DEBUG("PLL[0x%02X]", idx); atom_get_src_int()
318 val = gctx->card->pll_read(gctx->card, idx); atom_get_src_int()
321 idx = U8(*ptr); atom_get_src_int()
324 DEBUG("MC[0x%02X]", idx); atom_get_src_int()
325 val = gctx->card->mc_read(gctx->card, idx); atom_get_src_int()
450 val, idx; atom_put_dst() local
459 idx = U16(*ptr); atom_put_dst()
461 DEBUG("REG[0x%04X]", idx); atom_put_dst()
462 idx += gctx->reg_block; atom_put_dst()
465 if (idx == 0) atom_put_dst()
466 gctx->card->reg_write(gctx->card, idx, atom_put_dst()
469 gctx->card->reg_write(gctx->card, idx, val); atom_put_dst()
491 idx, val); atom_put_dst()
495 idx = U8(*ptr); atom_put_dst()
497 DEBUG("PS[0x%02X]", idx); atom_put_dst()
498 ctx->ps[idx] = cpu_to_le32(val); atom_put_dst()
501 idx = U8(*ptr); atom_put_dst()
503 DEBUG("WS[0x%02X]", idx); atom_put_dst()
504 switch (idx) { atom_put_dst()
530 ctx->ws[idx] = val; atom_put_dst()
534 idx = U8(*ptr); atom_put_dst()
536 if ((gctx->fb_base + (idx * 4)) > gctx->scratch_size_bytes) { atom_put_dst()
538 gctx->fb_base + (idx * 4), gctx->scratch_size_bytes); atom_put_dst()
540 gctx->scratch[(gctx->fb_base / 4) + idx] = val; atom_put_dst()
541 DEBUG("FB[0x%02X]", idx); atom_put_dst()
544 idx = U8(*ptr); atom_put_dst()
546 DEBUG("PLL[0x%02X]", idx); atom_put_dst()
547 gctx->card->pll_write(gctx->card, idx, val); atom_put_dst()
550 idx = U8(*ptr); atom_put_dst()
552 DEBUG("MC[0x%02X]", idx); atom_put_dst()
553 gctx->card->mc_write(gctx->card, idx, val); atom_put_dst()
619 int idx = U8((*ptr)++); atom_op_calltable() local
622 if (idx < ATOM_TABLE_NAMES_CNT) atom_op_calltable()
623 SDEBUG(" table: %d (%s)\n", idx, atom_table_names[idx]); atom_op_calltable()
625 SDEBUG(" table: %d\n", idx); atom_op_calltable()
626 if (U16(ctx->ctx->cmd_table + 4 + 2 * idx)) atom_op_calltable()
627 r = amdgpu_atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift); atom_op_calltable()
868 int idx = U8(*ptr); atom_op_setdatablock() local
870 SDEBUG(" block: %d\n", idx); atom_op_setdatablock()
871 if (!idx) atom_op_setdatablock()
873 else if (idx == 255) atom_op_setdatablock()
876 ctx->ctx->data_block = U16(ctx->ctx->data_table + 4 + 2 * idx); atom_op_setdatablock()
1399 int idx = CU16(ctx->data_table + offset); amdgpu_atom_parse_data_header() local
1406 *size = CU16(idx); amdgpu_atom_parse_data_header()
1408 *frev = CU8(idx + 2); amdgpu_atom_parse_data_header()
1410 *crev = CU8(idx + 3); amdgpu_atom_parse_data_header()
1411 *data_start = idx; amdgpu_atom_parse_data_header()
1419 int idx = CU16(ctx->cmd_table + offset); amdgpu_atom_parse_cmd_header() local
1426 *frev = CU8(idx + 2); amdgpu_atom_parse_cmd_header()
1428 *crev = CU8(idx + 3); amdgpu_atom_parse_cmd_header()
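
An illustrative sketch of the operand decode visible in the atom.c hits: the argument type selects how wide the index is (U8 vs U16) and which table it addresses (PS, WS, REG, ...). All names, the tiny byte stream and the table contents below are made up for the example; they are not the real AtomBIOS encoding.

#include <stdint.h>
#include <stdio.h>

enum arg { ARG_REG, ARG_PS, ARG_WS };

static uint32_t ps[4] = { 11, 22, 33, 44 };
static uint32_t ws[4] = { 55, 66, 77, 88 };

static uint32_t get_src(enum arg arg, const uint8_t **ptr)
{
	uint32_t idx, val = 0xCDCDCDCD;

	switch (arg) {
	case ARG_REG:			/* 16-bit register index */
		idx = (*ptr)[0] | ((*ptr)[1] << 8);
		*ptr += 2;
		printf("REG[0x%04X]\n", (unsigned)idx);
		break;
	case ARG_PS:			/* 8-bit parameter-space index */
		idx = *(*ptr)++;
		val = ps[idx & 3];
		break;
	case ARG_WS:			/* 8-bit workspace index */
		idx = *(*ptr)++;
		val = ws[idx & 3];
		break;
	}
	return val;
}

int main(void)
{
	const uint8_t stream[] = { 0x02, 0x01 };
	const uint8_t *p = stream;
	uint32_t a = get_src(ARG_PS, &p);	/* reads index 2 -> ps[2] */
	uint32_t b = get_src(ARG_WS, &p);	/* reads index 1 -> ws[1] */

	printf("PS -> %u, WS -> %u\n", (unsigned)a, (unsigned)b);
	return 0;
}
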
H A Dvce_v3_0.c51 static void vce_v3_0_mc_resume(struct amdgpu_device *adev, int idx);
116 int idx, i, j, r; vce_v3_0_start() local
119 for (idx = 0; idx < 2; ++idx) { vce_v3_0_start()
121 if (adev->vce.harvest_config & (1 << idx)) vce_v3_0_start()
124 if(idx == 0) vce_v3_0_start()
132 vce_v3_0_mc_resume(adev, idx); vce_v3_0_start()
385 static void vce_v3_0_mc_resume(struct amdgpu_device *adev, int idx) vce_v3_0_mc_resume() argument
410 if (idx == 0) { vce_v3_0_mc_resume()
440 int idx; vce_v3_0_is_idle() local
442 for (idx = 0; idx < 2; ++idx) { vce_v3_0_is_idle()
443 if (adev->vce.harvest_config & (1 << idx)) vce_v3_0_is_idle()
446 if (idx == 0) vce_v3_0_is_idle()
460 int idx; vce_v3_0_wait_for_idle() local
462 for (idx = 0; idx < 2; ++idx) { vce_v3_0_wait_for_idle()
463 if (adev->vce.harvest_config & (1 << idx)) vce_v3_0_wait_for_idle()
466 if (idx == 0) vce_v3_0_wait_for_idle()
483 int idx; vce_v3_0_soft_reset() local
485 for (idx = 0; idx < 2; ++idx) { vce_v3_0_soft_reset()
486 if (adev->vce.harvest_config & (1 << idx)) vce_v3_0_soft_reset()
489 if (idx == 0) vce_v3_0_soft_reset()
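
A minimal sketch of the harvest-skip loop that every vce_v3_0 hit above repeats: each bit in harvest_config marks one of the two instances as fused off, and idx skips it. The mask value is illustrative.

#include <stdio.h>

int main(void)
{
	unsigned int harvest_config = 1 << 1;	/* instance 1 fused off */
	int idx;

	for (idx = 0; idx < 2; ++idx) {
		if (harvest_config & (1 << idx))
			continue;		/* skip harvested instance */
		printf("programming VCE instance %d\n", idx);
	}
	return 0;
}
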
/linux-4.4.14/drivers/hwspinlock/
H A Dsirf_hwspinlock.c60 int idx, ret; sirf_hwspinlock_probe() local
75 for (idx = 0; idx < HW_SPINLOCK_NUMBER; idx++) { sirf_hwspinlock_probe()
76 hwlock = &hwspin->bank.lock[idx]; sirf_hwspinlock_probe()
77 hwlock->priv = hwspin->io_base + HW_SPINLOCK_OFFSET(idx); sirf_hwspinlock_probe()
/linux-4.4.14/drivers/hid/
H A Dhid-primax.c28 int idx = size; px_raw_event() local
49 while (--idx > 1) { px_raw_event()
50 if (data[idx] < 0xE0 || data[idx] > 0xE7) px_raw_event()
52 data[0] |= (1 << (data[idx] - 0xE0)); px_raw_event()
53 data[idx] = 0; px_raw_event()
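
A standalone sketch of the px_raw_event() fix-up shown above: bytes in the 0xE0..0xE7 range are modifier usages, folded into the bitmask byte at data[0] and zeroed in place. The sample report contents are invented.

#include <stdio.h>

int main(void)
{
	unsigned char data[8] = { 0x00, 0x00, 0x04, 0xE1, 0x00 }; /* 'a' + LeftShift */
	int idx = sizeof(data);

	while (--idx > 1) {
		if (data[idx] < 0xE0 || data[idx] > 0xE7)
			continue;
		data[0] |= 1 << (data[idx] - 0xE0);	/* set modifier bit */
		data[idx] = 0;				/* drop the raw usage */
	}
	printf("modifier mask: 0x%02x\n", data[0]);	/* prints 0x02 */
	return 0;
}
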
/linux-4.4.14/arch/cris/arch-v32/drivers/pci/
H A Dbios.c54 int idx; pcibios_enable_resources() local
59 for(idx=0; idx<6; idx++) { pcibios_enable_resources()
61 if (!(mask & (1<<idx))) pcibios_enable_resources()
64 r = &dev->resource[idx]; pcibios_enable_resources()
/linux-4.4.14/arch/m68k/kernel/
H A Dpcibios.c53 int idx; pcibios_enable_device() local
58 for (idx = 0; idx < 6; idx++) { pcibios_enable_device()
60 if (!(mask & (1 << idx))) pcibios_enable_device()
63 r = dev->resource + idx; pcibios_enable_device()
/linux-4.4.14/include/linux/platform_data/
H A Dsa11x0-serial.h27 void sa1100_register_uart(int idx, int port);
30 #define sa1100_register_uart(idx,port) do { } while (0)
/linux-4.4.14/fs/nfs/blocklayout/
H A Ddev.c166 dprintk("%s: invalid chunk idx %d (%lld/%lld)\n", bl_map_stripe()
189 struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask);
194 struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_simple()
196 struct pnfs_block_volume *v = &volumes[idx]; bl_parse_simple()
221 struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_slice()
223 struct pnfs_block_volume *v = &volumes[idx]; bl_parse_slice()
237 struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_concat()
239 struct pnfs_block_volume *v = &volumes[idx]; bl_parse_concat()
266 struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_stripe()
268 struct pnfs_block_volume *v = &volumes[idx]; bl_parse_stripe()
295 struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_deviceid()
297 switch (volumes[idx].type) { bl_parse_deviceid()
299 return bl_parse_simple(server, d, volumes, idx, gfp_mask); bl_parse_deviceid()
301 return bl_parse_slice(server, d, volumes, idx, gfp_mask); bl_parse_deviceid()
303 return bl_parse_concat(server, d, volumes, idx, gfp_mask); bl_parse_deviceid()
305 return bl_parse_stripe(server, d, volumes, idx, gfp_mask); bl_parse_deviceid()
307 dprintk("unsupported volume type: %d\n", volumes[idx].type); bl_parse_deviceid()
193 bl_parse_simple(struct nfs_server *server, struct pnfs_block_dev *d, struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_simple() argument
220 bl_parse_slice(struct nfs_server *server, struct pnfs_block_dev *d, struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_slice() argument
236 bl_parse_concat(struct nfs_server *server, struct pnfs_block_dev *d, struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_concat() argument
265 bl_parse_stripe(struct nfs_server *server, struct pnfs_block_dev *d, struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_stripe() argument
294 bl_parse_deviceid(struct nfs_server *server, struct pnfs_block_dev *d, struct pnfs_block_volume *volumes, int idx, gfp_t gfp_mask) bl_parse_deviceid() argument
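
A standalone sketch of the per-type dispatch in bl_parse_deviceid(): the volume at idx selects which parser runs on the same (volumes, idx) pair. The enum values and parser bodies are stand-ins.

#include <stdio.h>

enum vol_type { VOL_SIMPLE, VOL_SLICE, VOL_CONCAT, VOL_STRIPE };
struct volume { enum vol_type type; };

static int parse_simple(struct volume *v, int idx)
{
	printf("simple: type %d @%d\n", v[idx].type, idx);
	return 0;
}

static int parse_slice(struct volume *v, int idx)
{
	printf("slice: type %d @%d\n", v[idx].type, idx);
	return 0;
}

static int parse_deviceid(struct volume *volumes, int idx)
{
	switch (volumes[idx].type) {
	case VOL_SIMPLE:
		return parse_simple(volumes, idx);
	case VOL_SLICE:
		return parse_slice(volumes, idx);
	default:
		printf("unsupported volume type: %d\n", volumes[idx].type);
		return -1;
	}
}

int main(void)
{
	struct volume vols[2] = { { VOL_SIMPLE }, { VOL_SLICE } };
	return parse_deviceid(vols, 1);
}
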
/linux-4.4.14/net/core/
H A Dgen_estimator.c115 int idx = (int)arg; est_timer() local
119 list_for_each_entry_rcu(e, &elist[idx].list, list) { est_timer()
131 brate = (b.bytes - e->last_bytes)<<(7 - idx); est_timer()
137 rate <<= (7 - idx); est_timer()
146 if (!list_empty(&elist[idx].list)) est_timer()
147 mod_timer(&elist[idx].timer, jiffies + ((HZ/4) << idx)); est_timer()
216 int idx; gen_new_estimator() local
230 idx = parm->interval + 2; gen_new_estimator()
242 if (!elist[idx].timer.function) { gen_new_estimator()
243 INIT_LIST_HEAD(&elist[idx].list); gen_new_estimator()
244 setup_timer(&elist[idx].timer, est_timer, idx); gen_new_estimator()
247 if (list_empty(&elist[idx].list)) gen_new_estimator()
248 mod_timer(&elist[idx].timer, jiffies + ((HZ/4) << idx)); gen_new_estimator()
250 list_add_rcu(&est->list, &elist[idx].list); gen_new_estimator()
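
A sketch of how gen_estimator derives its timer period from idx: the user-visible interval maps to idx = interval + 2, and each idx step doubles the rearm period via (HZ/4) << idx. The HZ value here is illustrative, and the interval range is assumed from the shifts shown above.

#include <stdio.h>

#define HZ 250

int main(void)
{
	int interval;

	for (interval = -2; interval <= 3; interval++) {
		int idx = interval + 2;
		printf("interval %2d -> idx %d -> period %d jiffies\n",
		       interval, idx, (HZ / 4) << idx);
	}
	return 0;
}
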
/linux-4.4.14/security/selinux/
H A Dnetnode.c116 unsigned int idx; sel_netnode_find() local
121 idx = sel_netnode_hashfn_ipv4(*(__be32 *)addr); sel_netnode_find()
124 idx = sel_netnode_hashfn_ipv6(addr); sel_netnode_find()
131 list_for_each_entry_rcu(node, &sel_netnode_hash[idx].list, list) sel_netnode_find()
158 unsigned int idx; sel_netnode_insert() local
162 idx = sel_netnode_hashfn_ipv4(node->nsec.addr.ipv4); sel_netnode_insert()
165 idx = sel_netnode_hashfn_ipv6(&node->nsec.addr.ipv6); sel_netnode_insert()
174 list_add_rcu(&node->list, &sel_netnode_hash[idx].list); sel_netnode_insert()
175 if (sel_netnode_hash[idx].size == SEL_NETNODE_HASH_BKT_LIMIT) { sel_netnode_insert()
178 rcu_dereference_protected(sel_netnode_hash[idx].list.prev, sel_netnode_insert()
184 sel_netnode_hash[idx].size++; sel_netnode_insert()
288 unsigned int idx; sel_netnode_flush() local
292 for (idx = 0; idx < SEL_NETNODE_HASH_SIZE; idx++) { sel_netnode_flush()
294 &sel_netnode_hash[idx].list, list) { sel_netnode_flush()
298 sel_netnode_hash[idx].size = 0; sel_netnode_flush()
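
A minimal userspace sketch of the bounded hash-bucket insert used by sel_netnode_insert() (and sel_netport_insert() below): new entries go to the bucket head and, once the bucket reaches its limit, the oldest entry falls off instead of the list growing. The fixed-size array stands in for the kernel's RCU list; sizes and the hash are invented.

#include <stdio.h>
#include <string.h>

#define HASH_SIZE 4
#define BKT_LIMIT 3

struct bucket { int size; unsigned int entries[BKT_LIMIT]; };
static struct bucket hash[HASH_SIZE];

static void insert(unsigned int addr)
{
	unsigned int idx = addr % HASH_SIZE;
	struct bucket *b = &hash[idx];
	int n = b->size < BKT_LIMIT ? b->size : BKT_LIMIT - 1;

	memmove(&b->entries[1], &b->entries[0], n * sizeof(b->entries[0]));
	b->entries[0] = addr;		/* newest at the head */
	if (b->size < BKT_LIMIT)
		b->size++;		/* else the tail just fell off */
}

int main(void)
{
	unsigned int a[] = { 4, 8, 12, 16 };	/* all hash to bucket 0 */
	for (unsigned int i = 0; i < 4; i++)
		insert(a[i]);
	printf("bucket 0 holds %d entries, head=%u\n",
	       hash[0].size, hash[0].entries[0]);
	return 0;
}
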
H A Dnetport.c96 unsigned int idx; sel_netport_find() local
99 idx = sel_netport_hashfn(pnum); sel_netport_find()
100 list_for_each_entry_rcu(port, &sel_netport_hash[idx].list, list) sel_netport_find()
117 unsigned int idx; sel_netport_insert() local
121 idx = sel_netport_hashfn(port->psec.port); sel_netport_insert()
122 list_add_rcu(&port->list, &sel_netport_hash[idx].list); sel_netport_insert()
123 if (sel_netport_hash[idx].size == SEL_NETPORT_HASH_BKT_LIMIT) { sel_netport_insert()
127 sel_netport_hash[idx].list.prev, sel_netport_insert()
133 sel_netport_hash[idx].size++; sel_netport_insert()
222 unsigned int idx; sel_netport_flush() local
226 for (idx = 0; idx < SEL_NETPORT_HASH_SIZE; idx++) { sel_netport_flush()
228 &sel_netport_hash[idx].list, list) { sel_netport_flush()
232 sel_netport_hash[idx].size = 0; sel_netport_flush()
/linux-4.4.14/arch/ia64/sn/kernel/
H A Dio_init.c152 int idx; sn_io_slot_fixup() local
177 for (idx = 0; idx <= PCI_ROM_RESOURCE; idx++) { sn_io_slot_fixup()
179 if (!pcidev_info->pdi_pio_mapped_addr[idx]) { sn_io_slot_fixup()
183 start = dev->resource[idx].start; sn_io_slot_fixup()
184 end = dev->resource[idx].end; sn_io_slot_fixup()
189 addr = pcidev_info->pdi_pio_mapped_addr[idx]; sn_io_slot_fixup()
191 dev->resource[idx].start = addr; sn_io_slot_fixup()
192 dev->resource[idx].end = addr + size; sn_io_slot_fixup()
198 if (dev->resource[idx].parent && dev->resource[idx].parent->child) sn_io_slot_fixup()
199 release_resource(&dev->resource[idx]); sn_io_slot_fixup()
201 if (dev->resource[idx].flags & IORESOURCE_IO) sn_io_slot_fixup()
202 insert_resource(&ioport_resource, &dev->resource[idx]); sn_io_slot_fixup()
204 insert_resource(&iomem_resource, &dev->resource[idx]); sn_io_slot_fixup()
209 if (idx == PCI_ROM_RESOURCE) { sn_io_slot_fixup()
/linux-4.4.14/sound/pci/ctxfi/
H A Dctamixer.c33 return rsc->idx = container_of(rsc, struct amixer, rsc)->idx[0]; amixer_master()
39 return container_of(rsc, struct amixer, rsc)->idx[rsc->conj]; amixer_next_conj()
44 return container_of(rsc, struct amixer, rsc)->idx[rsc->conj]; amixer_index()
206 err = rsc_init(&amixer->rsc, amixer->idx[0], amixer_rsc_init()
237 unsigned int idx; get_amixer_rsc() local
253 err = mgr_get_resource(&mgr->mgr, 1, &idx); get_amixer_rsc()
257 amixer->idx[i] = idx; get_amixer_rsc()
277 mgr_put_resource(&mgr->mgr, 1, amixer->idx[i]); get_amixer_rsc()
291 mgr_put_resource(&mgr->mgr, 1, amixer->idx[i]); put_amixer_rsc()
341 return rsc->idx = container_of(rsc, struct sum, rsc)->idx[0]; sum_master()
347 return container_of(rsc, struct sum, rsc)->idx[rsc->conj]; sum_next_conj()
352 return container_of(rsc, struct sum, rsc)->idx[rsc->conj]; sum_index()
373 err = rsc_init(&sum->rsc, sum->idx[0], SUM, desc->msr, mgr->mgr.hw); sum_rsc_init()
393 unsigned int idx; get_sum_rsc() local
408 err = mgr_get_resource(&mgr->mgr, 1, &idx); get_sum_rsc()
412 sum->idx[i] = idx; get_sum_rsc()
432 mgr_put_resource(&mgr->mgr, 1, sum->idx[i]); get_sum_rsc()
446 mgr_put_resource(&mgr->mgr, 1, sum->idx[i]); put_sum_rsc()
H A Dcthardware.h111 int (*src_commit_write)(struct hw *hw, unsigned int idx, void *blk);
112 int (*src_get_ca)(struct hw *hw, unsigned int idx, void *blk);
 117 /* synchronously enable src @idx */
118 int (*src_mgr_enbs_src)(void *blk, unsigned int idx);
119 /* enable src @idx */
120 int (*src_mgr_enb_src)(void *blk, unsigned int idx);
121 /* disable src @idx */
122 int (*src_mgr_dsb_src)(void *blk, unsigned int idx);
147 int (*amixer_commit_write)(struct hw *hw, unsigned int idx, void *blk);
160 int (*dai_commit_write)(struct hw *hw, unsigned int idx, void *blk);
164 int (*dao_commit_write)(struct hw *hw, unsigned int idx, void *blk);
169 int (*daio_mgr_enb_dai)(void *blk, unsigned int idx);
170 int (*daio_mgr_dsb_dai)(void *blk, unsigned int idx);
171 int (*daio_mgr_enb_dao)(void *blk, unsigned int idx);
172 int (*daio_mgr_dsb_dao)(void *blk, unsigned int idx);
173 int (*daio_mgr_dao_init)(void *blk, unsigned int idx,
/linux-4.4.14/arch/tile/kernel/
H A Dperf_event.c355 static inline u64 read_counter(int idx) read_counter() argument
360 switch (idx) { read_counter()
374 WARN_ON_ONCE(idx > AUX_PERF_COUNT_1_IDX || read_counter()
375 idx < PERF_COUNT_0_IDX); read_counter()
384 static inline void write_counter(int idx, u64 value) write_counter() argument
387 switch (idx) { write_counter()
401 WARN_ON_ONCE(idx > AUX_PERF_COUNT_1_IDX || write_counter()
402 idx < PERF_COUNT_0_IDX); write_counter()
414 int shift, idx = hwc->idx; tile_pmu_enable_event() local
420 if (WARN_ON_ONCE(idx == -1)) tile_pmu_enable_event()
423 if (idx < tile_pmu->num_base_counters) tile_pmu_enable_event()
428 switch (idx) { tile_pmu_enable_event()
440 WARN_ON_ONCE(idx < PERF_COUNT_0_IDX || tile_pmu_enable_event()
441 idx > AUX_PERF_COUNT_1_IDX); tile_pmu_enable_event()
449 if (idx < tile_pmu->num_base_counters) tile_pmu_enable_event()
463 int idx = hwc->idx; tile_pmu_disable_event() local
465 if (idx == -1) tile_pmu_disable_event()
468 if (idx < tile_pmu->num_base_counters) tile_pmu_disable_event()
473 switch (idx) { tile_pmu_disable_event()
483 WARN_ON_ONCE(idx < PERF_COUNT_0_IDX || tile_pmu_disable_event()
484 idx > AUX_PERF_COUNT_1_IDX); tile_pmu_disable_event()
491 if (idx < tile_pmu->num_base_counters) tile_pmu_disable_event()
508 int idx = hwc->idx; tile_perf_event_update() local
520 new_raw_count = read_counter(idx); tile_perf_event_update()
551 int idx = hwc->idx; tile_event_set_period() local
581 write_counter(idx, (u64)(-left) & tile_pmu->cntval_mask); tile_event_set_period()
595 int idx = hwc->idx; tile_pmu_stop() local
597 if (__test_and_clear_bit(idx, cpuc->active_mask)) { tile_pmu_stop()
599 cpuc->events[hwc->idx] = NULL; tile_pmu_stop()
620 int idx = event->hw.idx; tile_pmu_start() local
625 if (WARN_ON_ONCE(idx == -1)) tile_pmu_start()
635 cpuc->events[idx] = event; tile_pmu_start()
636 __set_bit(idx, cpuc->active_mask); tile_pmu_start()
689 event->hw.idx = b; tile_pmu_add()
720 cpuc->events[event->hw.idx] = NULL; tile_pmu_del()
721 __clear_bit(event->hw.idx, &cpuc->used_mask); tile_pmu_del()
813 hwc->idx = -1; __tile_event_init()
/linux-4.4.14/drivers/net/wireless/iwlegacy/
H A D3945-rs.c61 u8 idx; member in struct:il3945_tpt_entry
102 u32 idx = 0; il3945_get_rate_idx_by_rssi() local
123 while (idx < table_size && rssi < tpt_table[idx].min_rssi) il3945_get_rate_idx_by_rssi()
124 idx++; il3945_get_rate_idx_by_rssi()
126 idx = min(idx, table_size - 1); il3945_get_rate_idx_by_rssi()
128 return tpt_table[idx].idx; il3945_get_rate_idx_by_rssi()
168 D_RATE("flushing %d samples of rate " "idx %d\n", il3945_rate_scale_flush_wins()
257 int retries, int idx) il3945_collect_tx_data()
319 ((win->success_ratio * rs_sta->expected_tpt[idx] + il3945_collect_tx_data()
463 first_idx = sband->bitrates[info->status.rates[0].idx].hw_value; il3945_rs_tx_status()
516 /* Update the last idx win with success/failure based on ACK */ il3945_rs_tx_status()
543 il3945_get_adjacent_rate(struct il3945_rs_sta *rs_sta, u8 idx, u16 rate_mask, il3945_get_adjacent_rate() argument
557 i = idx - 1; il3945_get_adjacent_rate()
566 i = idx + 1; il3945_get_adjacent_rate()
577 low = idx; il3945_get_adjacent_rate()
590 high = idx; il3945_get_adjacent_rate()
618 * As such, we can't convert the idx obtained below into the hw_mode's
631 int idx; il3945_rs_get_rate() local
665 idx = min(rs_sta->last_txrate_idx & 0xffff, RATE_COUNT_3945 - 1); il3945_rs_get_rate()
676 if (rs_sta->start_rate < idx && il3945_rs_get_rate()
678 idx = rs_sta->start_rate; il3945_rs_get_rate()
683 if (max_rate_idx != -1 && max_rate_idx < idx) { il3945_rs_get_rate()
685 idx = max_rate_idx; il3945_rs_get_rate()
688 win = &(rs_sta->win[idx]); il3945_rs_get_rate()
698 "expected_tpt is %sNULL\n", idx, win->counter, il3945_rs_get_rate()
711 il3945_get_adjacent_rate(rs_sta, idx, rate_mask, sband->band); il3945_rs_get_rate()
790 idx = low; il3945_rs_get_rate()
795 idx = high; il3945_rs_get_rate()
804 D_RATE("Selected %d (action %d) - low %d high %d\n", idx, scale_action, il3945_rs_get_rate()
810 if (WARN_ON_ONCE(idx < IL_FIRST_OFDM_RATE)) il3945_rs_get_rate()
811 idx = IL_FIRST_OFDM_RATE; il3945_rs_get_rate()
812 rs_sta->last_txrate_idx = idx; il3945_rs_get_rate()
813 info->control.rates[0].idx = idx - IL_FIRST_OFDM_RATE; il3945_rs_get_rate()
815 rs_sta->last_txrate_idx = idx; il3945_rs_get_rate()
816 info->control.rates[0].idx = rs_sta->last_txrate_idx; il3945_rs_get_rate()
820 D_RATE("leave: %d\n", idx); il3945_rs_get_rate()
841 "tx packets=%d last rate idx=%d\n" il3945_sta_dbgfs_stats_table_read()
964 D_RATE("leave: rssi %d assign rate idx: " "%d (plcp 0x%x)\n", rssi, il3945_rate_scale_init()
255 il3945_collect_tx_data(struct il3945_rs_sta *rs_sta, struct il3945_rate_scale_data *win, int success, int retries, int idx) il3945_collect_tx_data() argument
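
A sketch of il3945_get_rate_idx_by_rssi() as shown above: walk a table ordered from the strongest signal down, stop at the first row whose min_rssi the current RSSI still meets, clamp to the last row, and return that row's rate idx. The table values below are invented.

#include <stdio.h>

struct tpt_entry { int min_rssi; unsigned char idx; };

static const struct tpt_entry tpt_table[] = {
	{ -60, 11 }, { -72, 7 }, { -82, 3 }, { -89, 0 },
};

static unsigned char rate_idx_by_rssi(int rssi)
{
	unsigned int idx = 0;
	const unsigned int table_size = sizeof(tpt_table) / sizeof(tpt_table[0]);

	while (idx < table_size && rssi < tpt_table[idx].min_rssi)
		idx++;
	if (idx > table_size - 1)
		idx = table_size - 1;	/* weakest signal: lowest rate */
	return tpt_table[idx].idx;
}

int main(void)
{
	printf("rssi -65 -> rate idx %u\n", rate_idx_by_rssi(-65));	/* 7 */
	printf("rssi -95 -> rate idx %u\n", rate_idx_by_rssi(-95));	/* 0 */
	return 0;
}
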
/linux-4.4.14/drivers/pnp/
H A Dmanager.c37 static int pnp_assign_port(struct pnp_dev *dev, struct pnp_port *rule, int idx) pnp_assign_port() argument
41 res = pnp_find_resource(dev, rule->flags, IORESOURCE_IO, idx); pnp_assign_port()
44 "flags %#lx\n", idx, (unsigned long long) res->start, pnp_assign_port()
56 pnp_dbg(&dev->dev, " io %d disabled\n", idx); pnp_assign_port()
68 "(min %#llx max %#llx)\n", idx, pnp_assign_port()
80 static int pnp_assign_mem(struct pnp_dev *dev, struct pnp_mem *rule, int idx) pnp_assign_mem() argument
84 res = pnp_find_resource(dev, rule->flags, IORESOURCE_MEM, idx); pnp_assign_mem()
87 "flags %#lx\n", idx, (unsigned long long) res->start, pnp_assign_mem()
107 pnp_dbg(&dev->dev, " mem %d disabled\n", idx); pnp_assign_mem()
119 "(min %#llx max %#llx)\n", idx, pnp_assign_mem()
131 static int pnp_assign_irq(struct pnp_dev *dev, struct pnp_irq *rule, int idx) pnp_assign_irq() argument
141 res = pnp_find_resource(dev, rule->flags, IORESOURCE_IRQ, idx); pnp_assign_irq()
144 idx, (int) res->start, res->flags); pnp_assign_irq()
155 pnp_dbg(&dev->dev, " irq %d disabled\n", idx); pnp_assign_irq()
177 pnp_dbg(&dev->dev, " irq %d disabled (optional)\n", idx); pnp_assign_irq()
181 pnp_dbg(&dev->dev, " couldn't assign irq %d\n", idx); pnp_assign_irq()
190 static int pnp_assign_dma(struct pnp_dev *dev, struct pnp_dma *rule, int idx) pnp_assign_dma() argument
200 res = pnp_find_resource(dev, rule->flags, IORESOURCE_DMA, idx); pnp_assign_dma()
203 idx, (int) res->start, res->flags); pnp_assign_dma()
214 pnp_dbg(&dev->dev, " dma %d disabled\n", idx); pnp_assign_dma()
226 pnp_dbg(&dev->dev, " couldn't assign dma %d\n", idx); pnp_assign_dma()
/linux-4.4.14/drivers/net/ethernet/hisilicon/hns/
H A Dhns_enet.h72 #define tx_ring_data(priv, idx) ((priv)->ring_data[idx])
73 #define rx_ring_data(priv, idx) \
74 ((priv)->ring_data[(priv)->ae_handle->q_num + (idx)])
/linux-4.4.14/arch/x86/um/asm/
H A Dptrace.h50 extern int ptrace_get_thread_area(struct task_struct *child, int idx,
53 extern int ptrace_set_thread_area(struct task_struct *child, int idx,
69 static inline int ptrace_get_thread_area(struct task_struct *child, int idx, ptrace_get_thread_area() argument
75 static inline int ptrace_set_thread_area(struct task_struct *child, int idx, ptrace_set_thread_area() argument
/linux-4.4.14/include/sound/
H A Dhda_regmap.h40 * @idx: input index value
44 #define snd_hdac_regmap_encode_amp(nid, ch, dir, idx) \
48 (idx))
54 * @idx: input index value
58 #define snd_hdac_regmap_encode_amp_stereo(nid, dir, idx) \
62 (idx))
131 int ch, int dir, int idx) snd_hdac_regmap_get_amp()
133 unsigned int cmd = snd_hdac_regmap_encode_amp(nid, ch, dir, idx); snd_hdac_regmap_get_amp()
146 * @idx: the index value (only for input direction)
155 int ch, int dir, int idx, int mask, int val) snd_hdac_regmap_update_amp()
157 unsigned int cmd = snd_hdac_regmap_encode_amp(nid, ch, dir, idx); snd_hdac_regmap_update_amp()
176 int dir, int idx) snd_hdac_regmap_get_amp_stereo()
178 unsigned int cmd = snd_hdac_regmap_encode_amp_stereo(nid, dir, idx); snd_hdac_regmap_get_amp_stereo()
190 * @idx: the index value (only for input direction)
200 int dir, int idx, int mask, int val) snd_hdac_regmap_update_amp_stereo()
202 unsigned int cmd = snd_hdac_regmap_encode_amp_stereo(nid, dir, idx); snd_hdac_regmap_update_amp_stereo()
130 snd_hdac_regmap_get_amp(struct hdac_device *codec, hda_nid_t nid, int ch, int dir, int idx) snd_hdac_regmap_get_amp() argument
154 snd_hdac_regmap_update_amp(struct hdac_device *codec, hda_nid_t nid, int ch, int dir, int idx, int mask, int val) snd_hdac_regmap_update_amp() argument
175 snd_hdac_regmap_get_amp_stereo(struct hdac_device *codec, hda_nid_t nid, int dir, int idx) snd_hdac_regmap_get_amp_stereo() argument
199 snd_hdac_regmap_update_amp_stereo(struct hdac_device *codec, hda_nid_t nid, int dir, int idx, int mask, int val) snd_hdac_regmap_update_amp_stereo() argument
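
An illustrative sketch of the encode-amp idea behind the hda_regmap.h macros: nid, channel, direction and input index are packed into one pseudo register address so the regmap can cache amp values per input. The field layout below is made up for the example; it is not the real HDA encoding.

#include <stdio.h>

static unsigned int encode_amp(unsigned int nid, unsigned int ch,
			       unsigned int dir, unsigned int idx)
{
	return (nid << 16) | (ch << 12) | (dir << 8) | idx;
}

int main(void)
{
	unsigned int cmd = encode_amp(0x14, 1, 0, 2);

	printf("pseudo register 0x%08x (nid 0x%x, idx %u)\n",
	       cmd, cmd >> 16, cmd & 0xff);
	return 0;
}
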
/linux-4.4.14/drivers/isdn/hardware/mISDN/
H A Dnetjet.c45 int idx; member in struct:tiger_dma
57 int idx; member in struct:tiger_ch
188 fill_mem(struct tiger_ch *bc, u32 idx, u32 cnt, u32 fill) fill_mem() argument
193 pr_debug("%s: B%1d fill %02x len %d idx %d/%d\n", card->name, fill_mem()
194 bc->bch.nr, fill, cnt, idx, card->send.idx); fill_mem()
201 val = card->send.start[idx]; fill_mem()
204 card->send.start[idx++] = val; fill_mem()
205 if (idx >= card->send.size) fill_mem()
206 idx = 0; fill_mem()
239 bc->idx = 0; mode_tiger()
253 bc->idx = 0; mode_tiger()
273 card->send.idx = (card->send.dmacur - card->send.dmastart) >> 2; mode_tiger()
274 card->recv.idx = (card->recv.dmacur - card->recv.dmastart) >> 2; mode_tiger()
275 pr_debug("%s: %s ctrl %x irq %02x/%02x idx %d/%d\n", mode_tiger()
280 card->send.idx, mode_tiger()
281 card->recv.idx); mode_tiger()
376 read_dma(struct tiger_ch *bc, u32 idx, int cnt) read_dma() argument
383 if (bc->lastrx == idx) { read_dma()
385 pr_info("%s: B%1d overrun at idx %d\n", card->name, read_dma()
386 bc->bch.nr, idx); read_dma()
388 bc->lastrx = idx; read_dma()
406 val = card->recv.start[idx++]; read_dma()
409 if (idx >= card->recv.size) read_dma()
410 idx = 0; read_dma()
458 u32 idx; recv_tiger() local
466 idx = cnt - 1; recv_tiger()
468 idx = card->recv.size - 1; recv_tiger()
471 read_dma(&card->bc[0], idx, cnt); recv_tiger()
473 read_dma(&card->bc[1], idx, cnt); recv_tiger()
481 card->send.idx = (card->send.dmacur - card->send.dmastart) >> 2; resync()
488 if (card->send.idx < ((card->send.size / 2) - 1)) resync()
489 bc->idx = (card->recv.size / 2) - 1; resync()
491 bc->idx = card->recv.size - 1; resync()
493 pr_debug("%s: %s B%1d free %d idx %d/%d\n", card->name, resync()
494 __func__, bc->bch.nr, bc->free, bc->idx, card->send.idx); resync()
509 pr_debug("%s: %s B%1d %d state %x idx %d/%d\n", card->name, fill_hdlc_flag()
511 bc->idx, card->send.idx); fill_hdlc_flag()
522 if (bc->idx >= card->send.size) fill_hdlc_flag()
523 bc->idx = 0; fill_hdlc_flag()
524 v = card->send.start[bc->idx]; fill_hdlc_flag()
527 card->send.start[bc->idx++] = v; fill_hdlc_flag()
556 pr_debug("%s: %s B%1d %d/%d/%d/%d state %x idx %d/%d\n", fill_dma()
559 bc->idx, card->send.idx); fill_dma()
585 if (bc->idx >= card->send.size) fill_dma()
586 bc->idx = 0; fill_dma()
587 v = card->send.start[bc->idx]; fill_dma()
590 card->send.start[bc->idx++] = v; fill_dma()
594 if (bc->idx >= card->send.size) fill_dma()
595 bc->idx = 0; fill_dma()
596 v = card->send.start[bc->idx]; fill_dma()
600 card->send.start[bc->idx++] = v; fill_dma()
658 pr_debug("%s: B%1d TX no data free %d idx %d/%d\n", card->name, send_tiger_bc()
659 bc->bch.nr, bc->free, bc->idx, card->send.idx); send_tiger_bc()
661 fill_mem(bc, bc->idx, bc->free, 0xff); send_tiger_bc()
718 card->recv.idx = (card->recv.dmacur - card->recv.dmastart) >> 2; nj_irq()
725 card->send.idx = (card->send.dmacur - card->send.dmastart) >> 2; nj_irq()
733 card->recv.idx, card->send.idx); nj_irq()
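
A standalone sketch of the fill_mem() wraparound above: cnt words are written into a circular DMA buffer starting at idx, wrapping to 0 at the buffer size. The ring size and fill value are arbitrary here.

#include <stdio.h>

#define RING_SIZE 8

static unsigned int ring[RING_SIZE];

static void fill_mem(unsigned int idx, unsigned int cnt, unsigned int fill)
{
	for (unsigned int i = 0; i < cnt; i++) {
		ring[idx++] = fill;
		if (idx >= RING_SIZE)
			idx = 0;	/* wrap to the start */
	}
}

int main(void)
{
	fill_mem(6, 4, 0xff);	/* touches slots 6, 7, 0, 1 */
	for (int i = 0; i < RING_SIZE; i++)
		printf("%d:%02x ", i, ring[i]);
	printf("\n");
	return 0;
}
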
/linux-4.4.14/net/wireless/
H A Dwext-compat.c435 int idx, struct key_params *params) __cfg80211_set_encryption()
465 if (idx < 4 || idx > 5) __cfg80211_set_encryption()
467 } else if (idx < 0 || idx > 3) __cfg80211_set_encryption()
477 if (idx == wdev->wext.default_key && __cfg80211_set_encryption()
487 err = rdev_del_key(rdev, dev, idx, pairwise, __cfg80211_set_encryption()
499 memset(wdev->wext.keys->data[idx], 0, __cfg80211_set_encryption()
500 sizeof(wdev->wext.keys->data[idx])); __cfg80211_set_encryption()
501 wdev->wext.keys->params[idx].key_len = 0; __cfg80211_set_encryption()
502 wdev->wext.keys->params[idx].cipher = 0; __cfg80211_set_encryption()
504 if (idx == wdev->wext.default_key) __cfg80211_set_encryption()
506 else if (idx == wdev->wext.default_mgmt_key) __cfg80211_set_encryption()
519 if (cfg80211_validate_key_settings(rdev, params, idx, pairwise, addr)) __cfg80211_set_encryption()
524 err = rdev_add_key(rdev, dev, idx, pairwise, addr, params); __cfg80211_set_encryption()
529 wdev->wext.keys->params[idx] = *params; __cfg80211_set_encryption()
530 memcpy(wdev->wext.keys->data[idx], __cfg80211_set_encryption()
532 wdev->wext.keys->params[idx].key = __cfg80211_set_encryption()
533 wdev->wext.keys->data[idx]; __cfg80211_set_encryption()
550 err = rdev_set_default_key(rdev, dev, idx, true, true); __cfg80211_set_encryption()
553 wdev->wext.default_key = idx; __cfg80211_set_encryption()
563 err = rdev_set_default_mgmt_key(rdev, dev, idx); __cfg80211_set_encryption()
565 wdev->wext.default_mgmt_key = idx; __cfg80211_set_encryption()
575 int idx, struct key_params *params) cfg80211_set_encryption()
581 remove, tx_key, idx, params); cfg80211_set_encryption()
593 int idx, err; cfg80211_wext_siwencode() local
607 idx = erq->flags & IW_ENCODE_INDEX; cfg80211_wext_siwencode()
608 if (idx == 0) { cfg80211_wext_siwencode()
609 idx = wdev->wext.default_key; cfg80211_wext_siwencode()
610 if (idx < 0) cfg80211_wext_siwencode()
611 idx = 0; cfg80211_wext_siwencode()
612 } else if (idx < 1 || idx > 4) cfg80211_wext_siwencode()
615 idx--; cfg80211_wext_siwencode()
624 err = rdev_set_default_key(rdev, dev, idx, true, cfg80211_wext_siwencode()
627 wdev->wext.default_key = idx; cfg80211_wext_siwencode()
644 idx, &params); cfg80211_wext_siwencode()
655 int idx; cfg80211_wext_siwencodeext() local
699 idx = erq->flags & IW_ENCODE_INDEX; cfg80211_wext_siwencodeext()
701 if (idx < 4 || idx > 5) { cfg80211_wext_siwencodeext()
702 idx = wdev->wext.default_mgmt_key; cfg80211_wext_siwencodeext()
703 if (idx < 0) cfg80211_wext_siwencodeext()
706 idx--; cfg80211_wext_siwencodeext()
708 if (idx < 1 || idx > 4) { cfg80211_wext_siwencodeext()
709 idx = wdev->wext.default_key; cfg80211_wext_siwencodeext()
710 if (idx < 0) cfg80211_wext_siwencodeext()
713 idx--; cfg80211_wext_siwencodeext()
735 idx, &params); cfg80211_wext_siwencodeext()
743 int idx; cfg80211_wext_giwencode() local
749 idx = erq->flags & IW_ENCODE_INDEX; cfg80211_wext_giwencode()
750 if (idx == 0) { cfg80211_wext_giwencode()
751 idx = wdev->wext.default_key; cfg80211_wext_giwencode()
752 if (idx < 0) cfg80211_wext_giwencode()
753 idx = 0; cfg80211_wext_giwencode()
754 } else if (idx < 1 || idx > 4) cfg80211_wext_giwencode()
757 idx--; cfg80211_wext_giwencode()
759 erq->flags = idx + 1; cfg80211_wext_giwencode()
761 if (!wdev->wext.keys || !wdev->wext.keys->params[idx].cipher) { cfg80211_wext_giwencode()
768 wdev->wext.keys->params[idx].key_len); cfg80211_wext_giwencode()
769 memcpy(keybuf, wdev->wext.keys->params[idx].key, erq->length); cfg80211_wext_giwencode()
432 __cfg80211_set_encryption(struct cfg80211_registered_device *rdev, struct net_device *dev, bool pairwise, const u8 *addr, bool remove, bool tx_key, int idx, struct key_params *params) __cfg80211_set_encryption() argument
572 cfg80211_set_encryption(struct cfg80211_registered_device *rdev, struct net_device *dev, bool pairwise, const u8 *addr, bool remove, bool tx_key, int idx, struct key_params *params) cfg80211_set_encryption() argument
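
A sketch of the WEP key-index normalisation in cfg80211_wext_siwencode(): userspace passes 1-based indices where 0 means "current default key", while the driver works 0-based. The default-key value is illustrative.

#include <stdio.h>

static int normalize_key_idx(int user_idx, int default_key)
{
	int idx = user_idx;

	if (idx == 0) {			/* 0: use the default key */
		idx = default_key;
		if (idx < 0)
			idx = 0;
	} else if (idx < 1 || idx > 4) {
		return -1;		/* -EINVAL in the kernel */
	} else {
		idx--;			/* 1..4 -> 0..3 */
	}
	return idx;
}

int main(void)
{
	printf("user 0 -> %d\n", normalize_key_idx(0, 2));	/* 2 */
	printf("user 4 -> %d\n", normalize_key_idx(4, 2));	/* 3 */
	printf("user 7 -> %d\n", normalize_key_idx(7, 2));	/* -1 */
	return 0;
}
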
/linux-4.4.14/arch/powerpc/kvm/
H A Dbook3s_64_vio_hv.c55 unsigned long idx = ioba >> SPAPR_TCE_SHIFT; kvmppc_h_put_tce() local
64 page = stt->pages[idx / TCES_PER_PAGE]; kvmppc_h_put_tce()
68 /* udbg_printf("tce @ %p\n", &tbl[idx % TCES_PER_PAGE]); */ kvmppc_h_put_tce()
69 tbl[idx % TCES_PER_PAGE] = tce; kvmppc_h_put_tce()
87 unsigned long idx = ioba >> SPAPR_TCE_SHIFT; kvmppc_h_get_tce() local
94 page = stt->pages[idx / TCES_PER_PAGE]; kvmppc_h_get_tce()
97 vcpu->arch.gpr[4] = tbl[idx % TCES_PER_PAGE]; kvmppc_h_get_tce()
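
A sketch of the page/offset split in kvmppc_h_put_tce() and kvmppc_h_get_tce(): a flat table index is divided into a backing-page number (idx / TCES_PER_PAGE) and an entry within that page (idx % TCES_PER_PAGE). The constant below is illustrative.

#include <stdio.h>

#define TCES_PER_PAGE 512	/* e.g. 4096-byte page / 8-byte entry */

int main(void)
{
	unsigned long idx = 1300;

	printf("idx %lu -> page %lu, slot %lu\n",
	       idx, idx / TCES_PER_PAGE, idx % TCES_PER_PAGE);
	return 0;
}
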
/linux-4.4.14/drivers/w1/masters/
H A Dds2482.c454 int idx; ds2482_probe() local
498 for (idx = 0; idx < data->w1_count; idx++) { ds2482_probe()
499 data->w1_ch[idx].pdev = data; ds2482_probe()
500 data->w1_ch[idx].channel = idx; ds2482_probe()
503 data->w1_ch[idx].w1_bm.data = &data->w1_ch[idx]; ds2482_probe()
504 data->w1_ch[idx].w1_bm.read_byte = ds2482_w1_read_byte; ds2482_probe()
505 data->w1_ch[idx].w1_bm.write_byte = ds2482_w1_write_byte; ds2482_probe()
506 data->w1_ch[idx].w1_bm.touch_bit = ds2482_w1_touch_bit; ds2482_probe()
507 data->w1_ch[idx].w1_bm.triplet = ds2482_w1_triplet; ds2482_probe()
508 data->w1_ch[idx].w1_bm.reset_bus = ds2482_w1_reset_bus; ds2482_probe()
509 data->w1_ch[idx].w1_bm.set_pullup = ds2482_w1_set_pullup; ds2482_probe()
511 err = w1_add_master_device(&data->w1_ch[idx].w1_bm); ds2482_probe()
513 data->w1_ch[idx].pdev = NULL; ds2482_probe()
521 for (idx = 0; idx < data->w1_count; idx++) { ds2482_probe()
522 if (data->w1_ch[idx].pdev != NULL) ds2482_probe()
523 w1_remove_master_device(&data->w1_ch[idx].w1_bm); ds2482_probe()
534 int idx; ds2482_remove() local
537 for (idx = 0; idx < data->w1_count; idx++) { ds2482_remove()
538 if (data->w1_ch[idx].pdev != NULL) ds2482_remove()
539 w1_remove_master_device(&data->w1_ch[idx].w1_bm); ds2482_remove()
/linux-4.4.14/tools/testing/selftests/vm/
H A Dtranshuge-stress.c117 size_t idx = pfn >> (HPAGE_SHIFT - PAGE_SHIFT); main() local
120 if (idx >= map_len) { main()
121 map = realloc(map, idx + 1); main()
124 memset(map + map_len, 0, idx + 1 - map_len); main()
125 map_len = idx + 1; main()
127 if (!map[idx]) main()
129 map[idx] = 1; main()
/linux-4.4.14/drivers/reset/sti/
H A Dreset-syscfg.c53 unsigned long idx, int assert) syscfg_reset_program_hw()
60 if (idx >= rcdev->nr_resets) syscfg_reset_program_hw()
63 ch = &rst->channels[idx]; syscfg_reset_program_hw()
92 unsigned long idx) syscfg_reset_assert()
94 return syscfg_reset_program_hw(rcdev, idx, true); syscfg_reset_assert()
98 unsigned long idx) syscfg_reset_deassert()
100 return syscfg_reset_program_hw(rcdev, idx, false); syscfg_reset_deassert()
104 unsigned long idx) syscfg_reset_dev()
106 int err = syscfg_reset_assert(rcdev, idx); syscfg_reset_dev()
110 return syscfg_reset_deassert(rcdev, idx); syscfg_reset_dev()
52 syscfg_reset_program_hw(struct reset_controller_dev *rcdev, unsigned long idx, int assert) syscfg_reset_program_hw() argument
91 syscfg_reset_assert(struct reset_controller_dev *rcdev, unsigned long idx) syscfg_reset_assert() argument
97 syscfg_reset_deassert(struct reset_controller_dev *rcdev, unsigned long idx) syscfg_reset_deassert() argument
103 syscfg_reset_dev(struct reset_controller_dev *rcdev, unsigned long idx) syscfg_reset_dev() argument
/linux-4.4.14/drivers/memory/
H A Dtegra20-mc.c134 int idx, cid; tegra20_mc_decode() local
159 idx = n - MC_INT_ERR_SHIFT; tegra20_mc_decode()
160 if ((idx < 0) || (idx >= ARRAY_SIZE(reg))) { tegra20_mc_decode()
166 req = mc_readl(mc, reg[idx].offset); tegra20_mc_decode()
167 cid = (req >> reg[idx].cid_shift) & MC_CLIENT_ID_MASK; tegra20_mc_decode()
171 addr = mc_readl(mc, reg[idx].offset + sizeof(u32)); tegra20_mc_decode()
174 reg[idx].message, req, addr, client, tegra20_mc_decode()
175 (req & BIT(reg[idx].write_bit)) ? "write" : "read", tegra20_mc_decode()
176 (reg[idx].offset == MC_SECURITY_VIOLATION_STATUS) ? tegra20_mc_decode()
/linux-4.4.14/arch/x86/include/asm/
H A Dfixmap.h62 * higher than 1). Use set_fixmap(idx,phys) to associate
149 void __native_set_fixmap(enum fixed_addresses idx, pte_t pte);
150 void native_set_fixmap(enum fixed_addresses idx,
154 static inline void __set_fixmap(enum fixed_addresses idx, __set_fixmap() argument
157 native_set_fixmap(idx, phys, flags); __set_fixmap()
163 #define __late_set_fixmap(idx, phys, flags) __set_fixmap(idx, phys, flags)
164 #define __late_clear_fixmap(idx) __set_fixmap(idx, 0, __pgprot(0))
166 void __early_set_fixmap(enum fixed_addresses idx,
/linux-4.4.14/tools/vm/
H A Dslabinfo-gnuplot.sh226 local idx=0
232 files[$idx]=$p
233 idx=$idx+1
236 t_files[$idx]=$p
237 idx=$idx+1
240 files[$idx]=$p
241 idx=$idx+1
/linux-4.4.14/sound/isa/msnd/
H A Dmsnd_pinnacle.c892 static int snd_msnd_isa_probe(struct device *pdev, unsigned int idx) snd_msnd_isa_probe() argument
898 if (has_isapnp(idx) snd_msnd_isa_probe()
900 || cfg[idx] == SNDRV_AUTO_PORT snd_msnd_isa_probe()
907 err = snd_card_new(pdev, index[idx], id[idx], THIS_MODULE, snd_msnd_isa_probe()
916 switch (irq[idx]) { snd_msnd_isa_probe()
931 switch (mem[idx]) { snd_msnd_isa_probe()
947 cfg[idx]); snd_msnd_isa_probe()
949 if (!request_region(cfg[idx], 2, "Pinnacle/Fiji Config")) { snd_msnd_isa_probe()
951 cfg[idx]); snd_msnd_isa_probe()
955 if (reset[idx]) snd_msnd_isa_probe()
956 if (snd_msnd_pinnacle_cfg_reset(cfg[idx])) { snd_msnd_isa_probe()
962 err = snd_msnd_write_cfg_logical(cfg[idx], 0, snd_msnd_isa_probe()
963 io[idx], 0, snd_msnd_isa_probe()
964 irq[idx], mem[idx]); snd_msnd_isa_probe()
972 if (mpu_io[idx] != SNDRV_AUTO_PORT snd_msnd_isa_probe()
973 && mpu_irq[idx] != SNDRV_AUTO_IRQ) { snd_msnd_isa_probe()
976 mpu_io[idx], mpu_irq[idx]); snd_msnd_isa_probe()
977 err = snd_msnd_write_cfg_logical(cfg[idx], 1, snd_msnd_isa_probe()
978 mpu_io[idx], 0, snd_msnd_isa_probe()
979 mpu_irq[idx], 0); snd_msnd_isa_probe()
986 if (ide_io0[idx] != SNDRV_AUTO_PORT snd_msnd_isa_probe()
987 && ide_io1[idx] != SNDRV_AUTO_PORT snd_msnd_isa_probe()
988 && ide_irq[idx] != SNDRV_AUTO_IRQ) { snd_msnd_isa_probe()
991 ide_io0[idx], ide_io1[idx], ide_irq[idx]); snd_msnd_isa_probe()
992 err = snd_msnd_write_cfg_logical(cfg[idx], 2, snd_msnd_isa_probe()
993 ide_io0[idx], ide_io1[idx], snd_msnd_isa_probe()
994 ide_irq[idx], 0); snd_msnd_isa_probe()
1001 if (joystick_io[idx] != SNDRV_AUTO_PORT) { snd_msnd_isa_probe()
1004 joystick_io[idx]); snd_msnd_isa_probe()
1005 err = snd_msnd_write_cfg_logical(cfg[idx], 3, snd_msnd_isa_probe()
1006 joystick_io[idx], 0, snd_msnd_isa_probe()
1012 release_region(cfg[idx], 2); snd_msnd_isa_probe()
1022 chip->io = io[idx]; snd_msnd_isa_probe()
1023 chip->irq = irq[idx]; snd_msnd_isa_probe()
1024 chip->base = mem[idx]; snd_msnd_isa_probe()
1030 if (write_ndelay[idx]) snd_msnd_isa_probe()
1035 if (digital[idx]) snd_msnd_isa_probe()
1058 release_region(cfg[idx], 2); snd_msnd_isa_probe()
1084 static int idx; snd_msnd_pnp_detect() local
1091 for ( ; idx < SNDRV_CARDS; idx++) { snd_msnd_pnp_detect()
1092 if (has_isapnp(idx)) snd_msnd_pnp_detect()
1095 if (idx >= SNDRV_CARDS) snd_msnd_pnp_detect()
1124 index[idx], id[idx], THIS_MODULE, snd_msnd_pnp_detect()
1135 io[idx] = pnp_port_start(pnp_dev, 0); snd_msnd_pnp_detect()
1136 irq[idx] = pnp_irq(pnp_dev, 0); snd_msnd_pnp_detect()
1137 mem[idx] = pnp_mem_start(pnp_dev, 0); snd_msnd_pnp_detect()
1138 mpu_io[idx] = pnp_port_start(mpu_dev, 0); snd_msnd_pnp_detect()
1139 mpu_irq[idx] = pnp_irq(mpu_dev, 0); snd_msnd_pnp_detect()
1147 chip->io = io[idx]; snd_msnd_pnp_detect()
1148 chip->irq = irq[idx]; snd_msnd_pnp_detect()
1149 chip->base = mem[idx]; snd_msnd_pnp_detect()
1155 if (write_ndelay[idx]) snd_msnd_pnp_detect()
1160 if (digital[idx]) snd_msnd_pnp_detect()
1177 ++idx; snd_msnd_pnp_detect()
/linux-4.4.14/drivers/net/ethernet/intel/i40e/
H A Di40e_hmc.c199 * @idx: the page index
213 u32 idx) i40e_remove_pd_bp()
223 sd_idx = idx / I40E_HMC_PD_CNT_IN_SD; i40e_remove_pd_bp()
224 rel_pd_idx = idx % I40E_HMC_PD_CNT_IN_SD; i40e_remove_pd_bp()
227 hw_dbg(hw, "i40e_remove_pd_bp: bad idx\n"); i40e_remove_pd_bp()
249 I40E_INVALIDATE_PF_HMC_PD(hw, sd_idx, idx); i40e_remove_pd_bp()
265 * @idx: the page index
268 u32 idx) i40e_prep_remove_sd_bp()
274 sd_entry = &hmc_info->sd_table.sd_entry[idx]; i40e_prep_remove_sd_bp()
292 * @idx: the page index
297 u32 idx, bool is_pf) i40e_remove_sd_bp_new()
305 sd_entry = &hmc_info->sd_table.sd_entry[idx]; i40e_remove_sd_bp_new()
306 I40E_CLEAR_PF_SD_ENTRY(hw, idx, I40E_SD_TYPE_DIRECT); i40e_remove_sd_bp_new()
314 * @idx: segment descriptor index to find the relevant page descriptor
317 u32 idx) i40e_prep_remove_pd_page()
322 sd_entry = &hmc_info->sd_table.sd_entry[idx]; i40e_prep_remove_pd_page()
341 * @idx: segment descriptor index to find the relevant page descriptor
346 u32 idx, bool is_pf) i40e_remove_pd_page_new()
353 sd_entry = &hmc_info->sd_table.sd_entry[idx]; i40e_remove_pd_page_new()
354 I40E_CLEAR_PF_SD_ENTRY(hw, idx, I40E_SD_TYPE_PAGED); i40e_remove_pd_page_new()
211 i40e_remove_pd_bp(struct i40e_hw *hw, struct i40e_hmc_info *hmc_info, u32 idx) i40e_remove_pd_bp() argument
267 i40e_prep_remove_sd_bp(struct i40e_hmc_info *hmc_info, u32 idx) i40e_prep_remove_sd_bp() argument
295 i40e_remove_sd_bp_new(struct i40e_hw *hw, struct i40e_hmc_info *hmc_info, u32 idx, bool is_pf) i40e_remove_sd_bp_new() argument
316 i40e_prep_remove_pd_page(struct i40e_hmc_info *hmc_info, u32 idx) i40e_prep_remove_pd_page() argument
344 i40e_remove_pd_page_new(struct i40e_hw *hw, struct i40e_hmc_info *hmc_info, u32 idx, bool is_pf) i40e_remove_pd_page_new() argument
/linux-4.4.14/drivers/cpufreq/
H A Dscpi-cpufreq.c43 int idx, ret = 0; scpi_opp_table_ops() local
53 for (opp = info->opps, idx = 0; idx < info->count; idx++, opp++) { scpi_opp_table_ops()
62 while (idx-- > 0) scpi_opp_table_ops()
/linux-4.4.14/arch/x86/pci/
H A Di386.c71 pcibios_save_fw_addr(struct pci_dev *dev, int idx, resource_size_t fw_addr) pcibios_save_fw_addr() argument
88 map->fw_addr[idx] = fw_addr; pcibios_save_fw_addr()
94 map->fw_addr[idx] = fw_addr; pcibios_save_fw_addr()
98 resource_size_t pcibios_retrieve_fw_addr(struct pci_dev *dev, int idx) pcibios_retrieve_fw_addr() argument
110 fw_addr = map->fw_addr[idx]; pcibios_retrieve_fw_addr()
210 int idx; pcibios_allocate_bridge_resources() local
213 for (idx = PCI_BRIDGE_RESOURCES; idx < PCI_NUM_RESOURCES; idx++) { pcibios_allocate_bridge_resources()
214 r = &dev->resource[idx]; pcibios_allocate_bridge_resources()
219 if (!r->start || pci_claim_bridge_resource(dev, idx) < 0) { pcibios_allocate_bridge_resources()
250 int idx, disabled, i; pcibios_allocate_dev_resources() local
263 for (idx = idx_range[i].start; idx <= idx_range[i].end; idx++) { pcibios_allocate_dev_resources()
264 r = &dev->resource[idx]; pcibios_allocate_dev_resources()
276 idx, r, disabled, pass); pcibios_allocate_dev_resources()
277 if (pci_claim_resource(dev, idx) < 0) { pcibios_allocate_dev_resources()
280 idx, r); pcibios_allocate_dev_resources()
284 idx, r->start); pcibios_allocate_dev_resources()
/linux-4.4.14/arch/xtensa/include/asm/
H A Dfixmap.h34 * higher than 1) use fixmap_set(idx,phys) to associate
56 * 'index to address' translation. If anyone tries to use the idx
60 static __always_inline unsigned long fix_to_virt(const unsigned int idx) fix_to_virt() argument
62 BUILD_BUG_ON(idx >= __end_of_fixed_addresses); fix_to_virt()
63 return __fix_to_virt(idx); fix_to_virt()
/linux-4.4.14/arch/mips/loongson64/common/
H A Dmem.c135 int idx; find_vga_mem_init() local
142 for (idx = 0; idx < PCI_NUM_RESOURCES; idx++) { for_each_pci_dev()
143 r = &dev->resource[idx]; for_each_pci_dev()
/linux-4.4.14/sound/pci/
H A Dak4531_codec.c48 int idx;
50 for (idx = 0; idx < 0x19; idx++)
52 idx, ak4531->regs[idx]);
386 unsigned int idx; snd_ak4531_mixer() local
410 for (idx = 0; idx <= 0x19; idx++) { snd_ak4531_mixer()
411 if (idx == AK4531_RESET || idx == AK4531_CLOCK) snd_ak4531_mixer()
413 ak4531->write(ak4531, idx, ak4531->regs[idx] = snd_ak4531_initial_map[idx]); /* recording source is mixer */ snd_ak4531_mixer()
415 for (idx = 0; idx < ARRAY_SIZE(snd_ak4531_controls); idx++) { snd_ak4531_mixer()
416 if ((err = snd_ctl_add(card, snd_ctl_new1(&snd_ak4531_controls[idx], ak4531))) < 0) { snd_ak4531_mixer()
450 int idx; snd_ak4531_resume() local
457 for (idx = 0; idx <= 0x19; idx++) { snd_ak4531_resume()
458 if (idx == AK4531_RESET || idx == AK4531_CLOCK) snd_ak4531_resume()
460 ak4531->write(ak4531, idx, ak4531->regs[idx]); snd_ak4531_resume()
/linux-4.4.14/drivers/usb/usbip/
H A Dstub_main.c58 int idx = -1; get_busid_idx() local
63 idx = i; get_busid_idx()
66 return idx; get_busid_idx()
71 int idx; get_busid_priv() local
75 idx = get_busid_idx(busid); get_busid_priv()
76 if (idx >= 0) get_busid_priv()
77 bid = &(busid_table[idx]); get_busid_priv()
113 int idx; del_match_busid() local
117 idx = get_busid_idx(busid); del_match_busid()
118 if (idx < 0) del_match_busid()
124 if (busid_table[idx].status == STUB_BUSID_OTHER) del_match_busid()
125 memset(busid_table[idx].name, 0, BUSID_SIZE); del_match_busid()
127 if ((busid_table[idx].status != STUB_BUSID_OTHER) && del_match_busid()
128 (busid_table[idx].status != STUB_BUSID_ADDED)) del_match_busid()
129 busid_table[idx].status = STUB_BUSID_REMOV; del_match_busid()
/linux-4.4.14/drivers/mtd/
H A Dafs.c169 u_int mask, off, idx, sz; parse_afs_partitions() local
184 for (idx = off = sz = 0; off < mtd->size; off += mtd->erasesize) { parse_afs_partitions()
202 idx += 1; parse_afs_partitions()
212 str = (char *)(parts + idx); parse_afs_partitions()
217 for (idx = off = 0; off < mtd->size; off += mtd->erasesize) { parse_afs_partitions()
237 parts[idx].name = str; parse_afs_partitions()
238 parts[idx].size = (iis.length + mtd->erasesize - 1) & ~(mtd->erasesize - 1); parse_afs_partitions()
239 parts[idx].offset = img_ptr; parse_afs_partitions()
240 parts[idx].mask_flags = 0; parse_afs_partitions()
243 idx, img_ptr, parts[idx].size / 1024, parse_afs_partitions()
246 idx += 1; parse_afs_partitions()
250 if (!idx) { parse_afs_partitions()
256 return idx ? idx : ret; parse_afs_partitions()
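
A sketch of the two-pass scan in parse_afs_partitions(): pass one only counts valid entries (idx) so the partition table can be sized, pass two fills it. The valid() test is a stand-in for the footer and image-info checks.

#include <stdio.h>
#include <stdlib.h>

static int valid(int off)
{
	return off % 2 == 0;	/* fake "found a footer here" check */
}

int main(void)
{
	int off, idx, count = 0;

	for (off = 0; off < 10; off++)		/* pass 1: count */
		if (valid(off))
			count++;

	int *parts = calloc(count, sizeof(*parts));
	if (!parts)
		return 1;

	for (idx = off = 0; off < 10; off++)	/* pass 2: fill */
		if (valid(off))
			parts[idx++] = off;

	printf("found %d partitions, first at %d\n", idx, parts[0]);
	free(parts);
	return 0;
}
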
/linux-4.4.14/net/mac80211/
H A Drc80211_minstrel_ht_debugfs.c43 int idx = i * MCS_GROUP_RATES + j; minstrel_ht_stats_dump() local
62 *(p++) = (idx == mi->max_tp_rate[0]) ? 'A' : ' '; minstrel_ht_stats_dump()
63 *(p++) = (idx == mi->max_tp_rate[1]) ? 'B' : ' '; minstrel_ht_stats_dump()
64 *(p++) = (idx == mi->max_tp_rate[2]) ? 'C' : ' '; minstrel_ht_stats_dump()
65 *(p++) = (idx == mi->max_tp_rate[3]) ? 'D' : ' '; minstrel_ht_stats_dump()
66 *(p++) = (idx == mi->max_prob_rate) ? 'P' : ' '; minstrel_ht_stats_dump()
78 p += sprintf(p, " %3u ", idx); minstrel_ht_stats_dump()
135 "mode guard # rate [name idx airtime max_tp] [avg(tp) avg(prob) sd(prob)] [prob.|retry|suc|att] [#success | #attempts]\n"); minstrel_ht_stats_open()
189 int idx = i * MCS_GROUP_RATES + j; minstrel_ht_stats_csv_dump() local
208 p += sprintf(p, "%s" ,((idx == mi->max_tp_rate[0]) ? "A" : "")); minstrel_ht_stats_csv_dump()
209 p += sprintf(p, "%s" ,((idx == mi->max_tp_rate[1]) ? "B" : "")); minstrel_ht_stats_csv_dump()
210 p += sprintf(p, "%s" ,((idx == mi->max_tp_rate[2]) ? "C" : "")); minstrel_ht_stats_csv_dump()
211 p += sprintf(p, "%s" ,((idx == mi->max_tp_rate[3]) ? "D" : "")); minstrel_ht_stats_csv_dump()
212 p += sprintf(p, "%s" ,((idx == mi->max_prob_rate) ? "P" : "")); minstrel_ht_stats_csv_dump()
223 p += sprintf(p, "%u,", idx); minstrel_ht_stats_csv_dump()
/linux-4.4.14/net/nfc/
H A Dnetlink.c87 if (nla_put_u32(msg, NFC_ATTR_TARGET_INDEX, target->idx) || nfc_genl_send_target()
125 u32 idx; __get_device_from_cb() local
137 idx = nla_get_u32(nfc_genl_family.attrbuf[NFC_ATTR_DEVICE_INDEX]); __get_device_from_cb()
139 dev = nfc_get_device(idx); __get_device_from_cb()
207 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_targets_found()
266 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_tm_activated()
298 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_tm_deactivated()
329 nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx) || nfc_genl_device_added()
361 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_device_removed()
396 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_llc_send_sdres()
459 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx) || nfc_genl_se_added()
491 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx) || nfc_genl_se_removed()
528 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx) || nfc_genl_se_transaction()
571 nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx) || nfc_genl_send_device()
653 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_dep_link_up_event()
693 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_dep_link_down_event()
713 u32 idx; nfc_genl_get_device() local
719 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_get_device()
721 dev = nfc_get_device(idx); nfc_genl_get_device()
751 u32 idx; nfc_genl_dev_up() local
756 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_dev_up()
758 dev = nfc_get_device(idx); nfc_genl_dev_up()
772 u32 idx; nfc_genl_dev_down() local
777 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_dev_down()
779 dev = nfc_get_device(idx); nfc_genl_dev_down()
793 u32 idx; nfc_genl_start_poll() local
804 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_start_poll()
814 dev = nfc_get_device(idx); nfc_genl_start_poll()
834 u32 idx; nfc_genl_stop_poll() local
839 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_stop_poll()
841 dev = nfc_get_device(idx); nfc_genl_stop_poll()
899 u32 idx; nfc_genl_dep_link_up() local
908 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_dep_link_up()
919 dev = nfc_get_device(idx); nfc_genl_dep_link_up()
934 u32 idx; nfc_genl_dep_link_down() local
939 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_dep_link_down()
941 dev = nfc_get_device(idx); nfc_genl_dep_link_down()
962 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, local->dev->idx) || nfc_genl_send_params()
983 u32 idx; nfc_genl_llc_get_params() local
988 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_llc_get_params()
990 dev = nfc_get_device(idx); nfc_genl_llc_get_params()
1031 u32 idx; nfc_genl_llc_set_params() local
1054 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_llc_set_params()
1056 dev = nfc_get_device(idx); nfc_genl_llc_set_params()
1097 u32 idx; nfc_genl_llc_sdreq() local
1109 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_llc_sdreq()
1111 dev = nfc_get_device(idx); nfc_genl_llc_sdreq()
1184 u32 idx; nfc_genl_fw_download() local
1190 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_fw_download()
1192 dev = nfc_get_device(idx); nfc_genl_fw_download()
1222 nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx)) nfc_genl_fw_download_done()
1242 u32 idx, se_idx; nfc_genl_enable_se() local
1248 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_enable_se()
1251 dev = nfc_get_device(idx); nfc_genl_enable_se()
1265 u32 idx, se_idx; nfc_genl_disable_se() local
1271 idx = nla_get_u32(info->attrs[NFC_ATTR_DEVICE_INDEX]); nfc_genl_disable_se()
1274 dev = nfc_get_device(idx); nfc_genl_disable_se()
1301 if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, dev->idx) || nfc_genl_send_se()
1302 nla_put_u32(msg, NFC_ATTR_SE_INDEX, se->idx) || nfc_genl_send_se()
1573 if (nla_put_u32(skb, NFC_ATTR_DEVICE_INDEX, dev->idx)) __nfc_alloc_vendor_cmd_skb()
/linux-4.4.14/arch/mips/kernel/
H A Dperf_event_mipsxx.c91 u64 (*read_counter)(unsigned int idx);
92 void (*write_counter)(unsigned int idx, u64 val);
183 static unsigned int mipsxx_pmu_swizzle_perf_idx(unsigned int idx) mipsxx_pmu_swizzle_perf_idx() argument
186 idx = (idx + 2) & 3; mipsxx_pmu_swizzle_perf_idx()
187 return idx; mipsxx_pmu_swizzle_perf_idx()
190 static u64 mipsxx_pmu_read_counter(unsigned int idx) mipsxx_pmu_read_counter() argument
192 idx = mipsxx_pmu_swizzle_perf_idx(idx); mipsxx_pmu_read_counter()
194 switch (idx) { mipsxx_pmu_read_counter()
208 WARN_ONCE(1, "Invalid performance counter number (%d)\n", idx); mipsxx_pmu_read_counter()
213 static u64 mipsxx_pmu_read_counter_64(unsigned int idx) mipsxx_pmu_read_counter_64() argument
215 idx = mipsxx_pmu_swizzle_perf_idx(idx); mipsxx_pmu_read_counter_64()
217 switch (idx) { mipsxx_pmu_read_counter_64()
227 WARN_ONCE(1, "Invalid performance counter number (%d)\n", idx); mipsxx_pmu_read_counter_64()
232 static void mipsxx_pmu_write_counter(unsigned int idx, u64 val) mipsxx_pmu_write_counter() argument
234 idx = mipsxx_pmu_swizzle_perf_idx(idx); mipsxx_pmu_write_counter()
236 switch (idx) { mipsxx_pmu_write_counter()
252 static void mipsxx_pmu_write_counter_64(unsigned int idx, u64 val) mipsxx_pmu_write_counter_64() argument
254 idx = mipsxx_pmu_swizzle_perf_idx(idx); mipsxx_pmu_write_counter_64()
256 switch (idx) { mipsxx_pmu_write_counter_64()
272 static unsigned int mipsxx_pmu_read_control(unsigned int idx) mipsxx_pmu_read_control() argument
274 idx = mipsxx_pmu_swizzle_perf_idx(idx); mipsxx_pmu_read_control()
276 switch (idx) { mipsxx_pmu_read_control()
286 WARN_ONCE(1, "Invalid performance counter number (%d)\n", idx); mipsxx_pmu_read_control()
291 static void mipsxx_pmu_write_control(unsigned int idx, unsigned int val) mipsxx_pmu_write_control() argument
293 idx = mipsxx_pmu_swizzle_perf_idx(idx); mipsxx_pmu_write_control()
295 switch (idx) { mipsxx_pmu_write_control()
341 static void mipsxx_pmu_enable_event(struct hw_perf_event *evt, int idx) mipsxx_pmu_enable_event() argument
345 WARN_ON(idx < 0 || idx >= mipspmu.num_counters); mipsxx_pmu_enable_event()
347 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0xff) | mipsxx_pmu_enable_event()
353 cpuc->saved_ctrl[idx] |= mipsxx_pmu_enable_event()
361 static void mipsxx_pmu_disable_event(int idx) mipsxx_pmu_disable_event() argument
366 WARN_ON(idx < 0 || idx >= mipspmu.num_counters); mipsxx_pmu_disable_event()
369 cpuc->saved_ctrl[idx] = mipsxx_pmu_read_control(idx) & mipsxx_pmu_disable_event()
371 mipsxx_pmu_write_control(idx, cpuc->saved_ctrl[idx]); mipsxx_pmu_disable_event()
377 int idx) mipspmu_event_set_period()
404 mipspmu.write_counter(idx, mipspmu.overflow - left); mipspmu_event_set_period()
413 int idx) mipspmu_event_update()
420 new_raw_count = mipspmu.read_counter(idx); mipspmu_event_update()
442 mipspmu_event_set_period(event, hwc, hwc->idx); mipspmu_start()
445 mipsxx_pmu_enable_event(hwc, hwc->idx); mipspmu_start()
454 mipsxx_pmu_disable_event(hwc->idx); mipspmu_stop()
456 mipspmu_event_update(event, hwc, hwc->idx); mipspmu_stop()
465 int idx; mipspmu_add() local
471 idx = mipsxx_pmu_alloc_counter(cpuc, hwc); mipspmu_add()
472 if (idx < 0) { mipspmu_add()
473 err = idx; mipspmu_add()
481 event->hw.idx = idx; mipspmu_add()
482 mipsxx_pmu_disable_event(idx); mipspmu_add()
483 cpuc->events[idx] = event; mipspmu_add()
501 int idx = hwc->idx; mipspmu_del() local
503 WARN_ON(idx < 0 || idx >= mipspmu.num_counters); mipspmu_del()
506 cpuc->events[idx] = NULL; mipspmu_del()
507 clear_bit(idx, cpuc->used_mask); mipspmu_del()
517 if (hwc->idx < 0) mipspmu_read()
520 mipspmu_event_update(event, hwc, hwc->idx); mipspmu_read()
679 static const struct mips_perf_event *mipspmu_map_general_event(int idx) mipspmu_map_general_event() argument
682 if ((*mipspmu.general_event_map)[idx].cntr_mask == 0) mipspmu_map_general_event()
684 return &(*mipspmu.general_event_map)[idx]; mipspmu_map_general_event()
739 int idx, struct perf_sample_data *data, handle_associated_event()
742 struct perf_event *event = cpuc->events[idx]; handle_associated_event()
745 mipspmu_event_update(event, hwc, idx); handle_associated_event()
747 if (!mipspmu_event_set_period(event, hwc, idx)) handle_associated_event()
751 mipsxx_pmu_disable_event(idx); handle_associated_event()
1319 hwc->idx = -1; __hw_perf_event_init()
375 mipspmu_event_set_period(struct perf_event *event, struct hw_perf_event *hwc, int idx) mipspmu_event_set_period() argument
411 mipspmu_event_update(struct perf_event *event, struct hw_perf_event *hwc, int idx) mipspmu_event_update() argument
738 handle_associated_event(struct cpu_hw_events *cpuc, int idx, struct perf_sample_data *data, struct pt_regs *regs) handle_associated_event() argument
/linux-4.4.14/drivers/net/wireless/realtek/rtlwifi/rtl8192de/
H A Dfw.c374 u8 idx; _rtl92d_fill_h2c_command() local
473 for (idx = 0; idx < 4; idx++) _rtl92d_fill_h2c_command()
474 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92d_fill_h2c_command()
475 boxcontent[idx]); _rtl92d_fill_h2c_command()
480 for (idx = 0; idx < 4; idx++) _rtl92d_fill_h2c_command()
481 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92d_fill_h2c_command()
482 boxcontent[idx]); _rtl92d_fill_h2c_command()
487 for (idx = 0; idx < 4; idx++) _rtl92d_fill_h2c_command()
488 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92d_fill_h2c_command()
489 boxcontent[idx]); _rtl92d_fill_h2c_command()
495 for (idx = 0; idx < 2; idx++) _rtl92d_fill_h2c_command()
496 rtl_write_byte(rtlpriv, box_extreg + idx, _rtl92d_fill_h2c_command()
497 boxextcontent[idx]); _rtl92d_fill_h2c_command()
498 for (idx = 0; idx < 4; idx++) _rtl92d_fill_h2c_command()
499 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92d_fill_h2c_command()
500 boxcontent[idx]); _rtl92d_fill_h2c_command()
506 for (idx = 0; idx < 2; idx++) _rtl92d_fill_h2c_command()
507 rtl_write_byte(rtlpriv, box_extreg + idx, _rtl92d_fill_h2c_command()
508 boxextcontent[idx]); _rtl92d_fill_h2c_command()
509 for (idx = 0; idx < 4; idx++) _rtl92d_fill_h2c_command()
510 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92d_fill_h2c_command()
511 boxcontent[idx]); _rtl92d_fill_h2c_command()
550 u8 idx = 0; _rtl92d_cmd_send_packet() local
558 pdesc = &ring->desc[idx]; _rtl92d_cmd_send_packet()
/linux-4.4.14/drivers/input/serio/
H A Dhp_sdc.c193 curr->seq[curr->idx++] = status; hp_sdc_take()
194 curr->seq[curr->idx++] = data; hp_sdc_take()
208 curr->actidx = curr->idx; hp_sdc_take()
209 curr->idx++; hp_sdc_take()
326 curr->idx += hp_sdc.rqty; hp_sdc_tasklet()
342 curr->actidx = curr->idx; hp_sdc_tasklet()
343 curr->idx++; hp_sdc_tasklet()
355 int idx, curridx; hp_sdc_put() local
416 idx = curr->actidx; hp_sdc_put()
427 act = curr->seq[idx]; hp_sdc_put()
428 idx++; hp_sdc_put()
430 if (curr->idx >= curr->endidx) { hp_sdc_put()
442 if (curr->idx != idx) { hp_sdc_put()
443 idx++; hp_sdc_put()
447 hp_sdc_status_out8(curr->seq[idx]); hp_sdc_put()
448 curr->idx++; hp_sdc_put()
454 curr->idx++; hp_sdc_put()
460 qty = curr->seq[idx]; hp_sdc_put()
461 idx++; hp_sdc_put()
462 if (curr->idx - idx < qty) { hp_sdc_put()
463 hp_sdc_data_out8(curr->seq[curr->idx]); hp_sdc_put()
464 curr->idx++; hp_sdc_put()
466 if (curr->idx - idx >= qty && hp_sdc_put()
471 idx += qty; hp_sdc_put()
478 mask = curr->seq[idx]; hp_sdc_put()
479 if (idx != curr->idx) { hp_sdc_put()
480 idx++; hp_sdc_put()
481 idx += !!(mask & 1); hp_sdc_put()
482 idx += !!(mask & 2); hp_sdc_put()
483 idx += !!(mask & 4); hp_sdc_put()
484 idx += !!(mask & 8); hp_sdc_put()
489 w7[0] = (mask & 1) ? curr->seq[++idx] : hp_sdc.r7[0]; hp_sdc_put()
490 w7[1] = (mask & 2) ? curr->seq[++idx] : hp_sdc.r7[1]; hp_sdc_put()
491 w7[2] = (mask & 4) ? curr->seq[++idx] : hp_sdc.r7[2]; hp_sdc_put()
492 w7[3] = (mask & 8) ? curr->seq[++idx] : hp_sdc.r7[3]; hp_sdc_put()
508 idx++; hp_sdc_put()
512 curr->idx = idx; hp_sdc_put()
526 curr->idx = idx + 1; hp_sdc_put()
547 /* curr->idx should == idx at this point. */ hp_sdc_put()
548 postcmd = curr->seq[idx]; hp_sdc_put()
549 curr->idx++; hp_sdc_put()
553 hp_sdc.rqty = curr->seq[curr->idx]; hp_sdc_put()
555 curr->idx++; hp_sdc_put()
573 if (curr->idx >= curr->endidx) { /* This transaction is over. */ hp_sdc_put()
578 curr->actidx = idx + 1; hp_sdc_put()
579 curr->idx = idx + 2; hp_sdc_put()
616 this->idx = 1; __hp_sdc_enqueue_transaction()
900 t_sync.idx = 1; hp_sdc_init()
1044 tq_init.idx = 1; hp_sdc_register()
1072 tq_init.idx = 1; hp_sdc_register()
1096 tq_init.idx = 1; hp_sdc_register()
/linux-4.4.14/drivers/input/joystick/
H A Dwarrior.c54 int idx, len; member in struct:warrior
69 if (!warrior->idx) return; warrior_process_packet()
104 if (warrior->idx) warrior_process_packet(warrior); warrior_interrupt()
105 warrior->idx = 0; warrior_interrupt()
109 if (warrior->idx < warrior->len) warrior_interrupt()
110 warrior->data[warrior->idx++] = data; warrior_interrupt()
112 if (warrior->idx == warrior->len) { warrior_interrupt()
113 if (warrior->idx) warrior_process_packet(warrior); warrior_interrupt()
114 warrior->idx = 0; warrior_interrupt()
/linux-4.4.14/drivers/net/wireless/realtek/rtlwifi/rtl8192c/
H A Dfw_common.c295 u8 idx; _rtl92c_fill_h2c_command() local
398 for (idx = 0; idx < 4; idx++) { _rtl92c_fill_h2c_command()
399 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92c_fill_h2c_command()
400 boxcontent[idx]); _rtl92c_fill_h2c_command()
408 for (idx = 0; idx < 4; idx++) { _rtl92c_fill_h2c_command()
409 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92c_fill_h2c_command()
410 boxcontent[idx]); _rtl92c_fill_h2c_command()
418 for (idx = 0; idx < 4; idx++) { _rtl92c_fill_h2c_command()
419 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92c_fill_h2c_command()
420 boxcontent[idx]); _rtl92c_fill_h2c_command()
430 for (idx = 0; idx < 2; idx++) { _rtl92c_fill_h2c_command()
431 rtl_write_byte(rtlpriv, box_extreg + idx, _rtl92c_fill_h2c_command()
432 boxextcontent[idx]); _rtl92c_fill_h2c_command()
435 for (idx = 0; idx < 4; idx++) { _rtl92c_fill_h2c_command()
436 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92c_fill_h2c_command()
437 boxcontent[idx]); _rtl92c_fill_h2c_command()
447 for (idx = 0; idx < 2; idx++) { _rtl92c_fill_h2c_command()
448 rtl_write_byte(rtlpriv, box_extreg + idx, _rtl92c_fill_h2c_command()
449 boxextcontent[idx]); _rtl92c_fill_h2c_command()
452 for (idx = 0; idx < 4; idx++) { _rtl92c_fill_h2c_command()
453 rtl_write_byte(rtlpriv, box_reg + idx, _rtl92c_fill_h2c_command()
454 boxcontent[idx]); _rtl92c_fill_h2c_command()
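
The _rtl92c_fill_h2c_command() hits (and the _rtl92d_ and _rtl8723e_ variants above and below) all flush a staged command the same way: up to four bytes from boxcontent[] go to consecutive mailbox registers at box_reg, and longer commands first spill two bytes into box_extreg. A minimal sketch of that byte-at-a-time mailbox write follows; write_byte() stands in for rtl_write_byte(), the register bases are made up, and the exact payload split is an assumption for illustration.

#include <stdio.h>

#define BOX_REG    0x1d0	/* made-up mailbox base for the sketch */
#define BOX_EXTREG 0x88		/* made-up extension-register base */

static void write_byte(unsigned reg, unsigned char val)	/* stub for rtl_write_byte() */
{
	printf("reg %#x <- 0x%02x\n", reg, val);
}

/* Mirrors the fill_h2c_command() loop shape: long commands spill two
 * payload bytes into the extension registers, then the 4-byte box
 * (command id plus up to three payload bytes) is written with idx. */
static void fill_h2c(unsigned char cmd_id, const unsigned char *payload, int len)
{
	unsigned char boxcontent[4] = { cmd_id };
	unsigned char boxextcontent[2] = { 0, 0 };
	int idx;

	if (len > 3) {		/* assumed split for commands over 3 bytes */
		boxextcontent[0] = payload[0];
		boxextcontent[1] = payload[1];
		for (idx = 0; idx < 2; idx++)
			write_byte(BOX_EXTREG + idx, boxextcontent[idx]);
		payload += 2;
		len -= 2;
	}
	for (idx = 0; idx < len && idx < 3; idx++)
		boxcontent[idx + 1] = payload[idx];
	for (idx = 0; idx < 4; idx++)
		write_byte(BOX_REG + idx, boxcontent[idx]);
}

int main(void)
{
	const unsigned char payload[] = { 1, 2, 3, 4, 5 };
	fill_h2c(0x05, payload, 5);
	return 0;
}

Writing byte-by-byte lets idx double as both buffer offset and register offset, which is why every chip variant in these results uses the same loop shape.
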
/linux-4.4.14/drivers/net/wireless/realtek/rtlwifi/rtl8723ae/
H A Dfw.c66 u8 idx; _rtl8723e_fill_h2c_command() local
172 for (idx = 0; idx < 4; idx++) { _rtl8723e_fill_h2c_command()
173 rtl_write_byte(rtlpriv, box_reg + idx, _rtl8723e_fill_h2c_command()
174 boxcontent[idx]); _rtl8723e_fill_h2c_command()
182 for (idx = 0; idx < 4; idx++) { _rtl8723e_fill_h2c_command()
183 rtl_write_byte(rtlpriv, box_reg + idx, _rtl8723e_fill_h2c_command()
184 boxcontent[idx]); _rtl8723e_fill_h2c_command()
192 for (idx = 0; idx < 4; idx++) { _rtl8723e_fill_h2c_command()
193 rtl_write_byte(rtlpriv, box_reg + idx, _rtl8723e_fill_h2c_command()
194 boxcontent[idx]); _rtl8723e_fill_h2c_command()
204 for (idx = 0; idx < 2; idx++) { _rtl8723e_fill_h2c_command()
205 rtl_write_byte(rtlpriv, box_extreg + idx, _rtl8723e_fill_h2c_command()
206 boxextcontent[idx]); _rtl8723e_fill_h2c_command()
209 for (idx = 0; idx < 4; idx++) { _rtl8723e_fill_h2c_command()
210 rtl_write_byte(rtlpriv, box_reg + idx, _rtl8723e_fill_h2c_command()
211 boxcontent[idx]); _rtl8723e_fill_h2c_command()
221 for (idx = 0; idx < 2; idx++) { _rtl8723e_fill_h2c_command()
222 rtl_write_byte(rtlpriv, box_extreg + idx, _rtl8723e_fill_h2c_command()
223 boxextcontent[idx]); _rtl8723e_fill_h2c_command()
226 for (idx = 0; idx < 4; idx++) { _rtl8723e_fill_h2c_command()
227 rtl_write_byte(rtlpriv, box_reg + idx, _rtl8723e_fill_h2c_command()
228 boxcontent[idx]); _rtl8723e_fill_h2c_command()
/linux-4.4.14/security/tomoyo/
H A Dmemory.c88 * @idx: Index number.
93 const u8 idx) tomoyo_get_group()
100 if (!tomoyo_correct_word(group_name) || idx >= TOMOYO_MAX_GROUP) tomoyo_get_group()
107 list = &param->ns->group_list[idx]; list_for_each_entry()
192 int idx; tomoyo_mm_init() local
193 for (idx = 0; idx < TOMOYO_MAX_HASH; idx++) tomoyo_mm_init()
194 INIT_LIST_HEAD(&tomoyo_name_list[idx]); tomoyo_mm_init()
92 tomoyo_get_group(struct tomoyo_acl_param *param, const u8 idx) tomoyo_get_group() argument
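
tomoyo's idx plays two roles in memory.c: tomoyo_get_group() uses it to select one of the per-namespace group_list[] heads, and tomoyo_mm_init() uses it to empty every tomoyo_name_list hash bucket up front. A minimal sketch of that bucket-array pattern, with a toy singly linked list and a stand-in hash (the real code uses the kernel's list_head and name hash):

#include <stdio.h>

#define MAX_HASH 8	/* stands in for TOMOYO_MAX_HASH in this sketch */

struct name {
	struct name *next;
	const char *str;
};

static struct name *name_list[MAX_HASH];	/* one bucket per hash value */

/* Trivial stand-in hash, not the kernel's. */
static unsigned hash(const char *s)
{
	unsigned h = 0;
	while (*s)
		h = h * 31 + (unsigned char)*s++;
	return h % MAX_HASH;
}

static void insert(struct name *n)
{
	unsigned idx = hash(n->str);	/* idx selects the bucket */
	n->next = name_list[idx];
	name_list[idx] = n;
}

int main(void)
{
	static struct name a = { .str = "/bin/sh" };
	static struct name b = { .str = "/etc/passwd" };
	int idx;

	/* Mirrors tomoyo_mm_init(): empty every bucket before use. */
	for (idx = 0; idx < MAX_HASH; idx++)
		name_list[idx] = NULL;

	insert(&a);
	insert(&b);
	for (idx = 0; idx < MAX_HASH; idx++)
		for (struct name *n = name_list[idx]; n; n = n->next)
			printf("bucket %d: %s\n", idx, n->str);
	return 0;
}
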
/linux-4.4.14/arch/mips/include/asm/sibyte/
H A Dbcm1480_regs.h552 #define A_BCM1480_HR_BASE(idx) (A_BCM1480_HR_BASE_0 + ((idx)*BCM1480_HR_REGISTER_SPACING))
553 #define A_BCM1480_HR_REGISTER(idx, reg) (A_BCM1480_HR_BASE(idx) + (reg))
563 #define R_BCM1480_HR_RULE_OP(idx) (BCM1480_HR_OP_OFFSET + ((idx)*BCM1480_HR_RULE_SPACING))
564 #define R_BCM1480_HR_RULE_TYPE(idx) (BCM1480_HR_TYPE_OFFSET + ((idx)*BCM1480_HR_RULE_SPACING))
569 #define R_BCM1480_HR_HA_LEAF0(idx) (BCM1480_HR_LEAF_OFFSET + ((idx)*BCM1480_HR_LEAF_SPACING))
576 #define R_BCM1480_HR_PATH(idx) (BCM1480_HR_PATH_OFFSET + ((idx)*BCM1480_HR_PATH_SPACING))
583 #define R_BCM1480_HR_RT_WORD(idx) (BCM1480_HR_ROUTE_OFFSET + ((idx)*BCM1480_HR_ROUTE_SPACING))
601 #define A_BCM1480_PMI_LCL_BASE(idx) (A_BCM1480_PMI_LCL_0 + ((idx)*BCM1480_PM_LCL_REGISTER_SPACING))
602 #define A_BCM1480_PMI_LCL_REGISTER(idx, reg) (A_BCM1480_PMI_LCL_BASE(idx) + (reg))
603 #define A_BCM1480_PMO_LCL_BASE(idx) (A_BCM1480_PMO_LCL_0 + ((idx)*BCM1480_PM_LCL_REGISTER_SPACING))
604 #define A_BCM1480_PMO_LCL_REGISTER(idx, reg) (A_BCM1480_PMO_LCL_BASE(idx) + (reg))
723 #define A_BCM1480_HSP_BASE(idx) (A_BCM1480_HSP_BASE_0 + ((idx)*BCM1480_HSP_REGISTER_SPACING))
724 #define A_BCM1480_HSP_REGISTER(idx, reg) (A_BCM1480_HSP_BASE(idx) + (reg))
755 #define R_BCM1480_HSP_RX_PKT_RAMALLOC(idx) (R_BCM1480_HSP_RX_PKT_RAMALLOC_0 + 8*(idx))
773 #define R_BCM1480_HSP_RX_SPI_WATERMARK(idx) (R_BCM1480_HSP_RX_SPI_WATERMARK_0 + 8*(idx))
795 #define R_BCM1480_HSP_TX_PKT_RAMALLOC(idx) (R_BCM1480_HSP_TX_PKT_RAMALLOC_0 + 8*(idx))
807 #define R_BCM1480_HSP_TX_PKT_RXPHITCNT(idx) (R_BCM1480_HSP_TX_PKT_RXPHITCNT_0 + 8*(idx))
815 #define R_BCM1480_HSP_TX_PKT_TXPHITCNT(idx) (R_BCM1480_HSP_TX_PKT_TXPHITCNT_0 + 8*(idx))
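
Every bcm1480_regs.h hit is the same address-arithmetic idiom: a register address is a per-block base plus idx times a fixed spacing, composed through layered macros so that A_..._REGISTER(idx, reg) bottoms out in plain additions. A minimal sketch of the pattern with made-up base, spacing, and offset constants:

#include <stdio.h>

/* Made-up values; the macro pattern, not the silicon map, is the point. */
#define A_HR_BASE_0		0x10000000
#define HR_REGISTER_SPACING	0x1000

#define A_HR_BASE(idx)		(A_HR_BASE_0 + ((idx) * HR_REGISTER_SPACING))
#define A_HR_REGISTER(idx, reg)	(A_HR_BASE(idx) + (reg))

#define R_HR_RULE_OP		0x0100	/* offset within one block */

int main(void)
{
	int idx;

	/* Each block idx gets its own HR_REGISTER_SPACING-byte window. */
	for (idx = 0; idx < 4; idx++)
		printf("HR%d RULE_OP at %#lx\n", idx,
		       (unsigned long)A_HR_REGISTER(idx, R_HR_RULE_OP));
	return 0;
}

Keeping (idx) parenthesized inside the macros, as the header does, matters: without the parentheses an argument like i + 1 would bind to the multiplication instead of the whole index.
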
