Lines matching refs:dev — lines that reference the struct ene_device pointer "dev" in the ENE CIR driver (ene_ir.c)
50 static void ene_set_reg_addr(struct ene_device *dev, u16 reg) in ene_set_reg_addr() argument
52 outb(reg >> 8, dev->hw_io + ENE_ADDR_HI); in ene_set_reg_addr()
53 outb(reg & 0xFF, dev->hw_io + ENE_ADDR_LO); in ene_set_reg_addr()
57 static u8 ene_read_reg(struct ene_device *dev, u16 reg) in ene_read_reg() argument
60 ene_set_reg_addr(dev, reg); in ene_read_reg()
61 retval = inb(dev->hw_io + ENE_IO); in ene_read_reg()
67 static void ene_write_reg(struct ene_device *dev, u16 reg, u8 value) in ene_write_reg() argument
70 ene_set_reg_addr(dev, reg); in ene_write_reg()
71 outb(value, dev->hw_io + ENE_IO); in ene_write_reg()
75 static void ene_set_reg_mask(struct ene_device *dev, u16 reg, u8 mask) in ene_set_reg_mask() argument
78 ene_set_reg_addr(dev, reg); in ene_set_reg_mask()
79 outb(inb(dev->hw_io + ENE_IO) | mask, dev->hw_io + ENE_IO); in ene_set_reg_mask()
83 static void ene_clear_reg_mask(struct ene_device *dev, u16 reg, u8 mask) in ene_clear_reg_mask() argument
86 ene_set_reg_addr(dev, reg); in ene_clear_reg_mask()
87 outb(inb(dev->hw_io + ENE_IO) & ~mask, dev->hw_io + ENE_IO); in ene_clear_reg_mask()
91 static void ene_set_clear_reg_mask(struct ene_device *dev, u16 reg, u8 mask, in ene_set_clear_reg_mask() argument
95 ene_set_reg_mask(dev, reg, mask); in ene_set_clear_reg_mask()
97 ene_clear_reg_mask(dev, reg, mask); in ene_set_clear_reg_mask()
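
These helpers implement an indirect, address-then-data register interface: the 16-bit register address is latched by two byte writes to ENE_ADDR_HI/ENE_ADDR_LO, and the data byte then moves through the single ENE_IO window. Below is a minimal userspace sketch of that pattern, with inb/outb replaced by a simulated register file; the port offsets and the register value used are stand-ins, not the driver's constants.

    #include <stdint.h>
    #include <stdio.h>

    /* Simulated chip: 64 KiB of 8-bit registers reached through a small I/O window. */
    static uint8_t regs[0x10000];
    static uint16_t latched_addr;       /* address latched by the HI/LO writes */

    /* I/O window offsets (stand-ins for ENE_ADDR_HI / ENE_ADDR_LO / ENE_IO). */
    enum { ADDR_HI = 0, ADDR_LO = 1, DATA = 2 };

    static void sim_outb(uint8_t val, int port)
    {
        if (port == ADDR_HI)
            latched_addr = (latched_addr & 0x00FF) | (uint16_t)(val << 8);
        else if (port == ADDR_LO)
            latched_addr = (latched_addr & 0xFF00) | val;
        else if (port == DATA)
            regs[latched_addr] = val;
    }

    static uint8_t sim_inb(int port)
    {
        return port == DATA ? regs[latched_addr] : 0;
    }

    /* Same shape as ene_write_reg()/ene_read_reg(): latch the address, then move data. */
    static void write_reg(uint16_t reg, uint8_t val)
    {
        sim_outb(reg >> 8, ADDR_HI);
        sim_outb(reg & 0xFF, ADDR_LO);
        sim_outb(val, DATA);
    }

    static uint8_t read_reg(uint16_t reg)
    {
        sim_outb(reg >> 8, ADDR_HI);
        sim_outb(reg & 0xFF, ADDR_LO);
        return sim_inb(DATA);
    }

    int main(void)
    {
        write_reg(0xF8F8, 0x5A);                    /* arbitrary register */
        printf("0x%02X\n", read_reg(0xF8F8));       /* prints 0x5A */
        return 0;
    }
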
101 static int ene_hw_detect(struct ene_device *dev) in ene_hw_detect() argument
107 ene_clear_reg_mask(dev, ENE_ECSTS, ENE_ECSTS_RSRVD); in ene_hw_detect()
108 chip_major = ene_read_reg(dev, ENE_ECVER_MAJOR); in ene_hw_detect()
109 chip_minor = ene_read_reg(dev, ENE_ECVER_MINOR); in ene_hw_detect()
110 ene_set_reg_mask(dev, ENE_ECSTS, ENE_ECSTS_RSRVD); in ene_hw_detect()
112 hw_revision = ene_read_reg(dev, ENE_ECHV); in ene_hw_detect()
113 old_ver = ene_read_reg(dev, ENE_HW_VER_OLD); in ene_hw_detect()
115 dev->pll_freq = (ene_read_reg(dev, ENE_PLLFRH) << 4) + in ene_hw_detect()
116 (ene_read_reg(dev, ENE_PLLFRL) >> 4); in ene_hw_detect()
119 dev->rx_period_adjust = in ene_hw_detect()
120 dev->pll_freq == ENE_DEFAULT_PLL_FREQ ? 2 : 4; in ene_hw_detect()
132 pr_notice("PLL freq = %d\n", dev->pll_freq); in ene_hw_detect()
140 dev->hw_revision = ENE_HW_C; in ene_hw_detect()
143 dev->hw_revision = ENE_HW_B; in ene_hw_detect()
146 dev->hw_revision = ENE_HW_D; in ene_hw_detect()
151 if (dev->hw_revision < ENE_HW_C) in ene_hw_detect()
154 fw_reg1 = ene_read_reg(dev, ENE_FW1); in ene_hw_detect()
155 fw_reg2 = ene_read_reg(dev, ENE_FW2); in ene_hw_detect()
159 dev->hw_use_gpio_0a = !!(fw_reg2 & ENE_FW2_GP0A); in ene_hw_detect()
160 dev->hw_learning_and_tx_capable = !!(fw_reg2 & ENE_FW2_LEARNING); in ene_hw_detect()
161 dev->hw_extra_buffer = !!(fw_reg1 & ENE_FW1_HAS_EXTRA_BUF); in ene_hw_detect()
163 if (dev->hw_learning_and_tx_capable) in ene_hw_detect()
164 dev->hw_fan_input = !!(fw_reg2 & ENE_FW2_FAN_INPUT); in ene_hw_detect()
168 if (dev->hw_learning_and_tx_capable) { in ene_hw_detect()
177 dev->hw_use_gpio_0a ? "40" : "0A"); in ene_hw_detect()
179 if (dev->hw_fan_input) in ene_hw_detect()
183 if (!dev->hw_fan_input) in ene_hw_detect()
185 dev->hw_use_gpio_0a ? "0A" : "40"); in ene_hw_detect()
187 if (dev->hw_extra_buffer) in ene_hw_detect()
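
ene_hw_detect() assembles a 12-bit PLL frequency from two 8-bit registers — the high register shifted left by four, plus the upper nibble of the low register — and then pulls capability flags out of the firmware registers. A short sketch of that bit assembly follows; the sample register values and the flag bit positions are made up for illustration, not the real ENE_FW2_* definitions.

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* Hypothetical raw register reads (ENE_PLLFRH / ENE_PLLFRL). */
        uint8_t pllfrh = 0x2F, pllfrl = 0xC0;

        /* Same composition as the driver: high byte << 4 plus upper nibble of low byte. */
        unsigned pll_freq = ((unsigned)pllfrh << 4) + (pllfrl >> 4);
        printf("PLL freq = %u\n", pll_freq);        /* 0x2F0 + 0xC = 764 */

        /* Capability bits extracted from a firmware register, as in ene_hw_detect().
         * The bit positions below are placeholders, not the driver's constants. */
        uint8_t fw_reg2 = 0x06;
        int use_gpio_0a = !!(fw_reg2 & 0x01);
        int learning_tx = !!(fw_reg2 & 0x02);
        int fan_input   = learning_tx && (fw_reg2 & 0x04);

        printf("gpio0A=%d learning/tx=%d fan=%d\n", use_gpio_0a, learning_tx, fan_input);
        return 0;
    }
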
193 static void ene_rx_setup_hw_buffer(struct ene_device *dev) in ene_rx_setup_hw_buffer() argument
197 ene_rx_read_hw_pointer(dev); in ene_rx_setup_hw_buffer()
198 dev->r_pointer = dev->w_pointer; in ene_rx_setup_hw_buffer()
200 if (!dev->hw_extra_buffer) { in ene_rx_setup_hw_buffer()
201 dev->buffer_len = ENE_FW_PACKET_SIZE * 2; in ene_rx_setup_hw_buffer()
205 tmp = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER); in ene_rx_setup_hw_buffer()
206 tmp |= ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER+1) << 8; in ene_rx_setup_hw_buffer()
207 dev->extra_buf1_address = tmp; in ene_rx_setup_hw_buffer()
209 dev->extra_buf1_len = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 2); in ene_rx_setup_hw_buffer()
211 tmp = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 3); in ene_rx_setup_hw_buffer()
212 tmp |= ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 4) << 8; in ene_rx_setup_hw_buffer()
213 dev->extra_buf2_address = tmp; in ene_rx_setup_hw_buffer()
215 dev->extra_buf2_len = ene_read_reg(dev, ENE_FW_SAMPLE_BUFFER + 5); in ene_rx_setup_hw_buffer()
217 dev->buffer_len = dev->extra_buf1_len + dev->extra_buf2_len + 8; in ene_rx_setup_hw_buffer()
221 dev->extra_buf1_address, dev->extra_buf1_len); in ene_rx_setup_hw_buffer()
223 dev->extra_buf2_address, dev->extra_buf2_len); in ene_rx_setup_hw_buffer()
225 pr_notice("Total buffer len = %d\n", dev->buffer_len); in ene_rx_setup_hw_buffer()
227 if (dev->buffer_len > 64 || dev->buffer_len < 16) in ene_rx_setup_hw_buffer()
230 if (dev->extra_buf1_address > 0xFBFC || in ene_rx_setup_hw_buffer()
231 dev->extra_buf1_address < 0xEC00) in ene_rx_setup_hw_buffer()
234 if (dev->extra_buf2_address > 0xFBFC || in ene_rx_setup_hw_buffer()
235 dev->extra_buf2_address < 0xEC00) in ene_rx_setup_hw_buffer()
238 if (dev->r_pointer > dev->buffer_len) in ene_rx_setup_hw_buffer()
241 ene_set_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_setup_hw_buffer()
245 dev->hw_extra_buffer = false; in ene_rx_setup_hw_buffer()
246 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_setup_hw_buffer()
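
The buffer discovery above reads two (address, length) pairs out of consecutive ENE_FW_SAMPLE_BUFFER registers and only trusts them after sanity checks; any failure falls back to the plain two-packet buffer. Here is a compact model of that validation, using the limits from the matched lines and an assumed packet size of 4 bytes for the fallback.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define FW_PACKET_SIZE 4    /* assumption: stand-in for ENE_FW_PACKET_SIZE */

    struct rx_buf {
        uint16_t buf1_addr, buf2_addr;
        uint8_t  buf1_len,  buf2_len;
        unsigned buffer_len;
        bool     extra_buffer;
    };

    /* Mirrors the checks in ene_rx_setup_hw_buffer(): the combined length must be
     * 16..64 bytes and both windows must live in the 0xEC00..0xFBFC register range. */
    static void validate(struct rx_buf *b)
    {
        b->buffer_len = b->buf1_len + b->buf2_len + 8;
        b->extra_buffer = true;

        if (b->buffer_len > 64 || b->buffer_len < 16)
            b->extra_buffer = false;
        if (b->buf1_addr > 0xFBFC || b->buf1_addr < 0xEC00)
            b->extra_buffer = false;
        if (b->buf2_addr > 0xFBFC || b->buf2_addr < 0xEC00)
            b->extra_buffer = false;

        if (!b->extra_buffer)                       /* fall back to two packets */
            b->buffer_len = FW_PACKET_SIZE * 2;
    }

    int main(void)
    {
        struct rx_buf b = { 0xF800, 0xF850, 24, 24 };

        validate(&b);
        printf("extra=%d len=%u\n", b.extra_buffer, b.buffer_len);  /* extra=1 len=56 */
        return 0;
    }
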
251 static void ene_rx_restore_hw_buffer(struct ene_device *dev) in ene_rx_restore_hw_buffer() argument
253 if (!dev->hw_extra_buffer) in ene_rx_restore_hw_buffer()
256 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 0, in ene_rx_restore_hw_buffer()
257 dev->extra_buf1_address & 0xFF); in ene_rx_restore_hw_buffer()
258 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 1, in ene_rx_restore_hw_buffer()
259 dev->extra_buf1_address >> 8); in ene_rx_restore_hw_buffer()
260 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 2, dev->extra_buf1_len); in ene_rx_restore_hw_buffer()
262 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 3, in ene_rx_restore_hw_buffer()
263 dev->extra_buf2_address & 0xFF); in ene_rx_restore_hw_buffer()
264 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 4, in ene_rx_restore_hw_buffer()
265 dev->extra_buf2_address >> 8); in ene_rx_restore_hw_buffer()
266 ene_write_reg(dev, ENE_FW_SAMPLE_BUFFER + 5, in ene_rx_restore_hw_buffer()
267 dev->extra_buf2_len); in ene_rx_restore_hw_buffer()
268 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_EXTRA_BUF_HND); in ene_rx_restore_hw_buffer()
272 static void ene_rx_read_hw_pointer(struct ene_device *dev) in ene_rx_read_hw_pointer() argument
274 if (dev->hw_extra_buffer) in ene_rx_read_hw_pointer()
275 dev->w_pointer = ene_read_reg(dev, ENE_FW_RX_POINTER); in ene_rx_read_hw_pointer()
277 dev->w_pointer = ene_read_reg(dev, ENE_FW2) in ene_rx_read_hw_pointer()
281 dev->w_pointer, dev->r_pointer); in ene_rx_read_hw_pointer()
285 static int ene_rx_get_sample_reg(struct ene_device *dev) in ene_rx_get_sample_reg() argument
289 if (dev->r_pointer == dev->w_pointer) { in ene_rx_get_sample_reg()
291 ene_rx_read_hw_pointer(dev); in ene_rx_get_sample_reg()
294 if (dev->r_pointer == dev->w_pointer) { in ene_rx_get_sample_reg()
295 dbg_verbose("RB: end of data at %d", dev->r_pointer); in ene_rx_get_sample_reg()
299 dbg_verbose("RB: reading at offset %d", dev->r_pointer); in ene_rx_get_sample_reg()
300 r_pointer = dev->r_pointer; in ene_rx_get_sample_reg()
302 dev->r_pointer++; in ene_rx_get_sample_reg()
303 if (dev->r_pointer == dev->buffer_len) in ene_rx_get_sample_reg()
304 dev->r_pointer = 0; in ene_rx_get_sample_reg()
306 dbg_verbose("RB: next read will be from offset %d", dev->r_pointer); in ene_rx_get_sample_reg()
315 if (r_pointer < dev->extra_buf1_len) { in ene_rx_get_sample_reg()
317 return dev->extra_buf1_address + r_pointer; in ene_rx_get_sample_reg()
320 r_pointer -= dev->extra_buf1_len; in ene_rx_get_sample_reg()
322 if (r_pointer < dev->extra_buf2_len) { in ene_rx_get_sample_reg()
324 return dev->extra_buf2_address + r_pointer; in ene_rx_get_sample_reg()
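
ene_rx_get_sample_reg() consumes one byte position from a circular buffer — refreshing the hardware write pointer when it catches up, and wrapping r_pointer at buffer_len — then maps the logical offset onto a register address: the first few bytes land in the main sample-buffer packet area, the rest spill into the two extra buffers discovered earlier. A self-contained sketch of that translation; the main-buffer register, its 8-byte size and the window addresses below are placeholders.

    #include <stdint.h>
    #include <stdio.h>

    #define MAIN_BUF_REG 0xF8F0     /* stand-in for ENE_FW_SAMPLE_BUFFER */
    #define MAIN_BUF_LEN 8          /* assumed size of the main packet area */

    struct rx_rb {
        unsigned r_pointer, w_pointer, buffer_len;
        uint16_t buf1_addr, buf2_addr;
        unsigned buf1_len, buf2_len;
    };

    /* Returns the register address holding the next sample byte, or 0 when the ring
     * is empty, advancing and wrapping r_pointer like ene_rx_get_sample_reg(). */
    static int rb_next_sample_reg(struct rx_rb *rb)
    {
        unsigned off;

        if (rb->r_pointer == rb->w_pointer)
            return 0;                               /* caught up with the writer */

        off = rb->r_pointer;
        if (++rb->r_pointer == rb->buffer_len)
            rb->r_pointer = 0;                      /* wrap the circular buffer */

        if (off < MAIN_BUF_LEN)
            return MAIN_BUF_REG + off;              /* main packet area */
        off -= MAIN_BUF_LEN;

        if (off < rb->buf1_len)
            return rb->buf1_addr + off;             /* first extra window */
        off -= rb->buf1_len;

        if (off < rb->buf2_len)
            return rb->buf2_addr + off;             /* second extra window */

        return 0;                                   /* pointer ran off the layout */
    }

    int main(void)
    {
        struct rx_rb rb = {
            .r_pointer = 6, .w_pointer = 12, .buffer_len = 56,
            .buf1_addr = 0xF000, .buf2_addr = 0xF400,
            .buf1_len = 24, .buf2_len = 24,
        };
        int reg;

        while ((reg = rb_next_sample_reg(&rb)) != 0)
            printf("read reg 0x%04X\n", (unsigned)reg);
        return 0;
    }
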
332 static void ene_rx_sense_carrier(struct ene_device *dev) in ene_rx_sense_carrier() argument
337 int period = ene_read_reg(dev, ENE_CIRCAR_PRD); in ene_rx_sense_carrier()
338 int hperiod = ene_read_reg(dev, ENE_CIRCAR_HPRD); in ene_rx_sense_carrier()
355 if (dev->carrier_detect_enabled) { in ene_rx_sense_carrier()
359 ir_raw_event_store(dev->rdev, &ev); in ene_rx_sense_carrier()
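
ene_rx_sense_carrier() turns the hardware's measured carrier period and high-period into a frequency and a duty cycle before reporting them as a raw event. A rough sketch of that conversion follows; the 2 MHz timebase and the sample readings are assumptions made here, not values visible in the matched lines.

    #include <stdio.h>

    /* Hypothetical timebase for the carrier measurement counters; the real scale
     * factor lives in the driver, not in the lines matched above. */
    #define CARRIER_TIMEBASE_HZ 2000000

    int main(void)
    {
        int period  = 50;   /* pretend ENE_CIRCAR_PRD reading, in timebase ticks */
        int hperiod = 17;   /* pretend ENE_CIRCAR_HPRD reading (carrier-on part) */

        if (period <= 0)
            return 0;                               /* nothing measured yet */

        int carrier_hz = CARRIER_TIMEBASE_HZ / period;      /* 40000 Hz */
        int duty_cycle = hperiod * 100 / period;            /* 34 %     */

        printf("carrier = %d Hz, duty = %d%%\n", carrier_hz, duty_cycle);
        return 0;
    }
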
364 static void ene_rx_enable_cir_engine(struct ene_device *dev, bool enable) in ene_rx_enable_cir_engine() argument
366 ene_set_clear_reg_mask(dev, ENE_CIRCFG, in ene_rx_enable_cir_engine()
371 static void ene_rx_select_input(struct ene_device *dev, bool gpio_0a) in ene_rx_select_input() argument
373 ene_set_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_GPIO0A, gpio_0a); in ene_rx_select_input()
380 static void ene_rx_enable_fan_input(struct ene_device *dev, bool enable) in ene_rx_enable_fan_input() argument
382 if (!dev->hw_fan_input) in ene_rx_enable_fan_input()
386 ene_write_reg(dev, ENE_FAN_AS_IN1, 0); in ene_rx_enable_fan_input()
388 ene_write_reg(dev, ENE_FAN_AS_IN1, ENE_FAN_AS_IN1_EN); in ene_rx_enable_fan_input()
389 ene_write_reg(dev, ENE_FAN_AS_IN2, ENE_FAN_AS_IN2_EN); in ene_rx_enable_fan_input()
394 static void ene_rx_setup(struct ene_device *dev) in ene_rx_setup() argument
396 bool learning_mode = dev->learning_mode_enabled || in ene_rx_setup()
397 dev->carrier_detect_enabled; in ene_rx_setup()
404 ene_write_reg(dev, ENE_CIRCFG2, 0x00); in ene_rx_setup()
409 dev->pll_freq == ENE_DEFAULT_PLL_FREQ ? 1 : 2; in ene_rx_setup()
411 ene_write_reg(dev, ENE_CIRRLC_CFG, in ene_rx_setup()
415 if (dev->hw_revision < ENE_HW_C) in ene_rx_setup()
420 WARN_ON(!dev->hw_learning_and_tx_capable); in ene_rx_setup()
427 ene_rx_select_input(dev, !dev->hw_use_gpio_0a); in ene_rx_setup()
428 dev->rx_fan_input_inuse = false; in ene_rx_setup()
431 ene_set_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_CARR_DEMOD); in ene_rx_setup()
434 ene_write_reg(dev, ENE_CIRCAR_PULS, 0x63); in ene_rx_setup()
435 ene_set_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_CARR_DETECT, in ene_rx_setup()
436 dev->carrier_detect_enabled || debug); in ene_rx_setup()
438 if (dev->hw_fan_input) in ene_rx_setup()
439 dev->rx_fan_input_inuse = true; in ene_rx_setup()
441 ene_rx_select_input(dev, dev->hw_use_gpio_0a); in ene_rx_setup()
444 ene_clear_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_CARR_DEMOD); in ene_rx_setup()
445 ene_clear_reg_mask(dev, ENE_CIRCFG2, ENE_CIRCFG2_CARR_DETECT); in ene_rx_setup()
449 if (dev->rx_fan_input_inuse) { in ene_rx_setup()
450 dev->rdev->rx_resolution = US_TO_NS(ENE_FW_SAMPLE_PERIOD_FAN); in ene_rx_setup()
454 dev->rdev->min_timeout = dev->rdev->max_timeout = in ene_rx_setup()
458 dev->rdev->rx_resolution = US_TO_NS(sample_period); in ene_rx_setup()
465 dev->rdev->min_timeout = US_TO_NS(127 * sample_period); in ene_rx_setup()
466 dev->rdev->max_timeout = US_TO_NS(200000); in ene_rx_setup()
469 if (dev->hw_learning_and_tx_capable) in ene_rx_setup()
470 dev->rdev->tx_resolution = US_TO_NS(sample_period); in ene_rx_setup()
472 if (dev->rdev->timeout > dev->rdev->max_timeout) in ene_rx_setup()
473 dev->rdev->timeout = dev->rdev->max_timeout; in ene_rx_setup()
474 if (dev->rdev->timeout < dev->rdev->min_timeout) in ene_rx_setup()
475 dev->rdev->timeout = dev->rdev->min_timeout; in ene_rx_setup()
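
The tail of ene_rx_setup() derives the receiver timeout bounds from the sample period (127 samples is the shortest representable gap, 200 ms the ceiling) and clamps the currently configured timeout into that range. A small sketch of the clamping, with a stand-in for the kernel's US_TO_NS() and an assumed sample period.

    #include <stdio.h>

    #define US_TO_NS(us) ((us) * 1000ull)   /* stand-in for the kernel helper */

    int main(void)
    {
        unsigned long long sample_period = 75;          /* assumed sample period, us */

        /* Bounds as in ene_rx_setup(): the shortest gap is 127 samples, the
         * longest accepted idle time is 200 ms. */
        unsigned long long min_timeout = US_TO_NS(127 * sample_period);
        unsigned long long max_timeout = US_TO_NS(200000);

        unsigned long long timeout = US_TO_NS(150000);  /* current rdev->timeout */

        /* Clamp the configured timeout into the range the hardware can honour. */
        if (timeout > max_timeout)
            timeout = max_timeout;
        if (timeout < min_timeout)
            timeout = min_timeout;

        printf("timeout = %llu ns (min %llu, max %llu)\n",
               timeout, min_timeout, max_timeout);
        return 0;
    }
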
479 static void ene_rx_enable_hw(struct ene_device *dev) in ene_rx_enable_hw() argument
484 if (dev->hw_revision < ENE_HW_C) { in ene_rx_enable_hw()
485 ene_write_reg(dev, ENEB_IRQ, dev->irq << 1); in ene_rx_enable_hw()
486 ene_write_reg(dev, ENEB_IRQ_UNK1, 0x01); in ene_rx_enable_hw()
488 reg_value = ene_read_reg(dev, ENE_IRQ) & 0xF0; in ene_rx_enable_hw()
491 reg_value |= (dev->irq & ENE_IRQ_MASK); in ene_rx_enable_hw()
492 ene_write_reg(dev, ENE_IRQ, reg_value); in ene_rx_enable_hw()
496 ene_rx_enable_fan_input(dev, dev->rx_fan_input_inuse); in ene_rx_enable_hw()
497 ene_rx_enable_cir_engine(dev, !dev->rx_fan_input_inuse); in ene_rx_enable_hw()
500 ene_irq_status(dev); in ene_rx_enable_hw()
503 ene_set_reg_mask(dev, ENE_FW1, ENE_FW1_ENABLE | ENE_FW1_IRQ); in ene_rx_enable_hw()
506 ir_raw_event_set_idle(dev->rdev, true); in ene_rx_enable_hw()
510 static void ene_rx_enable(struct ene_device *dev) in ene_rx_enable() argument
512 ene_rx_enable_hw(dev); in ene_rx_enable()
513 dev->rx_enabled = true; in ene_rx_enable()
517 static void ene_rx_disable_hw(struct ene_device *dev) in ene_rx_disable_hw() argument
520 ene_rx_enable_cir_engine(dev, false); in ene_rx_disable_hw()
521 ene_rx_enable_fan_input(dev, false); in ene_rx_disable_hw()
524 ene_clear_reg_mask(dev, ENE_FW1, ENE_FW1_ENABLE | ENE_FW1_IRQ); in ene_rx_disable_hw()
525 ir_raw_event_set_idle(dev->rdev, true); in ene_rx_disable_hw()
529 static void ene_rx_disable(struct ene_device *dev) in ene_rx_disable() argument
531 ene_rx_disable_hw(dev); in ene_rx_disable()
532 dev->rx_enabled = false; in ene_rx_disable()
538 static void ene_rx_reset(struct ene_device *dev) in ene_rx_reset() argument
540 ene_clear_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_RX_EN); in ene_rx_reset()
541 ene_set_reg_mask(dev, ENE_CIRCFG, ENE_CIRCFG_RX_EN); in ene_rx_reset()
545 static void ene_tx_set_carrier(struct ene_device *dev) in ene_tx_set_carrier() argument
550 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_set_carrier()
552 ene_set_clear_reg_mask(dev, ENE_CIRCFG, in ene_tx_set_carrier()
553 ENE_CIRCFG_TX_CARR, dev->tx_period > 0); in ene_tx_set_carrier()
555 if (!dev->tx_period) in ene_tx_set_carrier()
558 BUG_ON(dev->tx_duty_cycle >= 100 || dev->tx_duty_cycle <= 0); in ene_tx_set_carrier()
560 tx_puls_width = dev->tx_period / (100 / dev->tx_duty_cycle); in ene_tx_set_carrier()
565 dbg("TX: pulse distance = %d * 500 ns", dev->tx_period); in ene_tx_set_carrier()
568 ene_write_reg(dev, ENE_CIRMOD_PRD, dev->tx_period | ENE_CIRMOD_PRD_POL); in ene_tx_set_carrier()
569 ene_write_reg(dev, ENE_CIRMOD_HPRD, tx_puls_width); in ene_tx_set_carrier()
571 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_set_carrier()
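
ene_tx_set_carrier() computes the carrier pulse width with integer arithmetic: 100 is divided by the duty cycle first, then the period by that factor, so a 50% duty cycle yields exactly half the period. A tiny sketch of that calculation, using the driver's default 32 x 500 ns period and adding a floor of one unit here as a safety guard.

    #include <stdio.h>

    int main(void)
    {
        int tx_period = 32;     /* carrier period in 500 ns units (the driver default) */
        int duty_cycle = 50;    /* percent; the driver asserts this is within 1..99 */

        /* Same integer arithmetic as ene_tx_set_carrier(): divide 100 by the duty
         * cycle first, then divide the period by that factor. */
        int tx_puls_width = tx_period / (100 / duty_cycle);

        if (!tx_puls_width)
            tx_puls_width = 1;                      /* never program a zero-width pulse */

        printf("period = %d * 500 ns, pulse = %d * 500 ns\n", tx_period, tx_puls_width);
        return 0;
    }
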
575 static void ene_tx_set_transmitters(struct ene_device *dev) in ene_tx_set_transmitters() argument
579 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_set_transmitters()
580 ene_set_clear_reg_mask(dev, ENE_GPIOFS8, ENE_GPIOFS8_GPIO41, in ene_tx_set_transmitters()
581 !!(dev->transmitter_mask & 0x01)); in ene_tx_set_transmitters()
582 ene_set_clear_reg_mask(dev, ENE_GPIOFS1, ENE_GPIOFS1_GPIO0D, in ene_tx_set_transmitters()
583 !!(dev->transmitter_mask & 0x02)); in ene_tx_set_transmitters()
584 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_set_transmitters()
588 static void ene_tx_enable(struct ene_device *dev) in ene_tx_enable() argument
590 u8 conf1 = ene_read_reg(dev, ENE_CIRCFG); in ene_tx_enable()
591 u8 fwreg2 = ene_read_reg(dev, ENE_FW2); in ene_tx_enable()
593 dev->saved_conf1 = conf1; in ene_tx_enable()
606 if (dev->hw_revision == ENE_HW_C) in ene_tx_enable()
611 ene_write_reg(dev, ENE_CIRCFG, conf1); in ene_tx_enable()
615 static void ene_tx_disable(struct ene_device *dev) in ene_tx_disable() argument
617 ene_write_reg(dev, ENE_CIRCFG, dev->saved_conf1); in ene_tx_disable()
618 dev->tx_buffer = NULL; in ene_tx_disable()
623 static void ene_tx_sample(struct ene_device *dev) in ene_tx_sample() argument
627 bool pulse = dev->tx_sample_pulse; in ene_tx_sample()
629 if (!dev->tx_buffer) { in ene_tx_sample()
635 if (!dev->tx_sample) { in ene_tx_sample()
637 if (dev->tx_pos == dev->tx_len) { in ene_tx_sample()
638 if (!dev->tx_done) { in ene_tx_sample()
640 dev->tx_done = true; in ene_tx_sample()
644 ene_tx_disable(dev); in ene_tx_sample()
645 complete(&dev->tx_complete); in ene_tx_sample()
650 sample = dev->tx_buffer[dev->tx_pos++]; in ene_tx_sample()
651 dev->tx_sample_pulse = !dev->tx_sample_pulse; in ene_tx_sample()
653 dev->tx_sample = DIV_ROUND_CLOSEST(sample, sample_period); in ene_tx_sample()
655 if (!dev->tx_sample) in ene_tx_sample()
656 dev->tx_sample = 1; in ene_tx_sample()
659 raw_tx = min(dev->tx_sample , (unsigned int)ENE_CIRRLC_OUT_MASK); in ene_tx_sample()
660 dev->tx_sample -= raw_tx; in ene_tx_sample()
667 ene_write_reg(dev, in ene_tx_sample()
668 dev->tx_reg ? ENE_CIRRLC_OUT1 : ENE_CIRRLC_OUT0, raw_tx); in ene_tx_sample()
670 dev->tx_reg = !dev->tx_reg; in ene_tx_sample()
674 mod_timer(&dev->tx_sim_timer, jiffies + HZ / 500); in ene_tx_sample()
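
ene_tx_sample() converts each pulse/space duration into hardware ticks with DIV_ROUND_CLOSEST, caps every write at the output-register mask, tags pulses with a flag bit, and alternates between the two output registers; the real driver emits one chunk per interrupt (or per simulated-timer tick), letting the hardware pace the writes. The sketch below condenses that into a loop per sample; the mask value, the pulse flag, the sample period and the buffer contents are all placeholders.

    #include <stdio.h>

    #define OUT_MASK  0x7F   /* placeholder for ENE_CIRRLC_OUT_MASK: max ticks per write */
    #define OUT_PULSE 0x80   /* placeholder for a "this chunk is a pulse" flag bit */

    #define DIV_ROUND_CLOSEST(x, d) (((x) + (d) / 2) / (d))

    /* Split one pulse/space duration (in us) into register-sized chunks, alternating
     * between two output registers, the way ene_tx_sample() feeds the hardware. */
    static void emit_sample(unsigned duration_us, unsigned sample_period,
                            int pulse, int *reg_toggle)
    {
        unsigned ticks = DIV_ROUND_CLOSEST(duration_us, sample_period);

        if (!ticks)
            ticks = 1;                              /* never emit a zero-length chunk */

        while (ticks) {
            unsigned raw = ticks > OUT_MASK ? OUT_MASK : ticks;

            ticks -= raw;
            if (pulse)
                raw |= OUT_PULSE;                   /* tag carrier-on chunks */

            printf("write 0x%02X to OUT%d\n", raw, *reg_toggle);
            *reg_toggle = !*reg_toggle;             /* alternate OUT0 / OUT1 */
        }
    }

    int main(void)
    {
        /* Hypothetical buffer of alternating pulse/space durations in microseconds. */
        unsigned tx_buffer[] = { 9000, 4500, 560 };
        unsigned sample_period = 75;                /* assumed firmware sample period */
        int reg = 0;

        for (unsigned i = 0; i < sizeof(tx_buffer) / sizeof(tx_buffer[0]); i++)
            emit_sample(tx_buffer[i], sample_period, !(i & 1), &reg);
        return 0;
    }
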
680 struct ene_device *dev = (struct ene_device *)data; in ene_tx_irqsim() local
683 spin_lock_irqsave(&dev->hw_lock, flags); in ene_tx_irqsim()
684 ene_tx_sample(dev); in ene_tx_irqsim()
685 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_tx_irqsim()
690 static int ene_irq_status(struct ene_device *dev) in ene_irq_status() argument
696 fw_flags2 = ene_read_reg(dev, ENE_FW2); in ene_irq_status()
698 if (dev->hw_revision < ENE_HW_C) { in ene_irq_status()
699 irq_status = ene_read_reg(dev, ENEB_IRQ_STATUS); in ene_irq_status()
704 ene_clear_reg_mask(dev, ENEB_IRQ_STATUS, ENEB_IRQ_STATUS_IR); in ene_irq_status()
708 irq_status = ene_read_reg(dev, ENE_IRQ); in ene_irq_status()
713 ene_write_reg(dev, ENE_IRQ, irq_status & ~ENE_IRQ_STATUS); in ene_irq_status()
714 ene_write_reg(dev, ENE_IRQ, irq_status & ~ENE_IRQ_STATUS); in ene_irq_status()
719 ene_write_reg(dev, ENE_FW2, fw_flags2 & ~ENE_FW2_RXIRQ); in ene_irq_status()
723 fw_flags1 = ene_read_reg(dev, ENE_FW1); in ene_irq_status()
725 ene_write_reg(dev, ENE_FW1, fw_flags1 & ~ENE_FW1_TXIRQ); in ene_irq_status()
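
ene_irq_status() reads the interrupt register and acknowledges a pending receive interrupt by writing the register back with the status bit cleared — note that the matched lines issue that acknowledge write twice. A toy model of the read-then-acknowledge step; the register content and the status bit position are invented.

    #include <stdint.h>
    #include <stdio.h>

    #define IRQ_STATUS 0x01     /* placeholder for ENE_IRQ_STATUS ("interrupt pending") */

    static uint8_t irq_reg = 0x0B;      /* pretend ENE_IRQ content: IRQ number + pending bit */

    static uint8_t read_reg(void)       { return irq_reg; }
    static void    write_reg(uint8_t v) { irq_reg = v; }

    int main(void)
    {
        uint8_t irq_status = read_reg();

        if (irq_status & IRQ_STATUS) {
            /* Acknowledge by writing the register back with the status bit
             * cleared; the driver performs this write twice, as seen above. */
            write_reg(irq_status & ~IRQ_STATUS);
            write_reg(irq_status & ~IRQ_STATUS);
            printf("RX interrupt acknowledged (reg now 0x%02X)\n", read_reg());
        }
        return 0;
    }
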
740 struct ene_device *dev = (struct ene_device *)data; in ene_isr() local
743 spin_lock_irqsave(&dev->hw_lock, flags); in ene_isr()
746 ene_rx_read_hw_pointer(dev); in ene_isr()
747 irq_status = ene_irq_status(dev); in ene_isr()
756 if (!dev->hw_learning_and_tx_capable) { in ene_isr()
760 ene_tx_sample(dev); in ene_isr()
768 if (dev->hw_learning_and_tx_capable) in ene_isr()
769 ene_rx_sense_carrier(dev); in ene_isr()
773 if (!dev->hw_extra_buffer) in ene_isr()
774 dev->r_pointer = dev->w_pointer == 0 ? ENE_FW_PACKET_SIZE : 0; in ene_isr()
778 reg = ene_rx_get_sample_reg(dev); in ene_isr()
784 hw_value = ene_read_reg(dev, reg); in ene_isr()
786 if (dev->rx_fan_input_inuse) { in ene_isr()
791 hw_value |= ene_read_reg(dev, reg + offset) << 8; in ene_isr()
803 if (dev->rx_period_adjust) { in ene_isr()
805 hw_sample /= (100 + dev->rx_period_adjust); in ene_isr()
809 if (!dev->hw_extra_buffer && !hw_sample) { in ene_isr()
810 dev->r_pointer = dev->w_pointer; in ene_isr()
818 ir_raw_event_store_with_filter(dev->rdev, &ev); in ene_isr()
821 ir_raw_event_handle(dev->rdev); in ene_isr()
823 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_isr()
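
After pulling a raw sample byte out of the buffer, the ISR scales it to a duration and, when rx_period_adjust is set, corrects it by 100 / (100 + adjust) percent to account for the PLL frequency measured in ene_hw_detect(). A small worked example of that scaling, with an assumed sample period and made-up readings.

    #include <stdio.h>

    int main(void)
    {
        unsigned hw_value = 0x34;       /* raw 8-bit duration read from the sample buffer */
        unsigned sample_period = 75;    /* assumed microseconds per hardware tick */
        unsigned rx_period_adjust = 2;  /* percent, set by ene_hw_detect() from the PLL freq */

        unsigned hw_sample = hw_value * sample_period;

        /* Same correction as ene_isr(): scale the duration by 100 / (100 + adjust). */
        if (rx_period_adjust) {
            hw_sample *= 100;
            hw_sample /= 100 + rx_period_adjust;
        }

        printf("sample duration = %u us\n", hw_sample);     /* 3823 us for these inputs */
        return 0;
    }
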
828 static void ene_setup_default_settings(struct ene_device *dev) in ene_setup_default_settings() argument
830 dev->tx_period = 32; in ene_setup_default_settings()
831 dev->tx_duty_cycle = 50; /*%*/ in ene_setup_default_settings()
832 dev->transmitter_mask = 0x03; in ene_setup_default_settings()
833 dev->learning_mode_enabled = learning_mode_force; in ene_setup_default_settings()
836 dev->rdev->timeout = US_TO_NS(150000); in ene_setup_default_settings()
840 static void ene_setup_hw_settings(struct ene_device *dev) in ene_setup_hw_settings() argument
842 if (dev->hw_learning_and_tx_capable) { in ene_setup_hw_settings()
843 ene_tx_set_carrier(dev); in ene_setup_hw_settings()
844 ene_tx_set_transmitters(dev); in ene_setup_hw_settings()
847 ene_rx_setup(dev); in ene_setup_hw_settings()
853 struct ene_device *dev = rdev->priv; in ene_open() local
856 spin_lock_irqsave(&dev->hw_lock, flags); in ene_open()
857 ene_rx_enable(dev); in ene_open()
858 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_open()
865 struct ene_device *dev = rdev->priv; in ene_close() local
867 spin_lock_irqsave(&dev->hw_lock, flags); in ene_close()
869 ene_rx_disable(dev); in ene_close()
870 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_close()
876 struct ene_device *dev = rdev->priv; in ene_set_tx_mask() local
886 dev->transmitter_mask = tx_mask; in ene_set_tx_mask()
887 ene_tx_set_transmitters(dev); in ene_set_tx_mask()
894 struct ene_device *dev = rdev->priv; in ene_set_tx_carrier() local
910 dev->tx_period = period; in ene_set_tx_carrier()
911 ene_tx_set_carrier(dev); in ene_set_tx_carrier()
918 struct ene_device *dev = rdev->priv; in ene_set_tx_duty_cycle() local
920 dev->tx_duty_cycle = duty_cycle; in ene_set_tx_duty_cycle()
921 ene_tx_set_carrier(dev); in ene_set_tx_duty_cycle()
928 struct ene_device *dev = rdev->priv; in ene_set_learning_mode() local
930 if (enable == dev->learning_mode_enabled) in ene_set_learning_mode()
933 spin_lock_irqsave(&dev->hw_lock, flags); in ene_set_learning_mode()
934 dev->learning_mode_enabled = enable; in ene_set_learning_mode()
935 ene_rx_disable(dev); in ene_set_learning_mode()
936 ene_rx_setup(dev); in ene_set_learning_mode()
937 ene_rx_enable(dev); in ene_set_learning_mode()
938 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_set_learning_mode()
944 struct ene_device *dev = rdev->priv; in ene_set_carrier_report() local
947 if (enable == dev->carrier_detect_enabled) in ene_set_carrier_report()
950 spin_lock_irqsave(&dev->hw_lock, flags); in ene_set_carrier_report()
951 dev->carrier_detect_enabled = enable; in ene_set_carrier_report()
952 ene_rx_disable(dev); in ene_set_carrier_report()
953 ene_rx_setup(dev); in ene_set_carrier_report()
954 ene_rx_enable(dev); in ene_set_carrier_report()
955 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_set_carrier_report()
962 struct ene_device *dev = rdev->priv; in ene_set_idle() local
965 ene_rx_reset(dev); in ene_set_idle()
973 struct ene_device *dev = rdev->priv; in ene_transmit() local
976 dev->tx_buffer = buf; in ene_transmit()
977 dev->tx_len = n; in ene_transmit()
978 dev->tx_pos = 0; in ene_transmit()
979 dev->tx_reg = 0; in ene_transmit()
980 dev->tx_done = 0; in ene_transmit()
981 dev->tx_sample = 0; in ene_transmit()
982 dev->tx_sample_pulse = false; in ene_transmit()
984 dbg("TX: %d samples", dev->tx_len); in ene_transmit()
986 spin_lock_irqsave(&dev->hw_lock, flags); in ene_transmit()
988 ene_tx_enable(dev); in ene_transmit()
991 ene_tx_sample(dev); in ene_transmit()
992 ene_tx_sample(dev); in ene_transmit()
994 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_transmit()
996 if (wait_for_completion_timeout(&dev->tx_complete, 2 * HZ) == 0) { in ene_transmit()
998 spin_lock_irqsave(&dev->hw_lock, flags); in ene_transmit()
999 ene_tx_disable(dev); in ene_transmit()
1000 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_transmit()
1011 struct ene_device *dev; in ene_probe() local
1014 dev = kzalloc(sizeof(struct ene_device), GFP_KERNEL); in ene_probe()
1016 if (!dev || !rdev) in ene_probe()
1023 dev->hw_io = -1; in ene_probe()
1024 dev->irq = -1; in ene_probe()
1033 spin_lock_init(&dev->hw_lock); in ene_probe()
1035 dev->hw_io = pnp_port_start(pnp_dev, 0); in ene_probe()
1036 dev->irq = pnp_irq(pnp_dev, 0); in ene_probe()
1039 pnp_set_drvdata(pnp_dev, dev); in ene_probe()
1040 dev->pnp_dev = pnp_dev; in ene_probe()
1047 error = ene_hw_detect(dev); in ene_probe()
1051 if (!dev->hw_learning_and_tx_capable && txsim) { in ene_probe()
1052 dev->hw_learning_and_tx_capable = true; in ene_probe()
1053 setup_timer(&dev->tx_sim_timer, ene_tx_irqsim, in ene_probe()
1054 (long unsigned int)dev); in ene_probe()
1058 if (!dev->hw_learning_and_tx_capable) in ene_probe()
1063 rdev->priv = dev; in ene_probe()
1071 if (dev->hw_learning_and_tx_capable) { in ene_probe()
1073 init_completion(&dev->tx_complete); in ene_probe()
1082 dev->rdev = rdev; in ene_probe()
1084 ene_rx_setup_hw_buffer(dev); in ene_probe()
1085 ene_setup_default_settings(dev); in ene_probe()
1086 ene_setup_hw_settings(dev); in ene_probe()
1088 device_set_wakeup_capable(&pnp_dev->dev, true); in ene_probe()
1089 device_set_wakeup_enable(&pnp_dev->dev, true); in ene_probe()
1097 if (!request_region(dev->hw_io, ENE_IO_SIZE, ENE_DRIVER_NAME)) { in ene_probe()
1101 if (request_irq(dev->irq, ene_isr, in ene_probe()
1102 IRQF_SHARED, ENE_DRIVER_NAME, (void *)dev)) { in ene_probe()
1110 release_region(dev->hw_io, ENE_IO_SIZE); in ene_probe()
1116 kfree(dev); in ene_probe()
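
The probe path claims its resources in order (device allocation, I/O region, IRQ) and its error labels unwind them in reverse, releasing the region and freeing the device on failure. Below is a userspace sketch of that goto-ladder cleanup pattern; the claim_* helpers are stand-ins that merely simulate success or failure, not real kernel calls.

    #include <stdio.h>
    #include <stdlib.h>

    /* Userspace stand-ins for the probe-time resources (I/O region, IRQ line). */
    static int claim_region(void) { return 1; }     /* pretend request_region() succeeded */
    static int claim_irq(void)    { return -1; }    /* pretend request_irq() failed */
    static void release_region_(void) { puts("release_region"); }

    int main(void)
    {
        char *dev = calloc(1, 64);                  /* stands in for the device structure */

        if (!dev)
            return 1;

        if (!claim_region())
            goto exit_free_dev;

        if (claim_irq() != 0)
            goto exit_release_region;               /* undo in reverse order of acquisition */

        puts("probe ok");
        return 0;

    exit_release_region:
        release_region_();
    exit_free_dev:
        free(dev);
        return 1;
    }
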
1123 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_remove() local
1126 spin_lock_irqsave(&dev->hw_lock, flags); in ene_remove()
1127 ene_rx_disable(dev); in ene_remove()
1128 ene_rx_restore_hw_buffer(dev); in ene_remove()
1129 spin_unlock_irqrestore(&dev->hw_lock, flags); in ene_remove()
1131 free_irq(dev->irq, dev); in ene_remove()
1132 release_region(dev->hw_io, ENE_IO_SIZE); in ene_remove()
1133 rc_unregister_device(dev->rdev); in ene_remove()
1134 kfree(dev); in ene_remove()
1138 static void ene_enable_wake(struct ene_device *dev, bool enable) in ene_enable_wake() argument
1141 ene_set_clear_reg_mask(dev, ENE_FW1, ENE_FW1_WAKE, enable); in ene_enable_wake()
1147 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_suspend() local
1148 bool wake = device_may_wakeup(&dev->pnp_dev->dev); in ene_suspend()
1150 if (!wake && dev->rx_enabled) in ene_suspend()
1151 ene_rx_disable_hw(dev); in ene_suspend()
1153 ene_enable_wake(dev, wake); in ene_suspend()
1159 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_resume() local
1160 ene_setup_hw_settings(dev); in ene_resume()
1162 if (dev->rx_enabled) in ene_resume()
1163 ene_rx_enable(dev); in ene_resume()
1165 ene_enable_wake(dev, false); in ene_resume()
1172 struct ene_device *dev = pnp_get_drvdata(pnp_dev); in ene_shutdown() local
1173 ene_enable_wake(dev, true); in ene_shutdown()