Lines matching references to "ucb" in the ucb1x00 core driver (each entry: source line, code, enclosing function):

53 void ucb1x00_io_set_dir(struct ucb1x00 *ucb, unsigned int in, unsigned int out)  in ucb1x00_io_set_dir()  argument
57 spin_lock_irqsave(&ucb->io_lock, flags); in ucb1x00_io_set_dir()
58 ucb->io_dir |= out; in ucb1x00_io_set_dir()
59 ucb->io_dir &= ~in; in ucb1x00_io_set_dir()
61 ucb1x00_reg_write(ucb, UCB_IO_DIR, ucb->io_dir); in ucb1x00_io_set_dir()
62 spin_unlock_irqrestore(&ucb->io_lock, flags); in ucb1x00_io_set_dir()
81 void ucb1x00_io_write(struct ucb1x00 *ucb, unsigned int set, unsigned int clear) in ucb1x00_io_write() argument
85 spin_lock_irqsave(&ucb->io_lock, flags); in ucb1x00_io_write()
86 ucb->io_out |= set; in ucb1x00_io_write()
87 ucb->io_out &= ~clear; in ucb1x00_io_write()
89 ucb1x00_reg_write(ucb, UCB_IO_DATA, ucb->io_out); in ucb1x00_io_write()
90 spin_unlock_irqrestore(&ucb->io_lock, flags); in ucb1x00_io_write()
105 unsigned int ucb1x00_io_read(struct ucb1x00 *ucb) in ucb1x00_io_read() argument
107 return ucb1x00_reg_read(ucb, UCB_IO_DATA); in ucb1x00_io_read()
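The three helpers above form the raw IO interface the core exports to ucb1x00 sub-drivers: ucb1x00_io_set_dir() updates the cached direction mask under io_lock before writing UCB_IO_DIR, ucb1x00_io_write() does the same for the output latch in UCB_IO_DATA, and ucb1x00_io_read() simply returns the pin state. Unlike the gpiolib callbacks listed below, none of them brackets the register access with ucb1x00_enable()/ucb1x00_disable(), so the caller must hold an enable across the calls. A minimal caller sketch, assuming the declarations and UCB_IO_* bit masks from include/linux/mfd/ucb1x00.h:

    #include <linux/printk.h>
    #include <linux/mfd/ucb1x00.h>

    /* Sketch only: drive IO0 high and sample IO1.  All names are assumed to
     * come from the ucb1x00 header; error handling is omitted. */
    static void example_toggle(struct ucb1x00 *ucb)
    {
        ucb1x00_enable(ucb);                            /* clock the SIB interface */
        ucb1x00_io_set_dir(ucb, UCB_IO_1, UCB_IO_0);    /* IO1 input, IO0 output */
        ucb1x00_io_write(ucb, UCB_IO_0, 0);             /* set IO0, clear nothing */
        if (ucb1x00_io_read(ucb) & UCB_IO_1)
            pr_info("IO1 is high\n");
        ucb1x00_disable(ucb);
    }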
112 struct ucb1x00 *ucb = container_of(chip, struct ucb1x00, gpio); in ucb1x00_gpio_set() local
115 spin_lock_irqsave(&ucb->io_lock, flags); in ucb1x00_gpio_set()
117 ucb->io_out |= 1 << offset; in ucb1x00_gpio_set()
119 ucb->io_out &= ~(1 << offset); in ucb1x00_gpio_set()
121 ucb1x00_enable(ucb); in ucb1x00_gpio_set()
122 ucb1x00_reg_write(ucb, UCB_IO_DATA, ucb->io_out); in ucb1x00_gpio_set()
123 ucb1x00_disable(ucb); in ucb1x00_gpio_set()
124 spin_unlock_irqrestore(&ucb->io_lock, flags); in ucb1x00_gpio_set()
129 struct ucb1x00 *ucb = container_of(chip, struct ucb1x00, gpio); in ucb1x00_gpio_get() local
132 ucb1x00_enable(ucb); in ucb1x00_gpio_get()
133 val = ucb1x00_reg_read(ucb, UCB_IO_DATA); in ucb1x00_gpio_get()
134 ucb1x00_disable(ucb); in ucb1x00_gpio_get()
141 struct ucb1x00 *ucb = container_of(chip, struct ucb1x00, gpio); in ucb1x00_gpio_direction_input() local
144 spin_lock_irqsave(&ucb->io_lock, flags); in ucb1x00_gpio_direction_input()
145 ucb->io_dir &= ~(1 << offset); in ucb1x00_gpio_direction_input()
146 ucb1x00_enable(ucb); in ucb1x00_gpio_direction_input()
147 ucb1x00_reg_write(ucb, UCB_IO_DIR, ucb->io_dir); in ucb1x00_gpio_direction_input()
148 ucb1x00_disable(ucb); in ucb1x00_gpio_direction_input()
149 spin_unlock_irqrestore(&ucb->io_lock, flags); in ucb1x00_gpio_direction_input()
157 struct ucb1x00 *ucb = container_of(chip, struct ucb1x00, gpio); in ucb1x00_gpio_direction_output() local
161 spin_lock_irqsave(&ucb->io_lock, flags); in ucb1x00_gpio_direction_output()
162 old = ucb->io_out; in ucb1x00_gpio_direction_output()
164 ucb->io_out |= mask; in ucb1x00_gpio_direction_output()
166 ucb->io_out &= ~mask; in ucb1x00_gpio_direction_output()
168 ucb1x00_enable(ucb); in ucb1x00_gpio_direction_output()
169 if (old != ucb->io_out) in ucb1x00_gpio_direction_output()
170 ucb1x00_reg_write(ucb, UCB_IO_DATA, ucb->io_out); in ucb1x00_gpio_direction_output()
172 if (!(ucb->io_dir & mask)) { in ucb1x00_gpio_direction_output()
173 ucb->io_dir |= mask; in ucb1x00_gpio_direction_output()
174 ucb1x00_reg_write(ucb, UCB_IO_DIR, ucb->io_dir); in ucb1x00_gpio_direction_output()
176 ucb1x00_disable(ucb); in ucb1x00_gpio_direction_output()
177 spin_unlock_irqrestore(&ucb->io_lock, flags); in ucb1x00_gpio_direction_output()
184 struct ucb1x00 *ucb = container_of(chip, struct ucb1x00, gpio); in ucb1x00_to_irq() local
186 return ucb->irq_base > 0 ? ucb->irq_base + offset : -ENXIO; in ucb1x00_to_irq()
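The gpiolib callbacks above recover the driver state with container_of(chip, struct ucb1x00, gpio) and mirror the cached io_out/io_dir masks into UCB_IO_DATA/UCB_IO_DIR, this time wrapped in ucb1x00_enable()/ucb1x00_disable() because gpiolib callers do not hold the enable themselves; ucb1x00_to_irq() maps a GPIO offset onto the demuxed interrupt range starting at irq_base. A hedged consumer sketch using the legacy integer GPIO API (the numbers are only valid when the board's platform data supplies gpio_base, as ucb1x00_probe() checks further down):

    #include <linux/gpio.h>

    /* Sketch only: pdata_gpio_base stands in for the gpio_base a board would
     * pass via its ucb1x00 platform data; the pin choice is arbitrary. */
    static int example_claim_pin(int pdata_gpio_base)
    {
        int gpio = pdata_gpio_base + 3;     /* fourth UCB1x00 IO pin */
        int ret;

        ret = gpio_request(gpio, "ucb1x00-example");
        if (ret)
            return ret;
        gpio_direction_output(gpio, 1);     /* lands in ucb1x00_gpio_direction_output() */
        gpio_set_value(gpio, 0);            /* lands in ucb1x00_gpio_set() */
        gpio_free(gpio);
        return 0;
    }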
214 void ucb1x00_adc_enable(struct ucb1x00 *ucb) in ucb1x00_adc_enable() argument
216 mutex_lock(&ucb->adc_mutex); in ucb1x00_adc_enable()
218 ucb->adc_cr |= UCB_ADC_ENA; in ucb1x00_adc_enable()
220 ucb1x00_enable(ucb); in ucb1x00_adc_enable()
221 ucb1x00_reg_write(ucb, UCB_ADC_CR, ucb->adc_cr); in ucb1x00_adc_enable()
240 unsigned int ucb1x00_adc_read(struct ucb1x00 *ucb, int adc_channel, int sync) in ucb1x00_adc_read() argument
247 ucb1x00_reg_write(ucb, UCB_ADC_CR, ucb->adc_cr | adc_channel); in ucb1x00_adc_read()
248 ucb1x00_reg_write(ucb, UCB_ADC_CR, ucb->adc_cr | adc_channel | UCB_ADC_START); in ucb1x00_adc_read()
251 val = ucb1x00_reg_read(ucb, UCB_ADC_DATA); in ucb1x00_adc_read()
268 void ucb1x00_adc_disable(struct ucb1x00 *ucb) in ucb1x00_adc_disable() argument
270 ucb->adc_cr &= ~UCB_ADC_ENA; in ucb1x00_adc_disable()
271 ucb1x00_reg_write(ucb, UCB_ADC_CR, ucb->adc_cr); in ucb1x00_adc_disable()
272 ucb1x00_disable(ucb); in ucb1x00_adc_disable()
274 mutex_unlock(&ucb->adc_mutex); in ucb1x00_adc_disable()
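ucb1x00_adc_enable() takes adc_mutex and powers the converter by setting UCB_ADC_ENA; ucb1x00_adc_read() then starts a conversion on the requested channel, and ucb1x00_adc_disable() powers the block down and releases the mutex, so the three calls must always be paired by the caller. A client sketch, assuming the channel and sync constants from include/linux/mfd/ucb1x00.h:

    #include <linux/mfd/ucb1x00.h>

    /* Sketch only: one-shot reading of ADC input 0.  UCB_ADC_INP_AD0 and
     * UCB_NOSYNC are assumed to come from the ucb1x00 header. */
    static unsigned int example_read_ad0(struct ucb1x00 *ucb)
    {
        unsigned int val;

        ucb1x00_adc_enable(ucb);        /* takes adc_mutex, sets UCB_ADC_ENA */
        val = ucb1x00_adc_read(ucb, UCB_ADC_INP_AD0, UCB_NOSYNC);
        ucb1x00_adc_disable(ucb);       /* clears UCB_ADC_ENA, drops adc_mutex */

        return val;
    }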
287 struct ucb1x00 *ucb = irq_desc_get_handler_data(desc); in ucb1x00_irq() local
290 ucb1x00_enable(ucb); in ucb1x00_irq()
291 isr = ucb1x00_reg_read(ucb, UCB_IE_STATUS); in ucb1x00_irq()
292 ucb1x00_reg_write(ucb, UCB_IE_CLEAR, isr); in ucb1x00_irq()
293 ucb1x00_reg_write(ucb, UCB_IE_CLEAR, 0); in ucb1x00_irq()
297 generic_handle_irq(ucb->irq_base + i); in ucb1x00_irq()
298 ucb1x00_disable(ucb); in ucb1x00_irq()
301 static void ucb1x00_irq_update(struct ucb1x00 *ucb, unsigned mask) in ucb1x00_irq_update() argument
303 ucb1x00_enable(ucb); in ucb1x00_irq_update()
304 if (ucb->irq_ris_enbl & mask) in ucb1x00_irq_update()
305 ucb1x00_reg_write(ucb, UCB_IE_RIS, ucb->irq_ris_enbl & in ucb1x00_irq_update()
306 ucb->irq_mask); in ucb1x00_irq_update()
307 if (ucb->irq_fal_enbl & mask) in ucb1x00_irq_update()
308 ucb1x00_reg_write(ucb, UCB_IE_FAL, ucb->irq_fal_enbl & in ucb1x00_irq_update()
309 ucb->irq_mask); in ucb1x00_irq_update()
310 ucb1x00_disable(ucb); in ucb1x00_irq_update()
319 struct ucb1x00 *ucb = irq_data_get_irq_chip_data(data); in ucb1x00_irq_mask() local
320 unsigned mask = 1 << (data->irq - ucb->irq_base); in ucb1x00_irq_mask()
322 raw_spin_lock(&ucb->irq_lock); in ucb1x00_irq_mask()
323 ucb->irq_mask &= ~mask; in ucb1x00_irq_mask()
324 ucb1x00_irq_update(ucb, mask); in ucb1x00_irq_mask()
325 raw_spin_unlock(&ucb->irq_lock); in ucb1x00_irq_mask()
330 struct ucb1x00 *ucb = irq_data_get_irq_chip_data(data); in ucb1x00_irq_unmask() local
331 unsigned mask = 1 << (data->irq - ucb->irq_base); in ucb1x00_irq_unmask()
333 raw_spin_lock(&ucb->irq_lock); in ucb1x00_irq_unmask()
334 ucb->irq_mask |= mask; in ucb1x00_irq_unmask()
335 ucb1x00_irq_update(ucb, mask); in ucb1x00_irq_unmask()
336 raw_spin_unlock(&ucb->irq_lock); in ucb1x00_irq_unmask()
341 struct ucb1x00 *ucb = irq_data_get_irq_chip_data(data); in ucb1x00_irq_set_type() local
342 unsigned mask = 1 << (data->irq - ucb->irq_base); in ucb1x00_irq_set_type()
344 raw_spin_lock(&ucb->irq_lock); in ucb1x00_irq_set_type()
346 ucb->irq_ris_enbl |= mask; in ucb1x00_irq_set_type()
348 ucb->irq_ris_enbl &= ~mask; in ucb1x00_irq_set_type()
351 ucb->irq_fal_enbl |= mask; in ucb1x00_irq_set_type()
353 ucb->irq_fal_enbl &= ~mask; in ucb1x00_irq_set_type()
354 if (ucb->irq_mask & mask) { in ucb1x00_irq_set_type()
355 ucb1x00_reg_write(ucb, UCB_IE_RIS, ucb->irq_ris_enbl & in ucb1x00_irq_set_type()
356 ucb->irq_mask); in ucb1x00_irq_set_type()
357 ucb1x00_reg_write(ucb, UCB_IE_FAL, ucb->irq_fal_enbl & in ucb1x00_irq_set_type()
358 ucb->irq_mask); in ucb1x00_irq_set_type()
360 raw_spin_unlock(&ucb->irq_lock); in ucb1x00_irq_set_type()
367 struct ucb1x00 *ucb = irq_data_get_irq_chip_data(data); in ucb1x00_irq_set_wake() local
368 struct ucb1x00_plat_data *pdata = ucb->mcp->attached_device.platform_data; in ucb1x00_irq_set_wake()
369 unsigned mask = 1 << (data->irq - ucb->irq_base); in ucb1x00_irq_set_wake()
374 raw_spin_lock(&ucb->irq_lock); in ucb1x00_irq_set_wake()
376 ucb->irq_wake |= mask; in ucb1x00_irq_set_wake()
378 ucb->irq_wake &= ~mask; in ucb1x00_irq_set_wake()
379 raw_spin_unlock(&ucb->irq_lock); in ucb1x00_irq_set_wake()
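ucb1x00_irq() is the chained handler on the parent MCP line: it reads UCB_IE_STATUS, acknowledges through UCB_IE_CLEAR, and calls generic_handle_irq() for every set bit relative to irq_base, while the mask/unmask/set_type/set_wake callbacks above only edit the cached rising/falling enable masks under irq_lock and push them out via ucb1x00_irq_update(). A sub-driver therefore requests one of the sixteen demuxed interrupts by number, roughly like this (hedged sketch; UCB_IRQ_TSPX is assumed to be one of the offsets defined in include/linux/mfd/ucb1x00.h and example_isr is hypothetical):

    #include <linux/interrupt.h>
    #include <linux/mfd/ucb1x00.h>

    /* Sketch only: hook the touchscreen-pressure interrupt demuxed by the core. */
    static irqreturn_t example_isr(int irq, void *dev_id)
    {
        return IRQ_HANDLED;
    }

    static int example_hook_irq(struct ucb1x00 *ucb, void *dev_id)
    {
        return request_irq(ucb->irq_base + UCB_IRQ_TSPX, example_isr,
                           IRQF_TRIGGER_RISING, "ucb1x00-example", dev_id);
    }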
393 static int ucb1x00_add_dev(struct ucb1x00 *ucb, struct ucb1x00_driver *drv) in ucb1x00_add_dev() argument
402 dev->ucb = ucb; in ucb1x00_add_dev()
411 list_add_tail(&dev->dev_node, &ucb->devs); in ucb1x00_add_dev()
444 static int ucb1x00_detect_irq(struct ucb1x00 *ucb) in ucb1x00_detect_irq() argument
457 ucb1x00_reg_write(ucb, UCB_IE_RIS, UCB_IE_ADC); in ucb1x00_detect_irq()
458 ucb1x00_reg_write(ucb, UCB_IE_FAL, UCB_IE_ADC); in ucb1x00_detect_irq()
459 ucb1x00_reg_write(ucb, UCB_IE_CLEAR, 0xffff); in ucb1x00_detect_irq()
460 ucb1x00_reg_write(ucb, UCB_IE_CLEAR, 0); in ucb1x00_detect_irq()
465 ucb1x00_reg_write(ucb, UCB_ADC_CR, UCB_ADC_ENA); in ucb1x00_detect_irq()
466 ucb1x00_reg_write(ucb, UCB_ADC_CR, UCB_ADC_ENA | UCB_ADC_START); in ucb1x00_detect_irq()
471 while ((ucb1x00_reg_read(ucb, UCB_ADC_DATA) & UCB_ADC_DAT_VAL) == 0); in ucb1x00_detect_irq()
472 ucb1x00_reg_write(ucb, UCB_ADC_CR, 0); in ucb1x00_detect_irq()
477 ucb1x00_reg_write(ucb, UCB_IE_RIS, 0); in ucb1x00_detect_irq()
478 ucb1x00_reg_write(ucb, UCB_IE_FAL, 0); in ucb1x00_detect_irq()
479 ucb1x00_reg_write(ucb, UCB_IE_CLEAR, 0xffff); in ucb1x00_detect_irq()
480 ucb1x00_reg_write(ucb, UCB_IE_CLEAR, 0); in ucb1x00_detect_irq()
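ucb1x00_detect_irq() discovers which machine IRQ the chip is wired to: it arms both edges of the ADC interrupt, forces a dummy conversion so the line actually fires, busy-waits on UCB_ADC_DAT_VAL, then disarms and clears everything again. The lines elided from the listing use the kernel's classic autoprobe helpers, roughly as in this hedged sketch:

    #include <linux/interrupt.h>
    #include <linux/mfd/ucb1x00.h>

    /* Sketch only: the probe_irq_on()/probe_irq_off() bracket around the
     * register writes shown above; the ADC trigger sequence is abbreviated. */
    static int example_detect_irq(struct ucb1x00 *ucb)
    {
        unsigned long mask;

        mask = probe_irq_on();          /* start watching unclaimed IRQ lines */
        if (!mask)
            return NO_IRQ;

        /* ... arm UCB_IE_RIS/UCB_IE_FAL for UCB_IE_ADC and start a dummy
         * conversion so the interrupt line toggles, as listed above ... */

        return probe_irq_off(mask);     /* whichever line fired is our IRQ */
    }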
490 struct ucb1x00 *ucb = classdev_to_ucb1x00(dev); in ucb1x00_release() local
491 kfree(ucb); in ucb1x00_release()
503 struct ucb1x00 *ucb; in ucb1x00_probe() local
520 ucb = kzalloc(sizeof(struct ucb1x00), GFP_KERNEL); in ucb1x00_probe()
522 if (!ucb) in ucb1x00_probe()
525 device_initialize(&ucb->dev); in ucb1x00_probe()
526 ucb->dev.class = &ucb1x00_class; in ucb1x00_probe()
527 ucb->dev.parent = &mcp->attached_device; in ucb1x00_probe()
528 dev_set_name(&ucb->dev, "ucb1x00"); in ucb1x00_probe()
530 raw_spin_lock_init(&ucb->irq_lock); in ucb1x00_probe()
531 spin_lock_init(&ucb->io_lock); in ucb1x00_probe()
532 mutex_init(&ucb->adc_mutex); in ucb1x00_probe()
534 ucb->id = id; in ucb1x00_probe()
535 ucb->mcp = mcp; in ucb1x00_probe()
537 ret = device_add(&ucb->dev); in ucb1x00_probe()
541 ucb1x00_enable(ucb); in ucb1x00_probe()
542 ucb->irq = ucb1x00_detect_irq(ucb); in ucb1x00_probe()
543 ucb1x00_disable(ucb); in ucb1x00_probe()
544 if (ucb->irq == NO_IRQ) { in ucb1x00_probe()
545 dev_err(&ucb->dev, "IRQ probe failed\n"); in ucb1x00_probe()
550 ucb->gpio.base = -1; in ucb1x00_probe()
552 ucb->irq_base = irq_alloc_descs(-1, irq_base, 16, -1); in ucb1x00_probe()
553 if (ucb->irq_base < 0) { in ucb1x00_probe()
554 dev_err(&ucb->dev, "unable to allocate 16 irqs: %d\n", in ucb1x00_probe()
555 ucb->irq_base); in ucb1x00_probe()
556 ret = ucb->irq_base; in ucb1x00_probe()
561 unsigned irq = ucb->irq_base + i; in ucb1x00_probe()
564 irq_set_chip_data(irq, ucb); in ucb1x00_probe()
568 irq_set_irq_type(ucb->irq, IRQ_TYPE_EDGE_RISING); in ucb1x00_probe()
569 irq_set_handler_data(ucb->irq, ucb); in ucb1x00_probe()
570 irq_set_chained_handler(ucb->irq, ucb1x00_irq); in ucb1x00_probe()
573 ucb->gpio.label = dev_name(&ucb->dev); in ucb1x00_probe()
574 ucb->gpio.dev = &ucb->dev; in ucb1x00_probe()
575 ucb->gpio.owner = THIS_MODULE; in ucb1x00_probe()
576 ucb->gpio.base = pdata->gpio_base; in ucb1x00_probe()
577 ucb->gpio.ngpio = 10; in ucb1x00_probe()
578 ucb->gpio.set = ucb1x00_gpio_set; in ucb1x00_probe()
579 ucb->gpio.get = ucb1x00_gpio_get; in ucb1x00_probe()
580 ucb->gpio.direction_input = ucb1x00_gpio_direction_input; in ucb1x00_probe()
581 ucb->gpio.direction_output = ucb1x00_gpio_direction_output; in ucb1x00_probe()
582 ucb->gpio.to_irq = ucb1x00_to_irq; in ucb1x00_probe()
583 ret = gpiochip_add(&ucb->gpio); in ucb1x00_probe()
587 dev_info(&ucb->dev, "gpio_base not set so no gpiolib support"); in ucb1x00_probe()
589 mcp_set_drvdata(mcp, ucb); in ucb1x00_probe()
592 device_set_wakeup_capable(&ucb->dev, pdata->can_wakeup); in ucb1x00_probe()
594 INIT_LIST_HEAD(&ucb->devs); in ucb1x00_probe()
596 list_add_tail(&ucb->node, &ucb1x00_devices); in ucb1x00_probe()
598 ucb1x00_add_dev(ucb, drv); in ucb1x00_probe()
605 irq_set_chained_handler(ucb->irq, NULL); in ucb1x00_probe()
607 if (ucb->irq_base > 0) in ucb1x00_probe()
608 irq_free_descs(ucb->irq_base, 16); in ucb1x00_probe()
610 device_del(&ucb->dev); in ucb1x00_probe()
612 put_device(&ucb->dev); in ucb1x00_probe()
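ucb1x00_probe() allocates the per-chip structure, registers it as a class device, autodetects the chip IRQ, carves out sixteen interrupt descriptors with irq_alloc_descs(), points each at the driver's irq_chip, installs ucb1x00_irq() as the chained handler on the parent line, and registers the gpio_chip only when the platform data supplies a gpio_base; the error labels at the end unwind all of this in reverse order. The per-descriptor setup hinted at around the irq_set_chip_data() call above normally looks something like the following hedged sketch (ucb1x00_irqchip would be the driver's irq_chip, and handle_edge_irq is an assumed flow handler):

    #include <linux/irq.h>
    #include <linux/mfd/ucb1x00.h>

    /* Sketch only: wiring each of the 16 demuxed descriptors in the probe loop. */
    static void example_setup_irqs(struct ucb1x00 *ucb, struct irq_chip *chip)
    {
        int i;

        for (i = 0; i < 16; i++) {
            unsigned int irq = ucb->irq_base + i;

            irq_set_chip_and_handler(irq, chip, handle_edge_irq);
            irq_set_chip_data(irq, ucb);    /* matches the call in the listing */
        }
    }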
622 struct ucb1x00 *ucb = mcp_get_drvdata(mcp); in ucb1x00_remove() local
626 list_del(&ucb->node); in ucb1x00_remove()
627 list_for_each_safe(l, n, &ucb->devs) { in ucb1x00_remove()
633 if (ucb->gpio.base != -1) in ucb1x00_remove()
634 gpiochip_remove(&ucb->gpio); in ucb1x00_remove()
636 irq_set_chained_handler(ucb->irq, NULL); in ucb1x00_remove()
637 irq_free_descs(ucb->irq_base, 16); in ucb1x00_remove()
638 device_unregister(&ucb->dev); in ucb1x00_remove()
646 struct ucb1x00 *ucb; in ucb1x00_register_driver() local
651 list_for_each_entry(ucb, &ucb1x00_devices, node) { in ucb1x00_register_driver()
652 ucb1x00_add_dev(ucb, drv); in ucb1x00_register_driver()
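Sub-drivers attach through ucb1x00_register_driver(): the core walks its global device list and calls ucb1x00_add_dev() for every chip, which allocates a struct ucb1x00_dev binding the driver to that chip and appends it to ucb->devs. A hedged skeleton of such a sub-driver, assuming the struct ucb1x00_driver/ucb1x00_dev definitions and the add/remove callbacks declared in include/linux/mfd/ucb1x00.h:

    #include <linux/init.h>
    #include <linux/module.h>
    #include <linux/mfd/ucb1x00.h>

    /* Sketch only: minimal ucb1x00 sub-driver skeleton. */
    static int example_add(struct ucb1x00_dev *dev)
    {
        /* dev->ucb is the chip instance this binding belongs to */
        return 0;
    }

    static void example_remove(struct ucb1x00_dev *dev)
    {
    }

    static struct ucb1x00_driver example_driver = {
        .add    = example_add,
        .remove = example_remove,
    };

    static int __init example_init(void)
    {
        return ucb1x00_register_driver(&example_driver);
    }
    module_init(example_init);

    static void __exit example_exit(void)
    {
        ucb1x00_unregister_driver(&example_driver);
    }
    module_exit(example_exit);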
675 struct ucb1x00 *ucb = dev_get_drvdata(dev); in ucb1x00_suspend() local
679 list_for_each_entry(udev, &ucb->devs, dev_node) { in ucb1x00_suspend()
685 if (ucb->irq_wake) { in ucb1x00_suspend()
688 raw_spin_lock_irqsave(&ucb->irq_lock, flags); in ucb1x00_suspend()
689 ucb1x00_enable(ucb); in ucb1x00_suspend()
690 ucb1x00_reg_write(ucb, UCB_IE_RIS, ucb->irq_ris_enbl & in ucb1x00_suspend()
691 ucb->irq_wake); in ucb1x00_suspend()
692 ucb1x00_reg_write(ucb, UCB_IE_FAL, ucb->irq_fal_enbl & in ucb1x00_suspend()
693 ucb->irq_wake); in ucb1x00_suspend()
694 ucb1x00_disable(ucb); in ucb1x00_suspend()
695 raw_spin_unlock_irqrestore(&ucb->irq_lock, flags); in ucb1x00_suspend()
697 enable_irq_wake(ucb->irq); in ucb1x00_suspend()
707 struct ucb1x00 *ucb = dev_get_drvdata(dev); in ucb1x00_resume() local
710 if (!ucb->irq_wake && pdata && pdata->reset) in ucb1x00_resume()
713 ucb1x00_enable(ucb); in ucb1x00_resume()
714 ucb1x00_reg_write(ucb, UCB_IO_DATA, ucb->io_out); in ucb1x00_resume()
715 ucb1x00_reg_write(ucb, UCB_IO_DIR, ucb->io_dir); in ucb1x00_resume()
717 if (ucb->irq_wake) { in ucb1x00_resume()
720 raw_spin_lock_irqsave(&ucb->irq_lock, flags); in ucb1x00_resume()
721 ucb1x00_reg_write(ucb, UCB_IE_RIS, ucb->irq_ris_enbl & in ucb1x00_resume()
722 ucb->irq_mask); in ucb1x00_resume()
723 ucb1x00_reg_write(ucb, UCB_IE_FAL, ucb->irq_fal_enbl & in ucb1x00_resume()
724 ucb->irq_mask); in ucb1x00_resume()
725 raw_spin_unlock_irqrestore(&ucb->irq_lock, flags); in ucb1x00_resume()
727 disable_irq_wake(ucb->irq); in ucb1x00_resume()
729 ucb1x00_disable(ucb); in ucb1x00_resume()
732 list_for_each_entry(udev, &ucb->devs, dev_node) { in ucb1x00_resume()
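On suspend the core first lets every attached sub-driver quiesce, then, if any demuxed interrupt was marked as a wake source, narrows the UCB_IE_RIS/UCB_IE_FAL enables down to irq_wake and arms the parent line with enable_irq_wake(); resume calls the board's pdata->reset hook when nothing could have woken the chip, restores the cached io_out/io_dir registers, rewrites the full irq_mask enables, and balances the wake arming with disable_irq_wake(). The wake arm/disarm pairing follows the usual kernel contract, roughly (hedged sketch; the helper names are hypothetical):

    #include <linux/interrupt.h>

    /* Sketch only: wake_sources stands in for ucb->irq_wake. */
    static void example_suspend_wake(unsigned int parent_irq, unsigned int wake_sources)
    {
        if (wake_sources)
            enable_irq_wake(parent_irq);    /* keep the line armed across suspend */
    }

    static void example_resume_wake(unsigned int parent_irq, unsigned int wake_sources)
    {
        if (wake_sources)
            disable_irq_wake(parent_irq);   /* balance the suspend-time call */
    }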