od 26 arch/arm/include/asm/device.h struct omap_device *od;
od 75 arch/arm/mach-omap2/hsmmc.c struct omap_device *od;
od 115 arch/arm/mach-omap2/hsmmc.c od = omap_device_alloc(pdev, ohs, 1);
od 116 arch/arm/mach-omap2/hsmmc.c if (IS_ERR(od)) {
od 139 arch/arm/mach-omap2/hsmmc.c omap_device_delete(od);
od 20 arch/arm/mach-omap2/omap-iommu.c struct omap_device *od;
od 23 arch/arm/mach-omap2/omap-iommu.c od = to_omap_device(pdev);
od 24 arch/arm/mach-omap2/omap-iommu.c if (!od)
od 27 arch/arm/mach-omap2/omap-iommu.c if (od->hwmods_cnt != 1)
od 30 arch/arm/mach-omap2/omap-iommu.c pwrdm = omap_hwmod_get_pwrdm(od->hwmods[0]);
od 44 arch/arm/mach-omap2/omap_device.c static void _add_clkdev(struct omap_device *od, const char *clk_alias,
od 53 arch/arm/mach-omap2/omap_device.c dev_dbg(&od->pdev->dev, "Creating %s -> %s\n", clk_alias, clk_name);
od 55 arch/arm/mach-omap2/omap_device.c r = clk_get_sys(dev_name(&od->pdev->dev), clk_alias);
od 57 arch/arm/mach-omap2/omap_device.c dev_dbg(&od->pdev->dev,
od 73 arch/arm/mach-omap2/omap_device.c dev_name(&od->pdev->dev));
od 75 arch/arm/mach-omap2/omap_device.c rc = clk_add_alias(clk_alias, dev_name(&od->pdev->dev),
od 81 arch/arm/mach-omap2/omap_device.c dev_err(&od->pdev->dev,
od 84 arch/arm/mach-omap2/omap_device.c dev_err(&od->pdev->dev,
od 108 arch/arm/mach-omap2/omap_device.c static void _add_hwmod_clocks_clkdev(struct omap_device *od,
od 113 arch/arm/mach-omap2/omap_device.c _add_clkdev(od, "fck", oh->main_clk);
od 116 arch/arm/mach-omap2/omap_device.c _add_clkdev(od, oh->opt_clks[i].role, oh->opt_clks[i].clk);
od 135 arch/arm/mach-omap2/omap_device.c struct omap_device *od;
od 180 arch/arm/mach-omap2/omap_device.c od = omap_device_alloc(pdev, hwmods, oh_cnt);
od 181 arch/arm/mach-omap2/omap_device.c if (IS_ERR(od)) {
od 184 arch/arm/mach-omap2/omap_device.c ret = PTR_ERR(od);
od 218 arch/arm/mach-omap2/omap_device.c struct omap_device *od;
od 223 arch/arm/mach-omap2/omap_device.c if (pdev->archdata.od)
od 224 arch/arm/mach-omap2/omap_device.c omap_device_delete(pdev->archdata.od);
od 227 arch/arm/mach-omap2/omap_device.c od = to_omap_device(pdev);
od 228 arch/arm/mach-omap2/omap_device.c if (od && (od->_state == OMAP_DEVICE_STATE_ENABLED)) {
od 236 arch/arm/mach-omap2/omap_device.c od = to_omap_device(pdev);
od 237 arch/arm/mach-omap2/omap_device.c if (od && (od->_state == OMAP_DEVICE_STATE_ENABLED) &&
od 239 arch/arm/mach-omap2/omap_device.c od->_driver_status = BUS_NOTIFY_BIND_DRIVER;
od 249 arch/arm/mach-omap2/omap_device.c od = to_omap_device(pdev);
od 250 arch/arm/mach-omap2/omap_device.c if (od)
od 251 arch/arm/mach-omap2/omap_device.c od->_driver_status = event;
od 263 arch/arm/mach-omap2/omap_device.c static int _omap_device_enable_hwmods(struct omap_device *od)
od 268 arch/arm/mach-omap2/omap_device.c for (i = 0; i < od->hwmods_cnt; i++)
od 269 arch/arm/mach-omap2/omap_device.c ret |= omap_hwmod_enable(od->hwmods[i]);
od 280 arch/arm/mach-omap2/omap_device.c static int _omap_device_idle_hwmods(struct omap_device *od)
od 285 arch/arm/mach-omap2/omap_device.c for (i = 0; i < od->hwmods_cnt; i++)
od 286 arch/arm/mach-omap2/omap_device.c ret |= omap_hwmod_idle(od->hwmods[i]);
od 310 arch/arm/mach-omap2/omap_device.c struct omap_device *od;
od 313 arch/arm/mach-omap2/omap_device.c od = to_omap_device(pdev);
od 315 arch/arm/mach-omap2/omap_device.c if (od->hwmods_cnt)
od 316 arch/arm/mach-omap2/omap_device.c ret = omap_hwmod_get_context_loss_count(od->hwmods[0]);
od 337 arch/arm/mach-omap2/omap_device.c struct omap_device *od;
od 341 arch/arm/mach-omap2/omap_device.c od = kzalloc(sizeof(struct omap_device), GFP_KERNEL);
od 342 arch/arm/mach-omap2/omap_device.c if (!od) {
od 346 arch/arm/mach-omap2/omap_device.c od->hwmods_cnt = oh_cnt;
od 352 arch/arm/mach-omap2/omap_device.c od->hwmods = hwmods;
od 353 arch/arm/mach-omap2/omap_device.c od->pdev = pdev;
od 354 arch/arm/mach-omap2/omap_device.c pdev->archdata.od = od;
od 357 arch/arm/mach-omap2/omap_device.c hwmods[i]->od = od;
od 358 arch/arm/mach-omap2/omap_device.c _add_hwmod_clocks_clkdev(od, hwmods[i]);
od 361 arch/arm/mach-omap2/omap_device.c return od;
od 364 arch/arm/mach-omap2/omap_device.c kfree(od);
od 371 arch/arm/mach-omap2/omap_device.c void omap_device_delete(struct omap_device *od)
od 373 arch/arm/mach-omap2/omap_device.c if (!od)
od 376 arch/arm/mach-omap2/omap_device.c od->pdev->archdata.od = NULL;
od 377 arch/arm/mach-omap2/omap_device.c kfree(od->hwmods);
od 378 arch/arm/mach-omap2/omap_device.c kfree(od);
od 402 arch/arm/mach-omap2/omap_device.c if (!oh || !oh->od || !oh->od->pdev)
od 405 arch/arm/mach-omap2/omap_device.c np = oh->od->pdev->dev.of_node;
od 487 arch/arm/mach-omap2/omap_device.c struct omap_device *od;
od 524 arch/arm/mach-omap2/omap_device.c od = omap_device_alloc(pdev, &oh, 1);
od 525 arch/arm/mach-omap2/omap_device.c if (IS_ERR(od)) {
od 526 arch/arm/mach-omap2/omap_device.c ret = PTR_ERR(od);
od 541 arch/arm/mach-omap2/omap_device.c omap_device_delete(od);
od 596 arch/arm/mach-omap2/omap_device.c struct omap_device *od = to_omap_device(pdev);
od 600 arch/arm/mach-omap2/omap_device.c if (od->_driver_status != BUS_NOTIFY_BOUND_DRIVER)
od 608 arch/arm/mach-omap2/omap_device.c od->flags |= OMAP_DEVICE_SUSPENDED;
od 618 arch/arm/mach-omap2/omap_device.c struct omap_device *od = to_omap_device(pdev);
od 620 arch/arm/mach-omap2/omap_device.c if (od->flags & OMAP_DEVICE_SUSPENDED) {
od 621 arch/arm/mach-omap2/omap_device.c od->flags &= ~OMAP_DEVICE_SUSPENDED;
od 684 arch/arm/mach-omap2/omap_device.c struct omap_device *od;
od 686 arch/arm/mach-omap2/omap_device.c od = to_omap_device(pdev);
od 688 arch/arm/mach-omap2/omap_device.c if (od->_state == OMAP_DEVICE_STATE_ENABLED) {
od 691 arch/arm/mach-omap2/omap_device.c __func__, od->_state);
od 695 arch/arm/mach-omap2/omap_device.c ret = _omap_device_enable_hwmods(od);
od 698 arch/arm/mach-omap2/omap_device.c od->_state = OMAP_DEVICE_STATE_ENABLED;
od 715 arch/arm/mach-omap2/omap_device.c struct omap_device *od;
od 717 arch/arm/mach-omap2/omap_device.c od = to_omap_device(pdev);
od 719 arch/arm/mach-omap2/omap_device.c if (od->_state != OMAP_DEVICE_STATE_ENABLED) {
od 722 arch/arm/mach-omap2/omap_device.c __func__, od->_state);
od 726 arch/arm/mach-omap2/omap_device.c ret = _omap_device_idle_hwmods(od);
od 729 arch/arm/mach-omap2/omap_device.c od->_state = OMAP_DEVICE_STATE_IDLE;
od 748 arch/arm/mach-omap2/omap_device.c struct omap_device *od = to_omap_device(pdev);
od 752 arch/arm/mach-omap2/omap_device.c for (i = 0; i < od->hwmods_cnt; i++) {
od 753 arch/arm/mach-omap2/omap_device.c ret = omap_hwmod_assert_hardreset(od->hwmods[i], name);
od 776 arch/arm/mach-omap2/omap_device.c struct omap_device *od = to_omap_device(pdev);
od 780 arch/arm/mach-omap2/omap_device.c for (i = 0; i < od->hwmods_cnt; i++) {
od 781 arch/arm/mach-omap2/omap_device.c ret = omap_hwmod_deassert_hardreset(od->hwmods[i], name);
od 812 arch/arm/mach-omap2/omap_device.c if (!oh->od) {
od 818 arch/arm/mach-omap2/omap_device.c return &oh->od->pdev->dev;
od 843 arch/arm/mach-omap2/omap_device.c struct omap_device *od = to_omap_device(pdev);
od 846 arch/arm/mach-omap2/omap_device.c if (!od)
od 858 arch/arm/mach-omap2/omap_device.c for (i = 0; i < od->hwmods_cnt; i++)
od 859 arch/arm/mach-omap2/omap_device.c if (od->hwmods[i]->flags & HWMOD_INIT_NO_IDLE)
od 862 arch/arm/mach-omap2/omap_device.c if (od->_driver_status != BUS_NOTIFY_BOUND_DRIVER &&
od 863 arch/arm/mach-omap2/omap_device.c od->_driver_status != BUS_NOTIFY_BIND_DRIVER) {
od 864 arch/arm/mach-omap2/omap_device.c if (od->_state == OMAP_DEVICE_STATE_ENABLED) {
od 77 arch/arm/mach-omap2/omap_device.h void omap_device_delete(struct omap_device *od);
od 95 arch/arm/mach-omap2/omap_device.h return pdev ? pdev->archdata.od : NULL;
od 580 arch/arm/mach-omap2/omap_hwmod.h struct omap_device *od;
od 18 arch/mips/include/asm/txx9pio.h __u32 od;
od 204 arch/x86/include/asm/inst.h .byte \mod | (\opd1 & 7) | ((\opd2 & 7) << 3)
od 350 drivers/block/paride/bpck.c { int i, j, k, p, v, f, om, od;
od 354 drivers/block/paride/bpck.c om = pi->mode; od = pi->delay;
od 396 drivers/block/paride/bpck.c pi->mode = om; pi->delay = od;
od 138 drivers/clk/clk-aspeed.c u32 od = (val >> 4) & 0x1;
od 141 drivers/clk/clk-aspeed.c mult = (2 - od) * (n + 2);
od 187 drivers/clk/clk-ast2600.c u32 od = (val >> 4) & 0x1;
od 190 drivers/clk/clk-ast2600.c mult = (2 - od) * (m + 2);
od 77 drivers/clk/ingenic/cgu.c unsigned m, n, od_enc, od;
od 102 drivers/clk/ingenic/cgu.c for (od = 0; od < pll_info->od_max; od++) {
od 103 drivers/clk/ingenic/cgu.c if (pll_info->od_encoding[od] == od_enc)
od 106 drivers/clk/ingenic/cgu.c BUG_ON(od == pll_info->od_max);
od 107 drivers/clk/ingenic/cgu.c od++;
od 109 drivers/clk/ingenic/cgu.c return div_u64((u64)parent_rate * m, n * od);
od 118 drivers/clk/ingenic/cgu.c unsigned m, n, od;
od 121 drivers/clk/ingenic/cgu.c od = 1;
od 131 drivers/clk/ingenic/cgu.c m = (rate / MHZ) * od * n / (parent_rate / MHZ);
od 140 drivers/clk/ingenic/cgu.c *pod = od;
od 142 drivers/clk/ingenic/cgu.c return div_u64((u64)parent_rate * m, n * od);
od 176 drivers/clk/ingenic/cgu.c unsigned int m, n, od;
od 180 drivers/clk/ingenic/cgu.c &m, &n, &od);
od 195 drivers/clk/ingenic/cgu.c ctl |= pll_info->od_encoding[od - 1] << pll_info->od_shift;
od 89 drivers/clk/sirf/clk-common.c u32 od = ((cfg0 >> 19) & (BIT(4) - 1)) + 1;
od 91 drivers/clk/sirf/clk-common.c return fin / MHZ * nf / nr / od * MHZ;
od 98 drivers/clk/sirf/clk-common.c unsigned long fin, nf, nr, od;
od 118 drivers/clk/sirf/clk-common.c od = 1;
od 121 drivers/clk/sirf/clk-common.c do_div(dividend, nr * od);
od 130 drivers/clk/sirf/clk-common.c unsigned long fin, nf, nr, od, reg;
od 147 drivers/clk/sirf/clk-common.c od = 1;
od 149 drivers/clk/sirf/clk-common.c reg = (nf - 1) | ((nr - 1) << 13) | ((od - 1) << 19);
od 320 drivers/cpufreq/cpufreq_ondemand.c gov_show_one(od, powersave_bias);
od 699 drivers/dma/bcm2835-dma.c struct bcm2835_dmadev *od = to_bcm2835_dma_dev(chan->device);
od 752 drivers/dma/bcm2835-dma.c if (buf_addr == od->zero_page && !c->is_lite_channel)
od 847 drivers/dma/bcm2835-dma.c static void bcm2835_dma_free(struct bcm2835_dmadev *od)
od 851 drivers/dma/bcm2835-dma.c list_for_each_entry_safe(c, next, &od->ddev.channels,
od 857 drivers/dma/bcm2835-dma.c dma_unmap_page_attrs(od->ddev.dev, od->zero_page, PAGE_SIZE,
od 885 drivers/dma/bcm2835-dma.c struct bcm2835_dmadev *od;
od 904 drivers/dma/bcm2835-dma.c od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL);
od 905 drivers/dma/bcm2835-dma.c if (!od)
od 908 drivers/dma/bcm2835-dma.c pdev->dev.dma_parms = &od->dma_parms;
od 916 drivers/dma/bcm2835-dma.c od->base = base;
od 918 drivers/dma/bcm2835-dma.c dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
od 919 drivers/dma/bcm2835-dma.c dma_cap_set(DMA_PRIVATE, od->ddev.cap_mask);
od 920 drivers/dma/bcm2835-dma.c dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);
od 921 drivers/dma/bcm2835-dma.c dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask);
od 922 drivers/dma/bcm2835-dma.c od->ddev.device_alloc_chan_resources = bcm2835_dma_alloc_chan_resources;
od 923 drivers/dma/bcm2835-dma.c od->ddev.device_free_chan_resources = bcm2835_dma_free_chan_resources;
od 924 drivers/dma/bcm2835-dma.c od->ddev.device_tx_status = bcm2835_dma_tx_status;
od 925 drivers/dma/bcm2835-dma.c od->ddev.device_issue_pending = bcm2835_dma_issue_pending;
od 926 drivers/dma/bcm2835-dma.c od->ddev.device_prep_dma_cyclic = bcm2835_dma_prep_dma_cyclic;
od 927 drivers/dma/bcm2835-dma.c od->ddev.device_prep_slave_sg = bcm2835_dma_prep_slave_sg;
od 928 drivers/dma/bcm2835-dma.c od->ddev.device_prep_dma_memcpy = bcm2835_dma_prep_dma_memcpy;
od 929 drivers/dma/bcm2835-dma.c od->ddev.device_config = bcm2835_dma_slave_config;
od 930 drivers/dma/bcm2835-dma.c od->ddev.device_terminate_all = bcm2835_dma_terminate_all;
od 931 drivers/dma/bcm2835-dma.c od->ddev.device_synchronize = bcm2835_dma_synchronize;
od 932 drivers/dma/bcm2835-dma.c od->ddev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
od 933 drivers/dma/bcm2835-dma.c od->ddev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
od 934 drivers/dma/bcm2835-dma.c od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
od 936 drivers/dma/bcm2835-dma.c od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
od 937 drivers/dma/bcm2835-dma.c od->ddev.descriptor_reuse = true;
od 938 drivers/dma/bcm2835-dma.c od->ddev.dev = &pdev->dev;
od 939 drivers/dma/bcm2835-dma.c INIT_LIST_HEAD(&od->ddev.channels);
od 941 drivers/dma/bcm2835-dma.c platform_set_drvdata(pdev, od);
od 943 drivers/dma/bcm2835-dma.c od->zero_page = dma_map_page_attrs(od->ddev.dev, ZERO_PAGE(0), 0,
od 946 drivers/dma/bcm2835-dma.c if (dma_mapping_error(od->ddev.dev, od->zero_page)) {
od 999 drivers/dma/bcm2835-dma.c rc = bcm2835_dma_chan_init(od, i, irq[i], irq_flags);
od 1008 drivers/dma/bcm2835-dma.c bcm2835_dma_xlate, od);
od 1014 drivers/dma/bcm2835-dma.c rc = dma_async_device_register(&od->ddev);
od 1026 drivers/dma/bcm2835-dma.c bcm2835_dma_free(od);
od 1032 drivers/dma/bcm2835-dma.c struct bcm2835_dmadev *od = platform_get_drvdata(pdev);
od 1034 drivers/dma/bcm2835-dma.c dma_async_device_unregister(&od->ddev);
od 1035 drivers/dma/bcm2835-dma.c bcm2835_dma_free(od);
od 254 drivers/dma/owl-dma.c static void dma_update(struct owl_dma *od, u32 reg, u32 val, bool state)
od 258 drivers/dma/owl-dma.c regval = readl(od->base + reg);
od 265 drivers/dma/owl-dma.c writel(val, od->base + reg);
od 268 drivers/dma/owl-dma.c static void dma_writel(struct owl_dma *od, u32 reg, u32 data)
od 270 drivers/dma/owl-dma.c writel(data, od->base + reg);
od 273 drivers/dma/owl-dma.c static u32 dma_readl(struct owl_dma *od, u32 reg)
od 275 drivers/dma/owl-dma.c return readl(od->base + reg);
od 321 drivers/dma/owl-dma.c static void owl_dma_free_lli(struct owl_dma *od,
od 325 drivers/dma/owl-dma.c dma_pool_free(od->lli_pool, lli, lli->phys);
od 328 drivers/dma/owl-dma.c static struct owl_dma_lli *owl_dma_alloc_lli(struct owl_dma *od)
od 333 drivers/dma/owl-dma.c lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys);
od 428 drivers/dma/owl-dma.c static struct owl_dma_pchan *owl_dma_get_pchan(struct owl_dma *od,
od 435 drivers/dma/owl-dma.c for (i = 0; i < od->nr_pchans; i++) {
od 436 drivers/dma/owl-dma.c pchan = &od->pchans[i];
od 438 drivers/dma/owl-dma.c spin_lock_irqsave(&od->lock, flags);
od 441 drivers/dma/owl-dma.c spin_unlock_irqrestore(&od->lock, flags);
od 445 drivers/dma/owl-dma.c spin_unlock_irqrestore(&od->lock, flags);
od 451 drivers/dma/owl-dma.c static int owl_dma_pchan_busy(struct owl_dma *od, struct owl_dma_pchan *pchan)
od 455 drivers/dma/owl-dma.c val = dma_readl(od, OWL_DMA_IDLE_STAT);
od 460 drivers/dma/owl-dma.c static void owl_dma_terminate_pchan(struct owl_dma *od,
od 469 drivers/dma/owl-dma.c spin_lock_irqsave(&od->lock, flags);
od 470 drivers/dma/owl-dma.c dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), false);
od 472 drivers/dma/owl-dma.c irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0);
od 474 drivers/dma/owl-dma.c dev_warn(od->dma.dev,
od 477 drivers/dma/owl-dma.c dma_writel(od, OWL_DMA_IRQ_PD0, (1 << pchan->id));
od 482 drivers/dma/owl-dma.c spin_unlock_irqrestore(&od->lock, flags);
od 497 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(vchan->vc.chan.device);
od 510 drivers/dma/owl-dma.c while (owl_dma_pchan_busy(od, pchan))
od 530 drivers/dma/owl-dma.c spin_lock_irqsave(&od->lock, flags);
od 532 drivers/dma/owl-dma.c dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), true);
od 534 drivers/dma/owl-dma.c spin_unlock_irqrestore(&od->lock, flags);
od 544 drivers/dma/owl-dma.c static void owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan)
od 547 drivers/dma/owl-dma.c owl_dma_terminate_pchan(od, vchan->pchan);
od 554 drivers/dma/owl-dma.c struct owl_dma *od = dev_id;
od 561 drivers/dma/owl-dma.c spin_lock(&od->lock);
od 563 drivers/dma/owl-dma.c pending = dma_readl(od, OWL_DMA_IRQ_PD0);
od 566 drivers/dma/owl-dma.c for_each_set_bit(i, &pending, od->nr_pchans) {
od 567 drivers/dma/owl-dma.c pchan = &od->pchans[i];
od 572 drivers/dma/owl-dma.c dma_writel(od, OWL_DMA_IRQ_PD0, pending);
od 575 drivers/dma/owl-dma.c for (i = 0; i < od->nr_pchans; i++) {
od 576 drivers/dma/owl-dma.c pchan = &od->pchans[i];
od 581 drivers/dma/owl-dma.c dma_readl(od, OWL_DMA_IRQ_PD0);
od 583 drivers/dma/owl-dma.c global_irq_pending = dma_readl(od, OWL_DMA_IRQ_PD0);
od 586 drivers/dma/owl-dma.c dev_dbg(od->dma.dev,
od 598 drivers/dma/owl-dma.c spin_unlock(&od->lock);
od 600 drivers/dma/owl-dma.c for_each_set_bit(i, &pending, od->nr_pchans) {
od 603 drivers/dma/owl-dma.c pchan = &od->pchans[i];
od 607 drivers/dma/owl-dma.c dev_warn(od->dma.dev, "no vchan attached on pchan %d\n",
od 627 drivers/dma/owl-dma.c owl_dma_phy_free(od, vchan);
od 636 drivers/dma/owl-dma.c static void owl_dma_free_txd(struct owl_dma *od, struct owl_dma_txd *txd)
od 644 drivers/dma/owl-dma.c owl_dma_free_lli(od, lli);
od 651 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(vd->tx.chan->device);
od 654 drivers/dma/owl-dma.c owl_dma_free_txd(od, txd);
od 659 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(chan->device);
od 667 drivers/dma/owl-dma.c owl_dma_phy_free(od, vchan);
od 799 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(vchan->vc.chan.device);
od 802 drivers/dma/owl-dma.c pchan = owl_dma_get_pchan(od, vchan);
od 806 drivers/dma/owl-dma.c dev_dbg(od->dma.dev, "allocated pchan %d\n", pchan->id);
od 830 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(chan->device);
od 848 drivers/dma/owl-dma.c lli = owl_dma_alloc_lli(od);
od 870 drivers/dma/owl-dma.c owl_dma_free_txd(od, txd);
od 881 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(chan->device);
od 902 drivers/dma/owl-dma.c dev_err(od->dma.dev,
od 907 drivers/dma/owl-dma.c lli = owl_dma_alloc_lli(od);
od 934 drivers/dma/owl-dma.c owl_dma_free_txd(od, txd);
od 946 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(chan->device);
od 963 drivers/dma/owl-dma.c lli = owl_dma_alloc_lli(od);
od 996 drivers/dma/owl-dma.c owl_dma_free_txd(od, txd);
od 1009 drivers/dma/owl-dma.c static inline void owl_dma_free(struct owl_dma *od)
od 1015 drivers/dma/owl-dma.c next, &od->dma.channels, vc.chan.device_node) {
od 1024 drivers/dma/owl-dma.c struct owl_dma *od = ofdma->of_dma_data;
od 1029 drivers/dma/owl-dma.c if (drq > od->nr_vchans)
od 1032 drivers/dma/owl-dma.c chan = dma_get_any_slave_channel(&od->dma);
od 1045 drivers/dma/owl-dma.c struct owl_dma *od;
od 1049 drivers/dma/owl-dma.c od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL);
od 1050 drivers/dma/owl-dma.c if (!od)
od 1057 drivers/dma/owl-dma.c od->base = devm_ioremap_resource(&pdev->dev, res);
od 1058 drivers/dma/owl-dma.c if (IS_ERR(od->base))
od 1059 drivers/dma/owl-dma.c return PTR_ERR(od->base);
od 1076 drivers/dma/owl-dma.c od->nr_pchans = nr_channels;
od 1077 drivers/dma/owl-dma.c od->nr_vchans = nr_requests;
od 1081 drivers/dma/owl-dma.c platform_set_drvdata(pdev, od);
od 1082 drivers/dma/owl-dma.c spin_lock_init(&od->lock);
od 1084 drivers/dma/owl-dma.c dma_cap_set(DMA_MEMCPY, od->dma.cap_mask);
od 1085 drivers/dma/owl-dma.c dma_cap_set(DMA_SLAVE, od->dma.cap_mask);
od 1086 drivers/dma/owl-dma.c dma_cap_set(DMA_CYCLIC, od->dma.cap_mask);
od 1088 drivers/dma/owl-dma.c od->dma.dev = &pdev->dev;
od 1089 drivers/dma/owl-dma.c od->dma.device_free_chan_resources = owl_dma_free_chan_resources;
od 1090 drivers/dma/owl-dma.c od->dma.device_tx_status = owl_dma_tx_status;
od 1091 drivers/dma/owl-dma.c od->dma.device_issue_pending = owl_dma_issue_pending;
od 1092 drivers/dma/owl-dma.c od->dma.device_prep_dma_memcpy = owl_dma_prep_memcpy;
od 1093 drivers/dma/owl-dma.c od->dma.device_prep_slave_sg = owl_dma_prep_slave_sg;
od 1094 drivers/dma/owl-dma.c od->dma.device_prep_dma_cyclic = owl_prep_dma_cyclic;
od 1095 drivers/dma/owl-dma.c od->dma.device_config = owl_dma_config;
od 1096 drivers/dma/owl-dma.c od->dma.device_pause = owl_dma_pause;
od 1097 drivers/dma/owl-dma.c od->dma.device_resume = owl_dma_resume;
od 1098 drivers/dma/owl-dma.c od->dma.device_terminate_all = owl_dma_terminate_all;
od 1099 drivers/dma/owl-dma.c od->dma.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
od 1100 drivers/dma/owl-dma.c od->dma.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
od 1101 drivers/dma/owl-dma.c od->dma.directions = BIT(DMA_MEM_TO_MEM);
od 1102 drivers/dma/owl-dma.c od->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
od 1104 drivers/dma/owl-dma.c INIT_LIST_HEAD(&od->dma.channels);
od 1106 drivers/dma/owl-dma.c od->clk = devm_clk_get(&pdev->dev, NULL);
od 1107 drivers/dma/owl-dma.c if (IS_ERR(od->clk)) {
od 1109 drivers/dma/owl-dma.c return PTR_ERR(od->clk);
od 1117 drivers/dma/owl-dma.c od->irq = platform_get_irq(pdev, 0);
od 1118 drivers/dma/owl-dma.c ret = devm_request_irq(&pdev->dev, od->irq, owl_dma_interrupt, 0,
od 1119 drivers/dma/owl-dma.c dev_name(&pdev->dev), od);
od 1126 drivers/dma/owl-dma.c od->pchans = devm_kcalloc(&pdev->dev, od->nr_pchans,
od 1128 drivers/dma/owl-dma.c if (!od->pchans)
od 1131 drivers/dma/owl-dma.c for (i = 0; i < od->nr_pchans; i++) {
od 1132 drivers/dma/owl-dma.c struct owl_dma_pchan *pchan = &od->pchans[i];
od 1135 drivers/dma/owl-dma.c pchan->base = od->base + OWL_DMA_CHAN_BASE(i);
od 1139 drivers/dma/owl-dma.c od->vchans = devm_kcalloc(&pdev->dev, od->nr_vchans,
od 1141 drivers/dma/owl-dma.c if (!od->vchans)
od 1144 drivers/dma/owl-dma.c for (i = 0; i < od->nr_vchans; i++) {
od 1145 drivers/dma/owl-dma.c struct owl_dma_vchan *vchan = &od->vchans[i];
od 1148 drivers/dma/owl-dma.c vchan_init(&vchan->vc, &od->dma);
od 1152 drivers/dma/owl-dma.c od->lli_pool = dma_pool_create(dev_name(od->dma.dev), od->dma.dev,
od 1156 drivers/dma/owl-dma.c if (!od->lli_pool) {
od 1161 drivers/dma/owl-dma.c clk_prepare_enable(od->clk);
od 1163 drivers/dma/owl-dma.c ret = dma_async_device_register(&od->dma);
od 1171 drivers/dma/owl-dma.c owl_dma_of_xlate, od);
od 1180 drivers/dma/owl-dma.c dma_async_device_unregister(&od->dma);
od 1182 drivers/dma/owl-dma.c clk_disable_unprepare(od->clk);
od 1183 drivers/dma/owl-dma.c dma_pool_destroy(od->lli_pool);
od 1190 drivers/dma/owl-dma.c struct owl_dma *od = platform_get_drvdata(pdev);
od 1193 drivers/dma/owl-dma.c dma_async_device_unregister(&od->dma);
od 1196 drivers/dma/owl-dma.c dma_writel(od, OWL_DMA_IRQ_EN0, 0x0);
od 1199 drivers/dma/owl-dma.c devm_free_irq(od->dma.dev, od->irq, od);
od 1201 drivers/dma/owl-dma.c owl_dma_free(od);
od 1203 drivers/dma/owl-dma.c clk_disable_unprepare(od->clk);
od 231 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(vd->tx.chan->device);
od 236 drivers/dma/ti/omap-dma.c dma_pool_free(od->desc_pool, d->sg[i].t2_desc,
od 329 drivers/dma/ti/omap-dma.c static void omap_dma_glbl_write(struct omap_dmadev *od, unsigned reg, unsigned val)
od 331 drivers/dma/ti/omap-dma.c const struct omap_dma_reg *r = od->reg_map + reg;
od 335 drivers/dma/ti/omap-dma.c omap_dma_write(val, r->type, od->base + r->offset);
od 338 drivers/dma/ti/omap-dma.c static unsigned omap_dma_glbl_read(struct omap_dmadev *od, unsigned reg)
od 340 drivers/dma/ti/omap-dma.c const struct omap_dma_reg *r = od->reg_map + reg;
od 344 drivers/dma/ti/omap-dma.c return omap_dma_read(r->type, od->base + r->offset);
od 379 drivers/dma/ti/omap-dma.c static void omap_dma_assign(struct omap_dmadev *od, struct omap_chan *c,
od 382 drivers/dma/ti/omap-dma.c c->channel_base = od->base + od->plat->channel_stride * lch;
od 384 drivers/dma/ti/omap-dma.c od->lch_map[lch] = c;
od 389 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
od 392 drivers/dma/ti/omap-dma.c if (__dma_omap15xx(od->plat->dma_attr))
od 414 drivers/dma/ti/omap-dma.c } else if (od->ll123_supported) {
od 452 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
od 461 drivers/dma/ti/omap-dma.c if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) {
od 464 drivers/dma/ti/omap-dma.c sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG);
od 467 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, OCP_SYSCONFIG, val);
od 476 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, OCP_SYSCONFIG, sysconfig);
od 490 drivers/dma/ti/omap-dma.c if (!__dma_omap15xx(od->plat->dma_attr) && c->cyclic) {
od 598 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = devid;
od 601 drivers/dma/ti/omap-dma.c spin_lock(&od->irq_lock);
od 603 drivers/dma/ti/omap-dma.c status = omap_dma_glbl_read(od, IRQSTATUS_L1);
od 604 drivers/dma/ti/omap-dma.c status &= od->irq_enable_mask;
od 606 drivers/dma/ti/omap-dma.c spin_unlock(&od->irq_lock);
od 618 drivers/dma/ti/omap-dma.c c = od->lch_map[channel];
od 621 drivers/dma/ti/omap-dma.c dev_err(od->ddev.dev, "invalid channel %u\n", channel);
od 626 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, IRQSTATUS_L1, mask);
od 631 drivers/dma/ti/omap-dma.c spin_unlock(&od->irq_lock);
od 638 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(chan->device);
od 640 drivers/dma/ti/omap-dma.c struct device *dev = od->ddev.dev;
od 643 drivers/dma/ti/omap-dma.c if (od->legacy) {
od 654 drivers/dma/ti/omap-dma.c omap_dma_assign(od, c, c->dma_ch);
od 656 drivers/dma/ti/omap-dma.c if (!od->legacy) {
od 659 drivers/dma/ti/omap-dma.c spin_lock_irq(&od->irq_lock);
od 661 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, IRQSTATUS_L1, val);
od 662 drivers/dma/ti/omap-dma.c od->irq_enable_mask |= val;
od 663 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);
od 665 drivers/dma/ti/omap-dma.c val = omap_dma_glbl_read(od, IRQENABLE_L0);
od 667 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, IRQENABLE_L0, val);
od 668 drivers/dma/ti/omap-dma.c spin_unlock_irq(&od->irq_lock);
od 673 drivers/dma/ti/omap-dma.c if (__dma_omap16xx(od->plat->dma_attr)) {
od 684 drivers/dma/ti/omap-dma.c if (od->plat->errata & DMA_ERRATA_IFRAME_BUFFERING)
od 692 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(chan->device);
od 695 drivers/dma/ti/omap-dma.c if (!od->legacy) {
od 696 drivers/dma/ti/omap-dma.c spin_lock_irq(&od->irq_lock);
od 697 drivers/dma/ti/omap-dma.c od->irq_enable_mask &= ~BIT(c->dma_ch);
od 698 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);
od 699 drivers/dma/ti/omap-dma.c spin_unlock_irq(&od->irq_lock);
od 703 drivers/dma/ti/omap-dma.c od->lch_map[c->dma_ch] = NULL;
od 707 drivers/dma/ti/omap-dma.c dev_dbg(od->ddev.dev, "freeing channel %u used for %u\n", c->dma_ch,
od 751 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
od 755 drivers/dma/ti/omap-dma.c if (val == 0 && od->plat->errata & DMA_ERRATA_3_3)
od 763 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
od 766 drivers/dma/ti/omap-dma.c if (__dma_omap15xx(od->plat->dma_attr)) {
od 789 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
od 792 drivers/dma/ti/omap-dma.c if (__dma_omap15xx(od->plat->dma_attr)) {
od 889 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(chan->device);
od 1009 drivers/dma/ti/omap-dma.c if (od->plat->errata & DMA_ERRATA_PARALLEL_CHANNELS)
od 1025 drivers/dma/ti/omap-dma.c d->using_ll = od->ll123_supported;
od 1035 drivers/dma/ti/omap-dma.c osg->t2_desc = dma_pool_alloc(od->desc_pool, GFP_ATOMIC,
od 1057 drivers/dma/ti/omap-dma.c dma_pool_free(od->desc_pool, osg->t2_desc,
od 1071 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(chan->device);
od 1158 drivers/dma/ti/omap-dma.c if (__dma_omap15xx(od->plat->dma_attr))
od 1355 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(chan->device);
od 1360 drivers/dma/ti/omap-dma.c spin_lock_irqsave(&od->irq_lock, flags);
od 1400 drivers/dma/ti/omap-dma.c spin_unlock_irqrestore(&od->irq_lock, flags);
od 1408 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(chan->device);
od 1412 drivers/dma/ti/omap-dma.c spin_lock_irqsave(&od->irq_lock, flags);
od 1424 drivers/dma/ti/omap-dma.c spin_unlock_irqrestore(&od->irq_lock, flags);
od 1429 drivers/dma/ti/omap-dma.c static int omap_dma_chan_init(struct omap_dmadev *od)
od 1437 drivers/dma/ti/omap-dma.c c->reg_map = od->reg_map;
od 1439 drivers/dma/ti/omap-dma.c vchan_init(&c->vc, &od->ddev);
od 1444 drivers/dma/ti/omap-dma.c static void omap_dma_free(struct omap_dmadev *od)
od 1446 drivers/dma/ti/omap-dma.c while (!list_empty(&od->ddev.channels)) {
od 1447 drivers/dma/ti/omap-dma.c struct omap_chan *c = list_first_entry(&od->ddev.channels,
od 1462 drivers/dma/ti/omap-dma.c struct omap_dmadev *od;
od 1467 drivers/dma/ti/omap-dma.c od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL);
od 1468 drivers/dma/ti/omap-dma.c if (!od)
od 1472 drivers/dma/ti/omap-dma.c od->base = devm_ioremap_resource(&pdev->dev, res);
od 1473 drivers/dma/ti/omap-dma.c if (IS_ERR(od->base))
od 1474 drivers/dma/ti/omap-dma.c return PTR_ERR(od->base);
od 1476 drivers/dma/ti/omap-dma.c od->plat = omap_get_plat_info();
od 1477 drivers/dma/ti/omap-dma.c if (!od->plat)
od 1480 drivers/dma/ti/omap-dma.c od->reg_map = od->plat->reg_map;
od 1482 drivers/dma/ti/omap-dma.c dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
od 1483 drivers/dma/ti/omap-dma.c dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);
od 1484 drivers/dma/ti/omap-dma.c dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask);
od 1485 drivers/dma/ti/omap-dma.c dma_cap_set(DMA_INTERLEAVE, od->ddev.cap_mask);
od 1486 drivers/dma/ti/omap-dma.c od->ddev.device_alloc_chan_resources = omap_dma_alloc_chan_resources;
od 1487 drivers/dma/ti/omap-dma.c od->ddev.device_free_chan_resources = omap_dma_free_chan_resources;
od 1488 drivers/dma/ti/omap-dma.c od->ddev.device_tx_status = omap_dma_tx_status;
od 1489 drivers/dma/ti/omap-dma.c od->ddev.device_issue_pending = omap_dma_issue_pending;
od 1490 drivers/dma/ti/omap-dma.c od->ddev.device_prep_slave_sg = omap_dma_prep_slave_sg;
od 1491 drivers/dma/ti/omap-dma.c od->ddev.device_prep_dma_cyclic = omap_dma_prep_dma_cyclic;
od 1492 drivers/dma/ti/omap-dma.c od->ddev.device_prep_dma_memcpy = omap_dma_prep_dma_memcpy;
od 1493 drivers/dma/ti/omap-dma.c od->ddev.device_prep_interleaved_dma = omap_dma_prep_dma_interleaved;
od 1494 drivers/dma/ti/omap-dma.c od->ddev.device_config = omap_dma_slave_config;
od 1495 drivers/dma/ti/omap-dma.c od->ddev.device_pause = omap_dma_pause;
od 1496 drivers/dma/ti/omap-dma.c od->ddev.device_resume = omap_dma_resume;
od 1497 drivers/dma/ti/omap-dma.c od->ddev.device_terminate_all = omap_dma_terminate_all;
od 1498 drivers/dma/ti/omap-dma.c od->ddev.device_synchronize = omap_dma_synchronize;
od 1499 drivers/dma/ti/omap-dma.c od->ddev.src_addr_widths = OMAP_DMA_BUSWIDTHS;
od 1500 drivers/dma/ti/omap-dma.c od->ddev.dst_addr_widths = OMAP_DMA_BUSWIDTHS;
od 1501 drivers/dma/ti/omap-dma.c od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
od 1502 drivers/dma/ti/omap-dma.c if (__dma_omap15xx(od->plat->dma_attr))
od 1503 drivers/dma/ti/omap-dma.c od->ddev.residue_granularity =
od 1506 drivers/dma/ti/omap-dma.c od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
od 1507 drivers/dma/ti/omap-dma.c od->ddev.max_burst = SZ_16M - 1; /* CCEN: 24bit unsigned */
od 1508 drivers/dma/ti/omap-dma.c od->ddev.dev = &pdev->dev;
od 1509 drivers/dma/ti/omap-dma.c INIT_LIST_HEAD(&od->ddev.channels);
od 1510 drivers/dma/ti/omap-dma.c spin_lock_init(&od->lock);
od 1511 drivers/dma/ti/omap-dma.c spin_lock_init(&od->irq_lock);
od 1514 drivers/dma/ti/omap-dma.c od->dma_requests = OMAP_SDMA_REQUESTS;
od 1517 drivers/dma/ti/omap-dma.c &od->dma_requests)) {
od 1525 drivers/dma/ti/omap-dma.c lch_count = od->plat->dma_attr->lch_count;
od 1536 drivers/dma/ti/omap-dma.c od->lch_map = devm_kcalloc(&pdev->dev, lch_count, sizeof(*od->lch_map),
od 1538 drivers/dma/ti/omap-dma.c if (!od->lch_map)
od 1541 drivers/dma/ti/omap-dma.c for (i = 0; i < od->dma_requests; i++) {
od 1542 drivers/dma/ti/omap-dma.c rc = omap_dma_chan_init(od);
od 1544 drivers/dma/ti/omap-dma.c omap_dma_free(od);
od 1552 drivers/dma/ti/omap-dma.c od->legacy = true;
od 1555 drivers/dma/ti/omap-dma.c od->irq_enable_mask = 0;
od 1556 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, IRQENABLE_L1, 0);
od 1559 drivers/dma/ti/omap-dma.c IRQF_SHARED, "omap-dma-engine", od);
od 1561 drivers/dma/ti/omap-dma.c omap_dma_free(od);
od 1566 drivers/dma/ti/omap-dma.c if (omap_dma_glbl_read(od, CAPS_0) & CAPS_0_SUPPORT_LL123)
od 1567 drivers/dma/ti/omap-dma.c od->ll123_supported = true;
od 1569 drivers/dma/ti/omap-dma.c od->ddev.filter.map = od->plat->slave_map;
od 1570 drivers/dma/ti/omap-dma.c od->ddev.filter.mapcnt = od->plat->slavecnt;
od 1571 drivers/dma/ti/omap-dma.c od->ddev.filter.fn = omap_dma_filter_fn;
od 1573 drivers/dma/ti/omap-dma.c if (od->ll123_supported) {
od 1574 drivers/dma/ti/omap-dma.c od->desc_pool = dma_pool_create(dev_name(&pdev->dev),
od 1578 drivers/dma/ti/omap-dma.c if (!od->desc_pool) {
od 1581 drivers/dma/ti/omap-dma.c od->ll123_supported = false;
od 1585 drivers/dma/ti/omap-dma.c rc = dma_async_device_register(&od->ddev);
od 1589 drivers/dma/ti/omap-dma.c omap_dma_free(od);
od 1593 drivers/dma/ti/omap-dma.c platform_set_drvdata(pdev, od);
od 1596 drivers/dma/ti/omap-dma.c omap_dma_info.dma_cap = od->ddev.cap_mask;
od 1603 drivers/dma/ti/omap-dma.c dma_async_device_unregister(&od->ddev);
od 1604 drivers/dma/ti/omap-dma.c omap_dma_free(od);
od 1609 drivers/dma/ti/omap-dma.c od->ll123_supported ? " (LinkedList1/2/3 supported)" : "");
od 1616 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = platform_get_drvdata(pdev);
od 1623 drivers/dma/ti/omap-dma.c devm_free_irq(&pdev->dev, irq, od);
od 1625 drivers/dma/ti/omap-dma.c dma_async_device_unregister(&od->ddev);
od 1627 drivers/dma/ti/omap-dma.c if (!od->legacy) {
od 1629 drivers/dma/ti/omap-dma.c omap_dma_glbl_write(od, IRQENABLE_L0, 0);
od 1632 drivers/dma/ti/omap-dma.c if (od->ll123_supported)
od 1633 drivers/dma/ti/omap-dma.c dma_pool_destroy(od->desc_pool);
od 1635 drivers/dma/ti/omap-dma.c omap_dma_free(od);
od 1662 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(chan->device);
od 1666 drivers/dma/ti/omap-dma.c if (req <= od->dma_requests) {
od 435 drivers/gpu/drm/meson/meson_vclk.c static inline unsigned int pll_od_to_reg(unsigned int od)
od 437 drivers/gpu/drm/meson/meson_vclk.c switch (od) {
od 662 drivers/gpu/drm/meson/meson_vclk.c unsigned int *od)
od 665 drivers/gpu/drm/meson/meson_vclk.c for (*od = 16 ; *od > 1 ; *od >>= 1) {
od 666 drivers/gpu/drm/meson/meson_vclk.c *m = meson_hdmi_pll_get_m(priv, freq * *od);
od 669 drivers/gpu/drm/meson/meson_vclk.c *frac = meson_hdmi_pll_get_frac(priv, *m, freq * *od);
od 672 drivers/gpu/drm/meson/meson_vclk.c freq, *m, *frac, *od);
od 685 drivers/gpu/drm/meson/meson_vclk.c unsigned int od, m, frac;
od 690 drivers/gpu/drm/meson/meson_vclk.c if (meson_hdmi_pll_find_params(priv, freq, &m, &frac, &od))
od 701 drivers/gpu/drm/meson/meson_vclk.c unsigned int od, m, frac, od1, od2, od3;
od 703 drivers/gpu/drm/meson/meson_vclk.c if (meson_hdmi_pll_find_params(priv, pll_freq, &m, &frac, &od)) {
od 705 drivers/gpu/drm/meson/meson_vclk.c if (od < 4) {
od 709 drivers/gpu/drm/meson/meson_vclk.c od2 = od / 4;
od 710 drivers/gpu/drm/meson/meson_vclk.c od1 = od / od2;
od 506 drivers/hwmon/ibmpowernv.c static void populate_sensor(struct sensor_data *sdata, int od, int hd, int sid,
od 519 drivers/hwmon/ibmpowernv.c sdata->opal_index = od;
od 113 drivers/infiniband/hw/hfi1/opfn.c struct hfi1_opfn_data *od;
od 116 drivers/infiniband/hw/hfi1/opfn.c od = container_of(work, struct hfi1_opfn_data, opfn_work);
od 117 drivers/infiniband/hw/hfi1/opfn.c qpriv = container_of(od, struct hfi1_qp_priv, opfn);
od 229 drivers/pinctrl/pinctrl-st.c struct regmap_field *alt, *oe, *pu, *od;
od 244 drivers/pinctrl/pinctrl-st.c const int alt, oe, pu, od, rt;
od 346 drivers/pinctrl/pinctrl-st.c .alt = 0, .oe = 40, .pu = 50, .od = 60, .rt = 100,
od 358 drivers/pinctrl/pinctrl-st.c .od = 60,
od 388 drivers/pinctrl/pinctrl-st.c struct regmap_field *open_drain = pc->od;
od 591 drivers/pinctrl/pinctrl-st.c if (pc->od) {
od 592 drivers/pinctrl/pinctrl-st.c regmap_field_read(pc->od, &od_value);
od 1149 drivers/pinctrl/pinctrl-st.c pc->od = st_pc_get_value(dev, regmap, bank/4, data->od, lsb, msb);
od 1550 drivers/pinctrl/tegra/pinctrl-tegra114.c #define PINGROUP(pg_name, f0, f1, f2, f3, r, od, ior, rcv_sel) \
od 1571 drivers/pinctrl/tegra/pinctrl-tegra114.c .odrain_bit = PINGROUP_BIT_##od(6), \
od 1719 drivers/pinctrl/tegra/pinctrl-tegra124.c #define PINGROUP(pg_name, f0, f1, f2, f3, r, od, ior, rcv_sel) \
od 1740 drivers/pinctrl/tegra/pinctrl-tegra124.c .odrain_bit = PINGROUP_BIT_##od(6), \
od 2111 drivers/pinctrl/tegra/pinctrl-tegra30.c #define PINGROUP(pg_name, f0, f1, f2, f3, r, od, ior) \
od 2132 drivers/pinctrl/tegra/pinctrl-tegra30.c .odrain_bit = PINGROUP_BIT_##od(6), \
od 74 drivers/scsi/libsas/sas_host_smp.c static u8 *to_sas_gpio_gp_bit(unsigned int od, u8 *data, u8 index, u8 count, u8 *bit)
od 84 drivers/scsi/libsas/sas_host_smp.c if (od < index * 32)
od 87 drivers/scsi/libsas/sas_host_smp.c od -= index * 32;
od 88 drivers/scsi/libsas/sas_host_smp.c reg = od >> 5;
od 93 drivers/scsi/libsas/sas_host_smp.c od &= (1 << 5) - 1;
od 94 drivers/scsi/libsas/sas_host_smp.c byte = 3 - (od >> 3);
od 95 drivers/scsi/libsas/sas_host_smp.c *bit = od & ((1 << 3) - 1);
od 100 drivers/scsi/libsas/sas_host_smp.c int try_test_sas_gpio_gp_bit(unsigned int od, u8 *data, u8 index, u8 count)
od 105 drivers/scsi/libsas/sas_host_smp.c byte = to_sas_gpio_gp_bit(od, data, index, count, &bit);
od 782 drivers/usb/gadget/function/f_uvc.c struct uvc_output_terminal_descriptor *od;
od 821 drivers/usb/gadget/function/f_uvc.c od = &opts->uvc_output_terminal;
od 822 drivers/usb/gadget/function/f_uvc.c od->bLength = UVC_DT_OUTPUT_TERMINAL_SIZE;
od 823 drivers/usb/gadget/function/f_uvc.c od->bDescriptorType = USB_DT_CS_INTERFACE;
od 824 drivers/usb/gadget/function/f_uvc.c od->bDescriptorSubType = UVC_VC_OUTPUT_TERMINAL;
od 825 drivers/usb/gadget/function/f_uvc.c od->bTerminalID = 3;
od 826 drivers/usb/gadget/function/f_uvc.c od->wTerminalType = cpu_to_le16(0x0101);
od 827 drivers/usb/gadget/function/f_uvc.c od->bAssocTerminal = 0;
od 828 drivers/usb/gadget/function/f_uvc.c od->bSourceID = 2;
od 829 drivers/usb/gadget/function/f_uvc.c od->iTerminal = 0;
od 844 drivers/usb/gadget/function/f_uvc.c ctl_cls[3] = (struct uvc_descriptor_header *)od;
od 854 drivers/usb/gadget/function/f_uvc.c ctl_cls[3] = (struct uvc_descriptor_header *)od;
od 113 drivers/usb/serial/omninet.c struct omninet_data *od;
od 115 drivers/usb/serial/omninet.c od = kzalloc(sizeof(*od), GFP_KERNEL);
od 116 drivers/usb/serial/omninet.c if (!od)
od 119 drivers/usb/serial/omninet.c usb_set_serial_port_data(port, od);
od 126 drivers/usb/serial/omninet.c struct omninet_data *od;
od 128 drivers/usb/serial/omninet.c od = usb_get_serial_port_data(port);
od 129 drivers/usb/serial/omninet.c kfree(od);
od 158 drivers/usb/serial/omninet.c struct omninet_data *od = usb_get_serial_port_data(port);
od 166 drivers/usb/serial/omninet.c header->oh_seq = od->od_outseq++;
od 2618 drivers/video/fbdev/core/fbcon.c struct fbcon_display *od = &fb_display[con];
od 2621 drivers/video/fbdev/core/fbcon.c if (od->fontdata == f->data)
od 2623 drivers/video/fbdev/core/fbcon.c return fbcon_do_set_font(vc, f->width, f->height, od->fontdata, od->userfont);
od 5914 fs/nfsd/nfs4state.c struct nfsd4_open_downgrade *od = &u->open_downgrade;
od 5923 fs/nfsd/nfs4state.c if (od->od_deleg_want)
od 5925 fs/nfsd/nfs4state.c od->od_deleg_want);
od 5927 fs/nfsd/nfs4state.c status = nfs4_preprocess_confirmed_seqid_op(cstate, od->od_seqid,
od 5928 fs/nfsd/nfs4state.c &od->od_stateid, &stp, nn);
od 5932 fs/nfsd/nfs4state.c if (!test_access(od->od_share_access, stp)) {
od 5934 fs/nfsd/nfs4state.c stp->st_access_bmap, od->od_share_access);
od 5937 fs/nfsd/nfs4state.c if (!test_deny(od->od_share_deny, stp)) {
od 5939 fs/nfsd/nfs4state.c stp->st_deny_bmap, od->od_share_deny);
od 5942 fs/nfsd/nfs4state.c nfs4_stateid_downgrade(stp, od->od_share_access);
od 5943 fs/nfsd/nfs4state.c reset_union_bmap_deny(od->od_share_deny, stp);
od 5944 fs/nfsd/nfs4state.c nfs4_inc_and_copy_stateid(&od->od_stateid, &stp->st_stid);
od 3449 fs/nfsd/nfs4xdr.c nfsd4_encode_open_downgrade(struct nfsd4_compoundres *resp, __be32 nfserr, struct nfsd4_open_downgrade *od)
od 3453 fs/nfsd/nfs4xdr.c return nfsd4_encode_stateid(xdr, &od->od_stateid);
od 871 fs/ocfs2/quota_local.c struct ocfs2_dquot *od = private;
od 873 fs/ocfs2/quota_local.c struct super_block *sb = od->dq_dquot.dq_sb;
od 876 fs/ocfs2/quota_local.c + ol_dqblk_block_offset(sb, od->dq_local_off));
od 879 fs/ocfs2/quota_local.c od->dq_dquot.dq_id));
od 880 fs/ocfs2/quota_local.c spin_lock(&od->dq_dquot.dq_dqb_lock);
od 881 fs/ocfs2/quota_local.c dqblk->dqb_spacemod = cpu_to_le64(od->dq_dquot.dq_dqb.dqb_curspace -
od 882 fs/ocfs2/quota_local.c od->dq_origspace);
od 883 fs/ocfs2/quota_local.c dqblk->dqb_inodemod = cpu_to_le64(od->dq_dquot.dq_dqb.dqb_curinodes -
od 884 fs/ocfs2/quota_local.c od->dq_originodes);
od 885 fs/ocfs2/quota_local.c spin_unlock(&od->dq_dquot.dq_dqb_lock);
od 889 fs/ocfs2/quota_local.c from_kqid(&init_user_ns, od->dq_dquot.dq_id));
od 896 fs/ocfs2/quota_local.c struct ocfs2_dquot *od = OCFS2_DQUOT(dquot);
od 901 fs/ocfs2/quota_local.c status = ocfs2_read_quota_phys_block(lqinode, od->dq_local_phys_blk,
od 907 fs/ocfs2/quota_local.c status = ocfs2_modify_bh(lqinode, bh, olq_set_dquot, od);
od 1222 fs/ocfs2/quota_local.c struct ocfs2_dquot *od = OCFS2_DQUOT(dquot);
od 1239 fs/ocfs2/quota_local.c od->dq_local_off = ol_dqblk_off(sb, chunk->qc_num, offset);
od 1240 fs/ocfs2/quota_local.c od->dq_chunk = chunk;
od 1243 fs/ocfs2/quota_local.c &od->dq_local_phys_blk,
od 1274 fs/ocfs2/quota_local.c struct ocfs2_dquot *od = OCFS2_DQUOT(dquot);
od 1281 fs/ocfs2/quota_local.c od->dq_chunk->qc_headerbh, OCFS2_JOURNAL_ACCESS_WRITE);
od 1286 fs/ocfs2/quota_local.c offset = ol_dqblk_chunk_off(sb, od->dq_chunk->qc_num,
od 1287 fs/ocfs2/quota_local.c od->dq_local_off);
od 1289 fs/ocfs2/quota_local.c (od->dq_chunk->qc_headerbh->b_data);
od 1291 fs/ocfs2/quota_local.c lock_buffer(od->dq_chunk->qc_headerbh);
od 1294 fs/ocfs2/quota_local.c unlock_buffer(od->dq_chunk->qc_headerbh);
od 1295 fs/ocfs2/quota_local.c ocfs2_journal_dirty(handle, od->dq_chunk->qc_headerbh);
od 62 fs/orangefs/dir.c struct orangefs_dir *od, struct dentry *dentry,
od 76 fs/orangefs/dir.c op->upcall.req.readdir.token = od->token;
od 83 fs/orangefs/dir.c od->error = bufi;
od 100 fs/orangefs/dir.c od->error = r;
od 107 fs/orangefs/dir.c od->error = r;
od 111 fs/orangefs/dir.c od->error = op->downcall.status;
od 121 fs/orangefs/dir.c od->error = -EIO;
od 127 fs/orangefs/dir.c od->token = resp->token;
od 131 fs/orangefs/dir.c static int parse_readdir(struct orangefs_dir *od,
od 138 fs/orangefs/dir.c part = od->part;
od 151 fs/orangefs/dir.c if (!od->part)
od 152 fs/orangefs/dir.c od->part = new;
od 156 fs/orangefs/dir.c od->end = count << PART_SHIFT;
od 162 fs/orangefs/dir.c struct orangefs_dir *od, struct dentry *dentry)
od 169 fs/orangefs/dir.c od->error = -ENOMEM;
od 172 fs/orangefs/dir.c r = do_readdir(oi, od, dentry, op);
od 174 fs/orangefs/dir.c od->error = r;
od 177 fs/orangefs/dir.c r = parse_readdir(od, op);
od 179 fs/orangefs/dir.c od->error = r;
od 183 fs/orangefs/dir.c od->error = 0;
od 186 fs/orangefs/dir.c return od->error;
od 242 fs/orangefs/dir.c struct orangefs_dir *od, struct dentry *dentry,
od 250 fs/orangefs/dir.c part = od->part;
od 257 fs/orangefs/dir.c od->error = -EIO;
od 265 fs/orangefs/dir.c od->error = r;
od 285 fs/orangefs/dir.c struct orangefs_dir *od = file->private_data;
od 290 fs/orangefs/dir.c if (!whence && offset < od->end) {
od 291 fs/orangefs/dir.c struct orangefs_dir_part *part = od->part;
od 297 fs/orangefs/dir.c od->token = ORANGEFS_ITERATE_START;
od 298 fs/orangefs/dir.c od->part = NULL;
od 299 fs/orangefs/dir.c od->end = 1 << PART_SHIFT;
od 308 fs/orangefs/dir.c struct orangefs_dir *od;
od 314 fs/orangefs/dir.c od = file->private_data;
od 316 fs/orangefs/dir.c if (od->error)
od 317 fs/orangefs/dir.c return od->error;
od 343 fs/orangefs/dir.c while (od->token != ORANGEFS_ITERATE_END &&
od 344 fs/orangefs/dir.c ctx->pos > od->end) {
od 345 fs/orangefs/dir.c r = orangefs_dir_more(oi, od, dentry);
od 349 fs/orangefs/dir.c if (od->token == ORANGEFS_ITERATE_END && ctx->pos > od->end)
od 353 fs/orangefs/dir.c if (ctx->pos < od->end) {
od 354 fs/orangefs/dir.c r = orangefs_dir_fill(oi, od, dentry, ctx);
od 360 fs/orangefs/dir.c if (od->token != ORANGEFS_ITERATE_END) {
od 361 fs/orangefs/dir.c r = orangefs_dir_more(oi, od, dentry);
od 364 fs/orangefs/dir.c r = orangefs_dir_fill(oi, od, dentry, ctx);
od 372 fs/orangefs/dir.c struct orangefs_dir *od;
od 377 fs/orangefs/dir.c od = file->private_data;
od 378 fs/orangefs/dir.c od->token = ORANGEFS_ITERATE_START;
od 379 fs/orangefs/dir.c od->part = NULL;
od 380 fs/orangefs/dir.c od->end = 1 << PART_SHIFT;
od 381 fs/orangefs/dir.c od->error = 0;
od 387 fs/orangefs/dir.c struct orangefs_dir *od = file->private_data;
od 388 fs/orangefs/dir.c struct orangefs_dir_part *part = od->part;
od 394 fs/orangefs/dir.c kfree(od);
od 238 fs/overlayfs/readdir.c static void ovl_cache_put(struct ovl_dir_file *od, struct dentry *dentry)
od 240 fs/overlayfs/readdir.c struct ovl_dir_cache *cache = od->cache;
od 336 fs/overlayfs/readdir.c struct ovl_dir_file *od = file->private_data;
od 337 fs/overlayfs/readdir.c struct ovl_dir_cache *cache = od->cache;
od 342 fs/overlayfs/readdir.c ovl_cache_put(od, dentry);
od 343 fs/overlayfs/readdir.c od->cache = NULL;
od 344 fs/overlayfs/readdir.c od->cursor = NULL;
od 347 fs/overlayfs/readdir.c if (od->is_real != is_real) {
od 351 fs/overlayfs/readdir.c od->is_real = false;
od 391 fs/overlayfs/readdir.c static void ovl_seek_cursor(struct ovl_dir_file *od, loff_t pos)
od 396 fs/overlayfs/readdir.c list_for_each(p, &od->cache->entries) {
od 402 fs/overlayfs/readdir.c od->cursor = p;
od 676 fs/overlayfs/readdir.c struct ovl_dir_file *od = file->private_data;
od 685 fs/overlayfs/readdir.c return od->is_upper && ovl_test_flag(OVL_IMPURE, dir);
od 692 fs/overlayfs/readdir.c struct ovl_dir_file *od = file->private_data;
od 723 fs/overlayfs/readdir.c err = iterate_dir(od->realfile, &rdt.ctx);
od 732 fs/overlayfs/readdir.c struct ovl_dir_file *od = file->private_data;
od 740 fs/overlayfs/readdir.c if (od->is_real) {
od 752 fs/overlayfs/readdir.c return iterate_dir(od->realfile, ctx);
od 755 fs/overlayfs/readdir.c if (!od->cache) {
od 762 fs/overlayfs/readdir.c od->cache = cache;
od 763 fs/overlayfs/readdir.c ovl_seek_cursor(od, ctx->pos);
od 766 fs/overlayfs/readdir.c while (od->cursor != &od->cache->entries) {
od 767 fs/overlayfs/readdir.c p = list_entry(od->cursor, struct ovl_cache_entry, l_node);
od 777 fs/overlayfs/readdir.c od->cursor = p->l_node.next;
od 786 fs/overlayfs/readdir.c struct ovl_dir_file *od = file->private_data;
od 792 fs/overlayfs/readdir.c if (od->is_real) {
od 793 fs/overlayfs/readdir.c res = vfs_llseek(od->realfile, offset, origin);
od 794 fs/overlayfs/readdir.c file->f_pos = od->realfile->f_pos;
od 812 fs/overlayfs/readdir.c if (od->cache)
od 813 fs/overlayfs/readdir.c ovl_seek_cursor(od, offset);
od 826 fs/overlayfs/readdir.c struct ovl_dir_file *od = file->private_data;
od 828 fs/overlayfs/readdir.c struct file *realfile = od->realfile;
od 837 fs/overlayfs/readdir.c if (!od->is_upper) {
od 840 fs/overlayfs/readdir.c realfile = READ_ONCE(od->upperfile);
od 848 fs/overlayfs/readdir.c if (!od->upperfile) {
od 853 fs/overlayfs/readdir.c smp_store_release(&od->upperfile, realfile);
od 858 fs/overlayfs/readdir.c realfile = od->upperfile;
od 869 fs/overlayfs/readdir.c struct ovl_dir_file *od = file->private_data;
od 871 fs/overlayfs/readdir.c if (od->cache) {
od 873 fs/overlayfs/readdir.c ovl_cache_put(od, file->f_path.dentry);
od 876 fs/overlayfs/readdir.c fput(od->realfile);
od 877 fs/overlayfs/readdir.c if (od->upperfile)
od 878 fs/overlayfs/readdir.c fput(od->upperfile);
od 879 fs/overlayfs/readdir.c kfree(od);
od 888 fs/overlayfs/readdir.c struct ovl_dir_file *od;
od 891 fs/overlayfs/readdir.c od = kzalloc(sizeof(struct ovl_dir_file), GFP_KERNEL);
od 892 fs/overlayfs/readdir.c if (!od)
od 898 fs/overlayfs/readdir.c kfree(od);
od 901 fs/overlayfs/readdir.c od->realfile = realfile;
od 902 fs/overlayfs/readdir.c od->is_real = ovl_dir_is_real(file->f_path.dentry);
od 903 fs/overlayfs/readdir.c od->is_upper = OVL_TYPE_UPPER(type);
od 904 fs/overlayfs/readdir.c file->private_data = od;
od 453 include/scsi/libsas.h int try_test_sas_gpio_gp_bit(unsigned int od, u8 *data, u8 index, u8 count);
od 455 include/scsi/libsas.h static inline int try_test_sas_gpio_gp_bit(unsigned int od, u8 *data, u8 index, u8 count)
od 711 net/tipc/bearer.c struct packet_type *pt, struct net_device *od)
od 279 sound/usb/usx2y/usb_stream.c struct usb_iso_packet_descriptor *id, *od;
od 283 sound/usb/usx2y/usb_stream.c od = io->iso_frame_desc;
od 291 sound/usb/usx2y/usb_stream.c od[p].length = l;
od 292 sound/usb/usx2y/usb_stream.c od[p].offset = lb;
od 304 sound/usb/usx2y/usb_stream.c od[p].length = l;
od 305 sound/usb/usx2y/usb_stream.c od[p].offset = lb;