rcdu 35 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 37 drivers/gpu/drm/rcar-du/rcar_du_crtc.c return rcar_du_read(rcdu, rcrtc->mmio_offset + reg);
rcdu 42 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 44 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcar_du_write(rcdu, rcrtc->mmio_offset + reg, data);
rcdu 49 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 51 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
rcdu 52 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcar_du_read(rcdu, rcrtc->mmio_offset + reg) & ~clr);
rcdu 57 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 59 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
rcdu 60 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcar_du_read(rcdu, rcrtc->mmio_offset + reg) | set);
rcdu 65 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 68 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcar_du_write(rcdu, rcrtc->mmio_offset + DSYSR, rcrtc->dsysr);
rcdu 214 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 219 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcdu->info->dpll_mask & (1 << rcrtc->index)) {
rcdu 262 drivers/gpu/drm/rcar-du/rcar_du_crtc.c } else if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index)) {
rcdu 335 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 381 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE)) {
rcdu 382 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcdu->info->gen < 3) {
rcdu 465 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 472 drivers/gpu/drm/rcar-du/rcar_du_crtc.c dev_warn(rcdu->dev, "page flip timeout\n");
rcdu 566 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 588 drivers/gpu/drm/rcar-du/rcar_du_crtc.c dev_warn(rcdu->dev, "vertical blanking timeout\n");
rcdu 668 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 677 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index) &&
rcdu 680 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcdu->encoders[RCAR_DU_OUTPUT_LVDS0 + rcrtc->index];
rcdu 696 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 701 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index) &&
rcdu 704 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcdu->encoders[RCAR_DU_OUTPUT_LVDS0 + rcrtc->index];
rcdu 773 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 777 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (interlaced && !rcar_du_has(rcdu, RCAR_DU_FEATURE_INTERLACED))
rcdu 805 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 811 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcdu->info->gen < 3)
rcdu 1087 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rcrtc->dev;
rcdu 1111 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcdu->info->gen < 3) {
rcdu 1133 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 1134 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct platform_device *pdev = to_platform_device(rcdu->dev);
rcdu 1135 drivers/gpu/drm/rcar-du/rcar_du_crtc.c struct rcar_du_crtc *rcrtc = &rcdu->crtcs[swindex];
rcdu 1146 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_IRQ_CLOCK)) {
rcdu 1153 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcrtc->clock = devm_clk_get(rcdu->dev, name);
rcdu 1155 drivers/gpu/drm/rcar-du/rcar_du_crtc.c dev_err(rcdu->dev, "no clock for DU channel %u\n", hwindex);
rcdu 1160 drivers/gpu/drm/rcar-du/rcar_du_crtc.c clk = devm_clk_get(rcdu->dev, clk_name);
rcdu 1165 drivers/gpu/drm/rcar-du/rcar_du_crtc.c } else if (rcdu->info->dpll_mask & BIT(hwindex)) {
rcdu 1171 drivers/gpu/drm/rcar-du/rcar_du_crtc.c dev_err(rcdu->dev, "can't get dclkin.%u: %d\n", hwindex, ret);
rcdu 1179 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcrtc->dev = rcdu;
rcdu 1185 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE))
rcdu 1190 drivers/gpu/drm/rcar-du/rcar_du_crtc.c ret = drm_crtc_init_with_planes(rcdu->ddev, crtc, primary, NULL,
rcdu 1191 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcdu->info->gen <= 2 ?
rcdu 1203 drivers/gpu/drm/rcar-du/rcar_du_crtc.c if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_IRQ_CLOCK)) {
rcdu 1213 drivers/gpu/drm/rcar-du/rcar_du_crtc.c dev_err(rcdu->dev, "no IRQ for CRTC %u\n", swindex);
rcdu 1217 drivers/gpu/drm/rcar-du/rcar_du_crtc.c ret = devm_request_irq(rcdu->dev, irq, rcar_du_crtc_irq, irqflags,
rcdu 1218 drivers/gpu/drm/rcar-du/rcar_du_crtc.c dev_name(rcdu->dev), rcrtc);
rcdu 1220 drivers/gpu/drm/rcar-du/rcar_du_crtc.c dev_err(rcdu->dev,
rcdu 470 drivers/gpu/drm/rcar-du/rcar_du_drv.c struct rcar_du_device *rcdu = dev_get_drvdata(dev);
rcdu 472 drivers/gpu/drm/rcar-du/rcar_du_drv.c return drm_mode_config_helper_suspend(rcdu->ddev);
rcdu 477 drivers/gpu/drm/rcar-du/rcar_du_drv.c struct rcar_du_device *rcdu = dev_get_drvdata(dev);
rcdu 479 drivers/gpu/drm/rcar-du/rcar_du_drv.c return drm_mode_config_helper_resume(rcdu->ddev);
rcdu 493 drivers/gpu/drm/rcar-du/rcar_du_drv.c struct rcar_du_device *rcdu = platform_get_drvdata(pdev);
rcdu 494 drivers/gpu/drm/rcar-du/rcar_du_drv.c struct drm_device *ddev = rcdu->ddev;
rcdu 508 drivers/gpu/drm/rcar-du/rcar_du_drv.c struct rcar_du_device *rcdu;
rcdu 514 drivers/gpu/drm/rcar-du/rcar_du_drv.c rcdu = devm_kzalloc(&pdev->dev, sizeof(*rcdu), GFP_KERNEL);
rcdu 515 drivers/gpu/drm/rcar-du/rcar_du_drv.c if (rcdu == NULL)
rcdu 518 drivers/gpu/drm/rcar-du/rcar_du_drv.c rcdu->dev = &pdev->dev;
rcdu 519 drivers/gpu/drm/rcar-du/rcar_du_drv.c rcdu->info = of_device_get_match_data(rcdu->dev);
rcdu 521 drivers/gpu/drm/rcar-du/rcar_du_drv.c platform_set_drvdata(pdev, rcdu);
rcdu 525 drivers/gpu/drm/rcar-du/rcar_du_drv.c rcdu->mmio = devm_ioremap_resource(&pdev->dev, mem);
rcdu 526 drivers/gpu/drm/rcar-du/rcar_du_drv.c if (IS_ERR(rcdu->mmio))
rcdu 527 drivers/gpu/drm/rcar-du/rcar_du_drv.c return PTR_ERR(rcdu->mmio);
rcdu 534 drivers/gpu/drm/rcar-du/rcar_du_drv.c rcdu->ddev = ddev;
rcdu 535 drivers/gpu/drm/rcar-du/rcar_du_drv.c ddev->dev_private = rcdu;
rcdu 537 drivers/gpu/drm/rcar-du/rcar_du_drv.c ret = rcar_du_modeset_init(rcdu);
rcdu 99 drivers/gpu/drm/rcar-du/rcar_du_drv.h static inline bool rcar_du_has(struct rcar_du_device *rcdu,
rcdu 102 drivers/gpu/drm/rcar-du/rcar_du_drv.h return rcdu->info->features & feature;
rcdu 105 drivers/gpu/drm/rcar-du/rcar_du_drv.h static inline bool rcar_du_needs(struct rcar_du_device *rcdu,
rcdu 108 drivers/gpu/drm/rcar-du/rcar_du_drv.h return rcdu->info->quirks & quirk;
rcdu 111 drivers/gpu/drm/rcar-du/rcar_du_drv.h static inline u32 rcar_du_read(struct rcar_du_device *rcdu, u32 reg)
rcdu 113 drivers/gpu/drm/rcar-du/rcar_du_drv.h return ioread32(rcdu->mmio + reg);
rcdu 116 drivers/gpu/drm/rcar-du/rcar_du_drv.h static inline void rcar_du_write(struct rcar_du_device *rcdu, u32 reg, u32 data)
rcdu 118 drivers/gpu/drm/rcar-du/rcar_du_drv.h iowrite32(data, rcdu->mmio + reg);
rcdu 52 drivers/gpu/drm/rcar-du/rcar_du_encoder.c int rcar_du_encoder_init(struct rcar_du_device *rcdu,
rcdu 61 drivers/gpu/drm/rcar-du/rcar_du_encoder.c renc = devm_kzalloc(rcdu->dev, sizeof(*renc), GFP_KERNEL);
rcdu 65 drivers/gpu/drm/rcar-du/rcar_du_encoder.c rcdu->encoders[output] = renc;
rcdu 69 drivers/gpu/drm/rcar-du/rcar_du_encoder.c dev_dbg(rcdu->dev, "initializing encoder %pOF for output %u\n",
rcdu 87 drivers/gpu/drm/rcar-du/rcar_du_encoder.c bridge = devm_drm_panel_bridge_add(rcdu->dev, panel,
rcdu 105 drivers/gpu/drm/rcar-du/rcar_du_encoder.c if (rcdu->info->gen >= 3 && output == RCAR_DU_OUTPUT_LVDS1) {
rcdu 112 drivers/gpu/drm/rcar-du/rcar_du_encoder.c ret = drm_encoder_init(rcdu->ddev, encoder, &encoder_funcs,
rcdu 133 drivers/gpu/drm/rcar-du/rcar_du_encoder.c devm_kfree(rcdu->dev, renc);
rcdu 27 drivers/gpu/drm/rcar-du/rcar_du_encoder.h int rcar_du_encoder_init(struct rcar_du_device *rcdu,
rcdu 58 drivers/gpu/drm/rcar-du/rcar_du_group.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 61 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->gen < 3) {
rcdu 70 drivers/gpu/drm/rcar-du/rcar_du_group.c defr8 |= DEFR8_DRGBS_DU(rcdu->dpad0_source);
rcdu 80 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rgrp->index == rcdu->dpad0_source / 2)
rcdu 81 drivers/gpu/drm/rcar-du/rcar_du_group.c defr8 |= DEFR8_DRGBS_DU(rcdu->dpad0_source);
rcdu 89 drivers/gpu/drm/rcar-du/rcar_du_group.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 103 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->gen < 3 && rgrp->index == 0) {
rcdu 108 drivers/gpu/drm/rcar-du/rcar_du_group.c rcrtc = rcdu->crtcs;
rcdu 109 drivers/gpu/drm/rcar-du/rcar_du_group.c num_crtcs = rcdu->num_crtcs;
rcdu 110 drivers/gpu/drm/rcar-du/rcar_du_group.c } else if (rcdu->info->gen == 3 && rgrp->num_crtcs > 1) {
rcdu 115 drivers/gpu/drm/rcar-du/rcar_du_group.c rcrtc = &rcdu->crtcs[rgrp->index * 2];
rcdu 124 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index))
rcdu 137 drivers/gpu/drm/rcar-du/rcar_du_group.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 141 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->gen < 3) {
rcdu 150 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->gen >= 2) {
rcdu 155 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->gen >= 3)
rcdu 205 drivers/gpu/drm/rcar-du/rcar_du_group.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 215 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->channels_mask & BIT(rgrp->index * 2)) {
rcdu 258 drivers/gpu/drm/rcar-du/rcar_du_group.c int rcar_du_set_dpad0_vsp1_routing(struct rcar_du_device *rcdu)
rcdu 265 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->gen < 2)
rcdu 275 drivers/gpu/drm/rcar-du/rcar_du_group.c index = rcdu->info->gen < 3 ? 0 : DIV_ROUND_UP(rcdu->num_crtcs, 2) - 1;
rcdu 276 drivers/gpu/drm/rcar-du/rcar_du_group.c rgrp = &rcdu->groups[index];
rcdu 277 drivers/gpu/drm/rcar-du/rcar_du_group.c crtc = &rcdu->crtcs[index * 2];
rcdu 300 drivers/gpu/drm/rcar-du/rcar_du_group.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 304 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->info->gen < 2)
rcdu 321 drivers/gpu/drm/rcar-du/rcar_du_group.c rcrtc = &rcdu->crtcs[rgrp->index * 2 + i];
rcdu 333 drivers/gpu/drm/rcar-du/rcar_du_group.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 343 drivers/gpu/drm/rcar-du/rcar_du_group.c if (rcdu->dpad1_source == rgrp->index * 2)
rcdu 61 drivers/gpu/drm/rcar-du/rcar_du_group.h int rcar_du_set_dpad0_vsp1_routing(struct rcar_du_device *rcdu);
rcdu 288 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct rcar_du_device *rcdu = dev->dev_private;
rcdu 296 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcar_du_needs(rcdu, RCAR_DU_QUIRK_ALIGN_128B))
rcdu 310 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct rcar_du_device *rcdu = dev->dev_private;
rcdu 323 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcdu->info->gen < 3) {
rcdu 333 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcar_du_needs(rcdu, RCAR_DU_QUIRK_ALIGN_128B))
rcdu 372 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct rcar_du_device *rcdu = dev->dev_private;
rcdu 379 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE))
rcdu 388 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct rcar_du_device *rcdu = dev->dev_private;
rcdu 397 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->dpad1_source = -1;
rcdu 405 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->dpad0_source = rcrtc->index;
rcdu 408 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->dpad1_source = rcrtc->index;
rcdu 437 drivers/gpu/drm/rcar-du/rcar_du_kms.c static int rcar_du_encoders_init_one(struct rcar_du_device *rcdu,
rcdu 447 drivers/gpu/drm/rcar-du/rcar_du_kms.c dev_dbg(rcdu->dev, "unconnected endpoint %pOF, skipping\n",
rcdu 453 drivers/gpu/drm/rcar-du/rcar_du_kms.c dev_dbg(rcdu->dev,
rcdu 460 drivers/gpu/drm/rcar-du/rcar_du_kms.c ret = rcar_du_encoder_init(rcdu, output, entity);
rcdu 462 drivers/gpu/drm/rcar-du/rcar_du_kms.c dev_warn(rcdu->dev,
rcdu 471 drivers/gpu/drm/rcar-du/rcar_du_kms.c static int rcar_du_encoders_init(struct rcar_du_device *rcdu)
rcdu 473 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct device_node *np = rcdu->dev->of_node;
rcdu 495 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcdu->info->routes[i].possible_crtcs &&
rcdu 496 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->info->routes[i].port == ep.port) {
rcdu 503 drivers/gpu/drm/rcar-du/rcar_du_kms.c dev_warn(rcdu->dev,
rcdu 510 drivers/gpu/drm/rcar-du/rcar_du_kms.c ret = rcar_du_encoders_init_one(rcdu, output, &ep);
rcdu 526 drivers/gpu/drm/rcar-du/rcar_du_kms.c static int rcar_du_properties_init(struct rcar_du_device *rcdu)
rcdu 533 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->props.colorkey =
rcdu 534 drivers/gpu/drm/rcar-du/rcar_du_kms.c drm_property_create_range(rcdu->ddev, 0, "colorkey",
rcdu 536 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcdu->props.colorkey == NULL)
rcdu 542 drivers/gpu/drm/rcar-du/rcar_du_kms.c static int rcar_du_vsps_init(struct rcar_du_device *rcdu)
rcdu 544 drivers/gpu/drm/rcar-du/rcar_du_kms.c const struct device_node *np = rcdu->dev->of_node;
rcdu 560 drivers/gpu/drm/rcar-du/rcar_du_kms.c cells = of_property_count_u32_elems(np, "vsps") / rcdu->num_crtcs - 1;
rcdu 564 drivers/gpu/drm/rcar-du/rcar_du_kms.c for (i = 0; i < rcdu->num_crtcs; ++i) {
rcdu 589 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->crtcs[i].vsp = &rcdu->vsps[j];
rcdu 590 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->crtcs[i].vsp_pipe = cells >= 1 ? args.args[0] : 0;
rcdu 598 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct rcar_du_vsp *vsp = &rcdu->vsps[i];
rcdu 601 drivers/gpu/drm/rcar-du/rcar_du_kms.c vsp->dev = rcdu;
rcdu 617 drivers/gpu/drm/rcar-du/rcar_du_kms.c int rcar_du_modeset_init(struct rcar_du_device *rcdu)
rcdu 623 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct drm_device *dev = rcdu->ddev;
rcdu 641 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcdu->info->gen < 3) {
rcdu 653 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->num_crtcs = hweight8(rcdu->info->channels_mask);
rcdu 655 drivers/gpu/drm/rcar-du/rcar_du_kms.c ret = rcar_du_properties_init(rcdu);
rcdu 663 drivers/gpu/drm/rcar-du/rcar_du_kms.c ret = drm_vblank_init(dev, rcdu->num_crtcs);
rcdu 668 drivers/gpu/drm/rcar-du/rcar_du_kms.c num_groups = DIV_ROUND_UP(rcdu->num_crtcs, 2);
rcdu 671 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct rcar_du_group *rgrp = &rcdu->groups[i];
rcdu 675 drivers/gpu/drm/rcar-du/rcar_du_kms.c rgrp->dev = rcdu;
rcdu 679 drivers/gpu/drm/rcar-du/rcar_du_kms.c rgrp->channels_mask = (rcdu->info->channels_mask >> (2 * i))
rcdu 690 drivers/gpu/drm/rcar-du/rcar_du_kms.c ? (rcdu->info->gen >= 3 ? 0x04 : 0xf0)
rcdu 693 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (!rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE)) {
rcdu 701 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE)) {
rcdu 702 drivers/gpu/drm/rcar-du/rcar_du_kms.c ret = rcar_du_vsps_init(rcdu);
rcdu 708 drivers/gpu/drm/rcar-du/rcar_du_kms.c for (swindex = 0, hwindex = 0; swindex < rcdu->num_crtcs; ++hwindex) {
rcdu 712 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (!(rcdu->info->channels_mask & BIT(hwindex)))
rcdu 715 drivers/gpu/drm/rcar-du/rcar_du_kms.c rgrp = &rcdu->groups[hwindex / 2];
rcdu 723 drivers/gpu/drm/rcar-du/rcar_du_kms.c ret = rcar_du_encoders_init(rcdu);
rcdu 728 drivers/gpu/drm/rcar-du/rcar_du_kms.c dev_err(rcdu->dev, "error: no encoder could be initialized\n");
rcdu 742 drivers/gpu/drm/rcar-du/rcar_du_kms.c &rcdu->info->routes[renc->output];
rcdu 749 drivers/gpu/drm/rcar-du/rcar_du_kms.c if (rcdu->info->gen >= 3) {
rcdu 750 drivers/gpu/drm/rcar-du/rcar_du_kms.c for (i = 0; i < rcdu->num_crtcs; ++i) {
rcdu 751 drivers/gpu/drm/rcar-du/rcar_du_kms.c struct rcar_du_crtc *rcrtc = &rcdu->crtcs[i];
rcdu 753 drivers/gpu/drm/rcar-du/rcar_du_kms.c ret = rcar_du_writeback_init(rcdu, rcrtc);
rcdu 767 drivers/gpu/drm/rcar-du/rcar_du_kms.c dpad0_sources = rcdu->info->routes[RCAR_DU_OUTPUT_DPAD0].possible_crtcs;
rcdu 768 drivers/gpu/drm/rcar-du/rcar_du_kms.c rcdu->dpad0_source = ffs(dpad0_sources) - 1;
rcdu 31 drivers/gpu/drm/rcar-du/rcar_du_kms.h int rcar_du_modeset_init(struct rcar_du_device *rcdu);
rcdu 131 drivers/gpu/drm/rcar-du/rcar_du_plane.c struct rcar_du_device *rcdu = dev->dev_private;
rcdu 153 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: checking plane (%u,%tu)\n", __func__,
rcdu 162 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: plane is being disabled\n",
rcdu 175 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: plane needs reallocation\n",
rcdu 200 drivers/gpu/drm/rcar-du/rcar_du_plane.c struct rcar_du_group *group = &rcdu->groups[index];
rcdu 203 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: finding free planes for group %u\n",
rcdu 224 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev,
rcdu 234 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev,
rcdu 246 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: group %u free planes mask 0x%02x\n",
rcdu 264 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: allocating plane (%u,%tu)\n", __func__,
rcdu 292 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: no available hardware plane\n",
rcdu 297 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: allocated %u hwplanes (index %u)\n",
rcdu 305 drivers/gpu/drm/rcar-du/rcar_du_plane.c dev_dbg(rcdu->dev, "%s: group %u free planes mask 0x%02x\n",
rcdu 519 drivers/gpu/drm/rcar-du/rcar_du_plane.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 522 drivers/gpu/drm/rcar-du/rcar_du_plane.c if (rcdu->info->gen < 3)
rcdu 533 drivers/gpu/drm/rcar-du/rcar_du_plane.c if (rcdu->info->gen < 3) {
rcdu 545 drivers/gpu/drm/rcar-du/rcar_du_plane.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 552 drivers/gpu/drm/rcar-du/rcar_du_plane.c if (rcdu->info->gen < 3)
rcdu 558 drivers/gpu/drm/rcar-du/rcar_du_plane.c if (rcdu->vspd1_sink != vspd1_sink) {
rcdu 559 drivers/gpu/drm/rcar-du/rcar_du_plane.c rcdu->vspd1_sink = vspd1_sink;
rcdu 560 drivers/gpu/drm/rcar-du/rcar_du_plane.c rcar_du_set_dpad0_vsp1_routing(rcdu);
rcdu 703 drivers/gpu/drm/rcar-du/rcar_du_plane.c struct rcar_du_device *rcdu = to_rcar_plane(plane)->group->dev;
rcdu 705 drivers/gpu/drm/rcar-du/rcar_du_plane.c if (property == rcdu->props.colorkey)
rcdu 719 drivers/gpu/drm/rcar-du/rcar_du_plane.c struct rcar_du_device *rcdu = to_rcar_plane(plane)->group->dev;
rcdu 721 drivers/gpu/drm/rcar-du/rcar_du_plane.c if (property == rcdu->props.colorkey)
rcdu 755 drivers/gpu/drm/rcar-du/rcar_du_plane.c struct rcar_du_device *rcdu = rgrp->dev;
rcdu 766 drivers/gpu/drm/rcar-du/rcar_du_plane.c crtcs = ((1 << rcdu->num_crtcs) - 1) & (3 << (2 * rgrp->index));
rcdu 776 drivers/gpu/drm/rcar-du/rcar_du_plane.c ret = drm_universal_plane_init(rcdu->ddev, &plane->plane, crtcs,
rcdu 792 drivers/gpu/drm/rcar-du/rcar_du_plane.c rcdu->props.colorkey,
rcdu 50 drivers/gpu/drm/rcar-du/rcar_du_vsp.c struct rcar_du_device *rcdu = crtc->dev;
rcdu 77 drivers/gpu/drm/rcar-du/rcar_du_vsp.c if (rcdu->info->gen >= 3)
rcdu 186 drivers/gpu/drm/rcar-du/rcar_du_vsp.c struct rcar_du_device *rcdu = vsp->dev;
rcdu 194 drivers/gpu/drm/rcar-du/rcar_du_vsp.c ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr, gem->paddr,
rcdu 347 drivers/gpu/drm/rcar-du/rcar_du_vsp.c struct rcar_du_device *rcdu = vsp->dev;
rcdu 368 drivers/gpu/drm/rcar-du/rcar_du_vsp.c vsp->num_planes = rcdu->info->gen >= 3 ? 5 : 4;
rcdu 370 drivers/gpu/drm/rcar-du/rcar_du_vsp.c vsp->planes = devm_kcalloc(rcdu->dev, vsp->num_planes,
rcdu 384 drivers/gpu/drm/rcar-du/rcar_du_vsp.c ret = drm_universal_plane_init(rcdu->ddev, &plane->plane, crtcs,
rcdu 198 drivers/gpu/drm/rcar-du/rcar_du_writeback.c int rcar_du_writeback_init(struct rcar_du_device *rcdu,
rcdu 207 drivers/gpu/drm/rcar-du/rcar_du_writeback.c return drm_writeback_connector_init(rcdu->ddev, wb_conn,
rcdu 18 drivers/gpu/drm/rcar-du/rcar_du_writeback.h int rcar_du_writeback_init(struct rcar_du_device *rcdu,
rcdu 24 drivers/gpu/drm/rcar-du/rcar_du_writeback.h static inline int rcar_du_writeback_init(struct rcar_du_device *rcdu,
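Taken together, the references above follow two recurring idioms: rcdu is reached from a CRTC (rcrtc->dev), a group (rgrp->dev) or the DRM device (dev->dev_private), and it is then used for MMIO access through rcar_du_read()/rcar_du_write() plus the per-channel mmio_offset, and for feature or quirk gating through rcar_du_has()/rcar_du_needs(). The sketch below reconstructs that pattern from the lines listed here; the trimmed-down structure definitions and the helper names rcar_du_crtc_read()/rcar_du_crtc_clr() are assumptions for illustration, not copies of the driver headers.

/*
 * Minimal sketch, assuming reduced versions of the rcar-du structures.
 * Only the statement bodies are taken from the listing above.
 */
#include <linux/io.h>
#include <linux/types.h>

struct rcar_du_device_info {
	unsigned int features;
	unsigned int quirks;
};

struct rcar_du_device {
	const struct rcar_du_device_info *info;
	void __iomem *mmio;
};

struct rcar_du_crtc {
	struct rcar_du_device *dev;	/* rcrtc->dev, as in the entries above */
	unsigned int mmio_offset;	/* per-channel register offset */
};

/* Device-wide MMIO accessors (cf. rcar_du_drv.h lines 111-118 above). */
static inline u32 rcar_du_read(struct rcar_du_device *rcdu, u32 reg)
{
	return ioread32(rcdu->mmio + reg);
}

static inline void rcar_du_write(struct rcar_du_device *rcdu, u32 reg, u32 data)
{
	iowrite32(data, rcdu->mmio + reg);
}

/* Feature gating (cf. rcar_du_drv.h lines 99-102 above). */
static inline bool rcar_du_has(struct rcar_du_device *rcdu, unsigned int feature)
{
	return rcdu->info->features & feature;
}

/* Per-CRTC accessors add the channel offset before delegating. */
static u32 rcar_du_crtc_read(struct rcar_du_crtc *rcrtc, u32 reg)
{
	struct rcar_du_device *rcdu = rcrtc->dev;

	return rcar_du_read(rcdu, rcrtc->mmio_offset + reg);
}

static void rcar_du_crtc_clr(struct rcar_du_crtc *rcrtc, u32 reg, u32 clr)
{
	struct rcar_du_device *rcdu = rcrtc->dev;

	rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
		      rcar_du_read(rcdu, rcrtc->mmio_offset + reg) & ~clr);
}

Most of the rcar_du_crtc.c entries above combine the two halves of this pattern: a feature or generation check on rcdu->info decides whether a register is touched at all, and the per-CRTC helpers then perform the read-modify-write against the channel's register window.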