Lines matching refs: par

281 static int vmlfb_get_gpu(struct vml_par *par)  in vmlfb_get_gpu()  argument
285 par->gpu = pci_get_device(PCI_VENDOR_ID_INTEL, VML_DEVICE_GPU, NULL); in vmlfb_get_gpu()
287 if (!par->gpu) { in vmlfb_get_gpu()
294 if (pci_enable_device(par->gpu) < 0) in vmlfb_get_gpu()
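
The three fragments above are the whole of the GPU lookup: the driver locates the companion GPU function by its PCI ID and enables it. A minimal sketch of how the lines fit together; the listing only shows lines that mention par, so the error-path details are assumptions (VML_DEVICE_GPU and struct vml_par come from the driver's own header):

/* Sketch of vmlfb_get_gpu(), reconstructed from the matching lines.
 * The error-path details (pci_dev_put(), the exact return codes) are
 * assumptions, not taken from the listing. */
static int vmlfb_get_gpu(struct vml_par *par)
{
	par->gpu = pci_get_device(PCI_VENDOR_ID_INTEL, VML_DEVICE_GPU, NULL);
	if (!par->gpu)
		return -ENODEV;		/* no GPU function present */

	if (pci_enable_device(par->gpu) < 0) {
		pci_dev_put(par->gpu);	/* assumed: drop the pci_get_device() reference */
		return -ENODEV;
	}

	return 0;
}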
323 static int vmlfb_enable_mmio(struct vml_par *par) in vmlfb_enable_mmio() argument
327 par->vdc_mem_base = pci_resource_start(par->vdc, 0); in vmlfb_enable_mmio()
328 par->vdc_mem_size = pci_resource_len(par->vdc, 0); in vmlfb_enable_mmio()
329 if (!request_mem_region(par->vdc_mem_base, par->vdc_mem_size, "vmlfb")) { in vmlfb_enable_mmio()
334 par->vdc_mem = ioremap_nocache(par->vdc_mem_base, par->vdc_mem_size); in vmlfb_enable_mmio()
335 if (par->vdc_mem == NULL) { in vmlfb_enable_mmio()
342 par->gpu_mem_base = pci_resource_start(par->gpu, 0); in vmlfb_enable_mmio()
343 par->gpu_mem_size = pci_resource_len(par->gpu, 0); in vmlfb_enable_mmio()
344 if (!request_mem_region(par->gpu_mem_base, par->gpu_mem_size, "vmlfb")) { in vmlfb_enable_mmio()
349 par->gpu_mem = ioremap_nocache(par->gpu_mem_base, par->gpu_mem_size); in vmlfb_enable_mmio()
350 if (par->gpu_mem == NULL) { in vmlfb_enable_mmio()
359 release_mem_region(par->gpu_mem_base, par->gpu_mem_size); in vmlfb_enable_mmio()
361 iounmap(par->vdc_mem); in vmlfb_enable_mmio()
363 release_mem_region(par->vdc_mem_base, par->vdc_mem_size); in vmlfb_enable_mmio()
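
The enable path requests and maps two MMIO ranges, BAR 0 of the VDC (the probed device) and BAR 0 of the GPU found above, and the fragments at lines 359-363 show the unwind running in reverse order. A sketch of the complete flow; the goto label names and return codes are assumptions, and ioremap_nocache() is kept as in the listing (on kernels from v5.6 onward it is plain ioremap()):

/* Sketch of vmlfb_enable_mmio(): map VDC BAR 0, then GPU BAR 0, and
 * unwind in reverse order on any failure. Label names are assumed. */
static int vmlfb_enable_mmio(struct vml_par *par)
{
	int err = -ENODEV;

	par->vdc_mem_base = pci_resource_start(par->vdc, 0);
	par->vdc_mem_size = pci_resource_len(par->vdc, 0);
	if (!request_mem_region(par->vdc_mem_base, par->vdc_mem_size, "vmlfb"))
		return -EBUSY;

	par->vdc_mem = ioremap_nocache(par->vdc_mem_base, par->vdc_mem_size);
	if (par->vdc_mem == NULL)
		goto out_err_0;

	par->gpu_mem_base = pci_resource_start(par->gpu, 0);
	par->gpu_mem_size = pci_resource_len(par->gpu, 0);
	if (!request_mem_region(par->gpu_mem_base, par->gpu_mem_size, "vmlfb")) {
		err = -EBUSY;
		goto out_err_1;
	}

	par->gpu_mem = ioremap_nocache(par->gpu_mem_base, par->gpu_mem_size);
	if (par->gpu_mem == NULL)
		goto out_err_2;

	return 0;

out_err_2:
	release_mem_region(par->gpu_mem_base, par->gpu_mem_size);	/* line 359 */
out_err_1:
	iounmap(par->vdc_mem);						/* line 361 */
out_err_0:
	release_mem_region(par->vdc_mem_base, par->vdc_mem_size);	/* line 363 */
	return err;
}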
371 static void vmlfb_disable_mmio(struct vml_par *par) in vmlfb_disable_mmio() argument
373 iounmap(par->gpu_mem); in vmlfb_disable_mmio()
374 release_mem_region(par->gpu_mem_base, par->gpu_mem_size); in vmlfb_disable_mmio()
375 iounmap(par->vdc_mem); in vmlfb_disable_mmio()
376 release_mem_region(par->vdc_mem_base, par->vdc_mem_size); in vmlfb_disable_mmio()
383 static void vmlfb_release_devices(struct vml_par *par) in vmlfb_release_devices() argument
385 if (atomic_dec_and_test(&par->refcount)) { in vmlfb_release_devices()
386 pci_disable_device(par->gpu); in vmlfb_release_devices()
387 pci_disable_device(par->vdc); in vmlfb_release_devices()
399 struct vml_par *par; in vml_pci_remove() local
404 par = vinfo->par; in vml_pci_remove()
409 vmlfb_disable_mmio(par); in vml_pci_remove()
410 vmlfb_release_devices(par); in vml_pci_remove()
412 kfree(par); in vml_pci_remove()
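
The remove path walks back through the probe path. Only the par-touching calls appear in the listing; the framebuffer unregistration and the vinfo cleanup in the sketch below are assumptions based on standard fbdev teardown, not on the listing:

/* Sketch of vml_pci_remove(): tear down in reverse of probe order.
 * Everything except the four par-related lines is assumed. */
static void vml_pci_remove(struct pci_dev *dev)
{
	struct fb_info *info = pci_get_drvdata(dev);		/* assumed */
	struct vml_info *vinfo;
	struct vml_par *par;

	if (!info)
		return;

	vinfo = container_of(info, struct vml_info, info);	/* assumed layout */
	par = vinfo->par;

	unregister_framebuffer(info);	/* assumed: stop fbdev clients first */
	/* assumed: VRAM buffers are released here */
	vmlfb_disable_mmio(par);
	vmlfb_release_devices(par);
	kfree(vinfo);			/* assumed */
	kfree(par);
}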
458 struct vml_par *par; in vml_pci_probe() local
461 par = kzalloc(sizeof(*par), GFP_KERNEL); in vml_pci_probe()
462 if (par == NULL) in vml_pci_probe()
471 vinfo->par = par; in vml_pci_probe()
472 par->vdc = dev; in vml_pci_probe()
473 atomic_set(&par->refcount, 1); in vml_pci_probe()
477 if ((err = vmlfb_get_gpu(par))) in vml_pci_probe()
489 err = vmlfb_enable_mmio(par); in vml_pci_probe()
511 info->par = par; in vml_pci_probe()
548 vmlfb_disable_mmio(par); in vml_pci_probe()
550 vmlfb_release_devices(par); in vml_pci_probe()
554 kfree(par); in vml_pci_probe()
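
The probe fragments fix the setup order: allocate par, wire it to the per-device vinfo, take the initial reference, find and enable the GPU, map the MMIO ranges, and finally point the registered fb_info at par. The fragments at lines 548-554 show the matching unwind. A condensed sketch of that order; the steps marked "assumed" and the label names are not in the listing:

/* Condensed sketch of vml_pci_probe(). Only the par-related lines come
 * from the listing; everything marked "assumed" is a reconstruction. */
static int vml_pci_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
	struct vml_info *vinfo;
	struct vml_par *par;
	int err;

	par = kzalloc(sizeof(*par), GFP_KERNEL);
	if (par == NULL)
		return -ENOMEM;

	vinfo = kzalloc(sizeof(*vinfo), GFP_KERNEL);	/* assumed allocation */
	if (vinfo == NULL) {
		err = -ENOMEM;
		goto out_err_free_par;
	}

	vinfo->par = par;
	par->vdc = dev;			/* the probed function is the VDC */
	atomic_set(&par->refcount, 1);	/* dropped by vmlfb_release_devices() */

	/* assumed: pci_enable_device(dev) for the VDC happens around here */

	err = vmlfb_get_gpu(par);	/* find and enable the companion GPU */
	if (err)
		goto out_err_free_vinfo;

	err = vmlfb_enable_mmio(par);	/* map the VDC and GPU register BARs */
	if (err)
		goto out_err_devices;

	/* assumed: VRAM allocation and fb_info setup go here */
	vinfo->info.par = par;

	err = register_framebuffer(&vinfo->info);	/* assumed final step */
	if (err)
		goto out_err_mmio;

	return 0;

out_err_mmio:
	vmlfb_disable_mmio(par);	/* line 548 */
out_err_devices:
	vmlfb_release_devices(par);	/* line 550 */
out_err_free_vinfo:
	kfree(vinfo);
out_err_free_par:
	kfree(par);			/* line 554 */
	return err;
}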
712 struct vml_par *par = vinfo->par; in vmlfb_disable_pipe() local
715 VML_WRITE32(par, VML_RCOMPSTAT, 0); in vmlfb_disable_pipe()
716 while (!(VML_READ32(par, VML_RCOMPSTAT) & VML_MDVO_VDC_I_RCOMP)) ; in vmlfb_disable_pipe()
719 VML_WRITE32(par, VML_DSPCCNTR, in vmlfb_disable_pipe()
720 VML_READ32(par, VML_DSPCCNTR) & ~VML_GFX_ENABLE); in vmlfb_disable_pipe()
721 (void)VML_READ32(par, VML_DSPCCNTR); in vmlfb_disable_pipe()
726 VML_WRITE32(par, VML_PIPEACONF, 0); in vmlfb_disable_pipe()
727 (void)VML_READ32(par, VML_PIPEACONF); in vmlfb_disable_pipe()
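
Pipe shutdown has a fixed order: the MDVO pads are turned off first (write 0 to RCOMPSTAT, then spin until the VML_MDVO_VDC_I_RCOMP status bit comes up), then the display plane is disabled, then pipe A itself. The dummy reads after the writes are posting reads that flush each MMIO write before the next step. A sketch of the whole function; only the register accesses come from the listing, and any delays the real code may insert between steps are omitted:

/* Sketch of vmlfb_disable_pipe(): pads, then plane, then pipe. */
static void vmlfb_disable_pipe(struct vml_info *vinfo)
{
	struct vml_par *par = vinfo->par;

	/* Disable the MDVO pads and wait for the handshake bit. */
	VML_WRITE32(par, VML_RCOMPSTAT, 0);
	while (!(VML_READ32(par, VML_RCOMPSTAT) & VML_MDVO_VDC_I_RCOMP))
		;

	/* Disable the display plane. */
	VML_WRITE32(par, VML_DSPCCNTR,
		    VML_READ32(par, VML_DSPCCNTR) & ~VML_GFX_ENABLE);
	(void)VML_READ32(par, VML_DSPCCNTR);	/* posting read */

	/* Disable pipe A. */
	VML_WRITE32(par, VML_PIPEACONF, 0);
	(void)VML_READ32(par, VML_PIPEACONF);	/* posting read */
}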
735 struct vml_par *par = vinfo->par; in vml_dump_regs() local
739 (unsigned)VML_READ32(par, VML_HTOTAL_A)); in vml_dump_regs()
741 (unsigned)VML_READ32(par, VML_HBLANK_A)); in vml_dump_regs()
743 (unsigned)VML_READ32(par, VML_HSYNC_A)); in vml_dump_regs()
745 (unsigned)VML_READ32(par, VML_VTOTAL_A)); in vml_dump_regs()
747 (unsigned)VML_READ32(par, VML_VBLANK_A)); in vml_dump_regs()
749 (unsigned)VML_READ32(par, VML_VSYNC_A)); in vml_dump_regs()
751 (unsigned)VML_READ32(par, VML_DSPCSTRIDE)); in vml_dump_regs()
753 (unsigned)VML_READ32(par, VML_DSPCSIZE)); in vml_dump_regs()
755 (unsigned)VML_READ32(par, VML_DSPCPOS)); in vml_dump_regs()
757 (unsigned)VML_READ32(par, VML_DSPARB)); in vml_dump_regs()
759 (unsigned)VML_READ32(par, VML_DSPCADDR)); in vml_dump_regs()
761 (unsigned)VML_READ32(par, VML_BCLRPAT_A)); in vml_dump_regs()
763 (unsigned)VML_READ32(par, VML_CANVSCLR_A)); in vml_dump_regs()
765 (unsigned)VML_READ32(par, VML_PIPEASRC)); in vml_dump_regs()
767 (unsigned)VML_READ32(par, VML_PIPEACONF)); in vml_dump_regs()
769 (unsigned)VML_READ32(par, VML_DSPCCNTR)); in vml_dump_regs()
771 (unsigned)VML_READ32(par, VML_RCOMPSTAT)); in vml_dump_regs()
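
Only every other source line of vml_dump_regs() matches because each dump entry is a two-line printk() statement: the format string sits on the first line and the VML_READ32(par, ...) argument on the continuation line, so only the continuation lines reference par. The assumed shape of one entry:

	/* Assumed shape of each dump entry; only the VML_READ32() line
	 * appears in the listing above. */
	printk(KERN_DEBUG "vmlfb: HTOTAL_A : 0x%08x\n",
	       (unsigned)VML_READ32(par, VML_HTOTAL_A));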
778 struct vml_par *par = vinfo->par; in vmlfb_set_par_locked() local
843 VML_WRITE32(par, VML_HTOTAL_A, ((htotal - 1) << 16) | (hactive - 1)); in vmlfb_set_par_locked()
844 VML_WRITE32(par, VML_HBLANK_A, in vmlfb_set_par_locked()
846 VML_WRITE32(par, VML_HSYNC_A, in vmlfb_set_par_locked()
848 VML_WRITE32(par, VML_VTOTAL_A, ((vtotal - 1) << 16) | (vactive - 1)); in vmlfb_set_par_locked()
849 VML_WRITE32(par, VML_VBLANK_A, in vmlfb_set_par_locked()
851 VML_WRITE32(par, VML_VSYNC_A, in vmlfb_set_par_locked()
853 VML_WRITE32(par, VML_DSPCSTRIDE, vinfo->stride); in vmlfb_set_par_locked()
854 VML_WRITE32(par, VML_DSPCSIZE, in vmlfb_set_par_locked()
856 VML_WRITE32(par, VML_DSPCPOS, 0x00000000); in vmlfb_set_par_locked()
857 VML_WRITE32(par, VML_DSPARB, VML_FIFO_DEFAULT); in vmlfb_set_par_locked()
858 VML_WRITE32(par, VML_BCLRPAT_A, 0x00000000); in vmlfb_set_par_locked()
859 VML_WRITE32(par, VML_CANVSCLR_A, 0x00000000); in vmlfb_set_par_locked()
860 VML_WRITE32(par, VML_PIPEASRC, in vmlfb_set_par_locked()
864 VML_WRITE32(par, VML_PIPEACONF, VML_PIPE_ENABLE); in vmlfb_set_par_locked()
866 VML_WRITE32(par, VML_DSPCCNTR, dspcntr); in vmlfb_set_par_locked()
868 VML_WRITE32(par, VML_DSPCADDR, (u32) vinfo->vram_start + in vmlfb_set_par_locked()
872 VML_WRITE32(par, VML_RCOMPSTAT, VML_MDVO_PAD_ENABLE); in vmlfb_set_par_locked()
874 while (!(VML_READ32(par, VML_RCOMPSTAT) & in vmlfb_set_par_locked()
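
The set_par fragments establish two things: the packing convention for the CRTC timing registers, ((end - 1) << 16) | (start - 1), and the enable order, pipe first, then plane, then scanout address, then MDVO pads, finishing with a wait for the pad handshake. Below is a sketch of the programming core as a hypothetical helper (in the listing this code sits inline in vmlfb_set_par_locked()); the derivation of the timings from info->var and every right-hand side the listing truncates (HBLANK/HSYNC/VBLANK/VSYNC, DSPCSIZE, PIPEASRC, DSPCADDR) are assumptions made to follow the visible HTOTAL/VTOTAL pattern:

/* Hypothetical helper showing the register sequence of
 * vmlfb_set_par_locked(); values marked "assumed" are reconstructions. */
static void vml_program_pipe_a(struct vml_info *vinfo, u32 dspcntr)
{
	struct vml_par *par = vinfo->par;
	struct fb_var_screeninfo *var = &vinfo->info.var;

	u32 hactive = var->xres;			/* assumed mapping from var */
	u32 hsync_start = hactive + var->right_margin;
	u32 hsync_end = hsync_start + var->hsync_len;
	u32 htotal = hsync_end + var->left_margin;
	u32 vactive = var->yres;
	u32 vsync_start = vactive + var->lower_margin;
	u32 vsync_end = vsync_start + var->vsync_len;
	u32 vtotal = vsync_end + var->upper_margin;

	/* Timing registers: ((end - 1) << 16) | (start - 1). */
	VML_WRITE32(par, VML_HTOTAL_A, ((htotal - 1) << 16) | (hactive - 1));
	VML_WRITE32(par, VML_HBLANK_A, ((htotal - 1) << 16) | (hactive - 1));	/* assumed: blank spans active..total */
	VML_WRITE32(par, VML_HSYNC_A, ((hsync_end - 1) << 16) | (hsync_start - 1));	/* assumed */
	VML_WRITE32(par, VML_VTOTAL_A, ((vtotal - 1) << 16) | (vactive - 1));
	VML_WRITE32(par, VML_VBLANK_A, ((vtotal - 1) << 16) | (vactive - 1));	/* assumed */
	VML_WRITE32(par, VML_VSYNC_A, ((vsync_end - 1) << 16) | (vsync_start - 1));	/* assumed */

	/* Plane geometry and FIFO setup. */
	VML_WRITE32(par, VML_DSPCSTRIDE, vinfo->stride);
	VML_WRITE32(par, VML_DSPCSIZE, ((vactive - 1) << 16) | (hactive - 1));	/* assumed packing */
	VML_WRITE32(par, VML_DSPCPOS, 0x00000000);
	VML_WRITE32(par, VML_DSPARB, VML_FIFO_DEFAULT);
	VML_WRITE32(par, VML_BCLRPAT_A, 0x00000000);
	VML_WRITE32(par, VML_CANVSCLR_A, 0x00000000);
	VML_WRITE32(par, VML_PIPEASRC, ((hactive - 1) << 16) | (vactive - 1));	/* assumed packing */

	/* Enable order: pipe, plane, scanout address, MDVO pads. */
	VML_WRITE32(par, VML_PIPEACONF, VML_PIPE_ENABLE);
	VML_WRITE32(par, VML_DSPCCNTR, dspcntr);	/* pixel-format bits | VML_GFX_ENABLE */
	VML_WRITE32(par, VML_DSPCADDR, (u32)vinfo->vram_start);	/* assumed: plus the pan offset */
	VML_WRITE32(par, VML_RCOMPSTAT, VML_MDVO_PAD_ENABLE);
	while (!(VML_READ32(par, VML_RCOMPSTAT) & VML_MDVO_VDC_I_RCOMP))
		;	/* wait for the pad handshake */
}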
900 struct vml_par *par = vinfo->par; in vmlfb_blank_locked() local
901 u32 cur = VML_READ32(par, VML_PIPEACONF); in vmlfb_blank_locked()
908 VML_WRITE32(par, VML_PIPEACONF, cur & ~VML_PIPE_FORCE_BORDER); in vmlfb_blank_locked()
909 (void)VML_READ32(par, VML_PIPEACONF); in vmlfb_blank_locked()
915 VML_WRITE32(par, VML_PIPEACONF, cur | VML_PIPE_FORCE_BORDER); in vmlfb_blank_locked()
916 (void)VML_READ32(par, VML_PIPEACONF); in vmlfb_blank_locked()
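
Blanking reads PIPEACONF once and then either clears or sets VML_PIPE_FORCE_BORDER, each write followed by a posting read. A sketch of how those two writes plausibly map onto the fbdev blank levels; the switch on the blank mode, where it is stored, and the power-down path via vmlfb_disable_pipe() are assumptions, so the helper below carries a made-up name rather than the driver's:

/* Hypothetical sketch of the blank logic; the FB_BLANK_* mapping is assumed. */
static int vml_blank_sketch(struct vml_info *vinfo, int blank_mode)
{
	struct vml_par *par = vinfo->par;
	u32 cur = VML_READ32(par, VML_PIPEACONF);

	switch (blank_mode) {
	case FB_BLANK_UNBLANK:
		/* Show the framebuffer again: stop forcing the border color. */
		VML_WRITE32(par, VML_PIPEACONF, cur & ~VML_PIPE_FORCE_BORDER);
		(void)VML_READ32(par, VML_PIPEACONF);	/* posting read */
		break;
	case FB_BLANK_NORMAL:
		/* Blank by forcing the border color across the pipe. */
		VML_WRITE32(par, VML_PIPEACONF, cur | VML_PIPE_FORCE_BORDER);
		(void)VML_READ32(par, VML_PIPEACONF);	/* posting read */
		break;
	default:
		/* Assumed: deeper blank levels shut the pipe down entirely. */
		vmlfb_disable_pipe(vinfo);
		break;
	}
	return 0;
}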
952 struct vml_par *par = vinfo->par; in vmlfb_pan_display() local
955 VML_WRITE32(par, VML_DSPCADDR, (u32) vinfo->vram_start + in vmlfb_pan_display()
958 (void)VML_READ32(par, VML_DSPCADDR); in vmlfb_pan_display()
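
Display panning is just a rewrite of the scanout address: the new (xoffset, yoffset) is folded into a byte offset from vram_start and written to VML_DSPCADDR, and the posting read flushes the write before the call returns. A sketch; the offset arithmetic and the container_of() layout are assumptions (the listing truncates the expression after vinfo->vram_start), and any locking the real handler takes is omitted:

/* Sketch of vmlfb_pan_display(); the byte-offset computation is an
 * assumption consistent with the DSPCSTRIDE programming above. */
static int vmlfb_pan_display(struct fb_var_screeninfo *var,
			     struct fb_info *info)
{
	struct vml_info *vinfo = container_of(info, struct vml_info, info);	/* assumed layout */
	struct vml_par *par = vinfo->par;

	VML_WRITE32(par, VML_DSPCADDR, (u32)vinfo->vram_start +
		    var->yoffset * vinfo->stride +
		    var->xoffset * (var->bits_per_pixel >> 3));
	(void)VML_READ32(par, VML_DSPCADDR);	/* posting read */

	return 0;
}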