hvc 47 arch/arm/include/asm/proc-fns.h void (*reset)(unsigned long addr, bool hvc) __attribute__((noreturn));
hvc 96 arch/arm/include/asm/proc-fns.h extern void cpu_reset(unsigned long addr, bool hvc) __attribute__((noreturn));
hvc 332 arch/arm64/include/asm/insn.h __AARCH64_INSN_FUNCS(hvc, 0xFFE0001F, 0xD4000002)
hvc 63 arch/um/kernel/tlb.c static int do_ops(struct host_vm_change *hvc, int end,
hvc 70 arch/um/kernel/tlb.c op = &hvc->ops[i];
hvc 73 arch/um/kernel/tlb.c if (hvc->userspace)
hvc 74 arch/um/kernel/tlb.c ret = map(&hvc->mm->context.id, op->u.mmap.addr,
hvc 78 arch/um/kernel/tlb.c &hvc->data);
hvc 84 arch/um/kernel/tlb.c if (hvc->userspace)
hvc 85 arch/um/kernel/tlb.c ret = unmap(&hvc->mm->context.id,
hvc 88 arch/um/kernel/tlb.c &hvc->data);
hvc 96 arch/um/kernel/tlb.c if (hvc->userspace)
hvc 97 arch/um/kernel/tlb.c ret = protect(&hvc->mm->context.id,
hvc 101 arch/um/kernel/tlb.c finished, &hvc->data);
hvc 123 arch/um/kernel/tlb.c unsigned int prot, struct host_vm_change *hvc)
hvc 129 arch/um/kernel/tlb.c if (hvc->userspace)
hvc 133 arch/um/kernel/tlb.c if (hvc->index != 0) {
hvc 134 arch/um/kernel/tlb.c last = &hvc->ops[hvc->index - 1];
hvc 144 arch/um/kernel/tlb.c if (hvc->index == ARRAY_SIZE(hvc->ops)) {
hvc 145 arch/um/kernel/tlb.c ret = do_ops(hvc, ARRAY_SIZE(hvc->ops), 0);
hvc 146 arch/um/kernel/tlb.c hvc->index = 0;
hvc 149 arch/um/kernel/tlb.c hvc->ops[hvc->index++] = ((struct host_vm_op)
hvc 161 arch/um/kernel/tlb.c struct host_vm_change *hvc)
hvc 169 arch/um/kernel/tlb.c if (hvc->index != 0) {
hvc 170 arch/um/kernel/tlb.c last = &hvc->ops[hvc->index - 1];
hvc 178 arch/um/kernel/tlb.c if (hvc->index == ARRAY_SIZE(hvc->ops)) {
hvc 179 arch/um/kernel/tlb.c ret = do_ops(hvc, ARRAY_SIZE(hvc->ops), 0);
hvc 180 arch/um/kernel/tlb.c hvc->index = 0;
hvc 183 arch/um/kernel/tlb.c hvc->ops[hvc->index++] = ((struct host_vm_op)
hvc 191 arch/um/kernel/tlb.c unsigned int prot, struct host_vm_change *hvc)
hvc 196 arch/um/kernel/tlb.c if (hvc->index != 0) {
hvc 197 arch/um/kernel/tlb.c last = &hvc->ops[hvc->index - 1];
hvc 206 arch/um/kernel/tlb.c if (hvc->index == ARRAY_SIZE(hvc->ops)) {
hvc 207 arch/um/kernel/tlb.c ret = do_ops(hvc, ARRAY_SIZE(hvc->ops), 0);
hvc 208 arch/um/kernel/tlb.c hvc->index = 0;
hvc 211 arch/um/kernel/tlb.c hvc->ops[hvc->index++] = ((struct host_vm_op)
hvc 223 arch/um/kernel/tlb.c struct host_vm_change *hvc)
hvc 244 arch/um/kernel/tlb.c if (hvc->force || pte_newpage(*pte)) {
hvc 248 arch/um/kernel/tlb.c PAGE_SIZE, prot, hvc);
hvc 250 arch/um/kernel/tlb.c ret = add_munmap(addr, PAGE_SIZE, hvc);
hvc 252 arch/um/kernel/tlb.c ret = add_mprotect(addr, PAGE_SIZE, prot, hvc);
hvc 260 arch/um/kernel/tlb.c struct host_vm_change *hvc)
hvc 270 arch/um/kernel/tlb.c if (hvc->force || pmd_newpage(*pmd)) {
hvc 271 arch/um/kernel/tlb.c ret = add_munmap(addr, next - addr, hvc);
hvc 275 arch/um/kernel/tlb.c else ret = update_pte_range(pmd, addr, next, hvc);
hvc 282 arch/um/kernel/tlb.c struct host_vm_change *hvc)
hvc 292 arch/um/kernel/tlb.c if (hvc->force || pud_newpage(*pud)) {
hvc 293 arch/um/kernel/tlb.c ret = add_munmap(addr, next - addr, hvc);
hvc 297 arch/um/kernel/tlb.c else ret = update_pmd_range(pud, addr, next, hvc);
hvc 306 arch/um/kernel/tlb.c struct host_vm_change hvc;
hvc 310 arch/um/kernel/tlb.c hvc = INIT_HVC(mm, force, userspace);
hvc 316 arch/um/kernel/tlb.c ret = add_munmap(addr, next - addr, &hvc);
hvc 320 arch/um/kernel/tlb.c else ret = update_pud_range(pgd, addr, next, &hvc);
hvc 324 arch/um/kernel/tlb.c ret = do_ops(&hvc, hvc.index, 1);
hvc 346 arch/um/kernel/tlb.c struct host_vm_change hvc;
hvc 349 arch/um/kernel/tlb.c hvc = INIT_HVC(mm, force, userspace);
hvc 358 arch/um/kernel/tlb.c err = add_munmap(addr, last - addr, &hvc);
hvc 374 arch/um/kernel/tlb.c err = add_munmap(addr, last - addr, &hvc);
hvc 390 arch/um/kernel/tlb.c err = add_munmap(addr, last - addr, &hvc);
hvc 402 arch/um/kernel/tlb.c err = add_munmap(addr, PAGE_SIZE, &hvc);
hvc 408 arch/um/kernel/tlb.c PAGE_SIZE, 0, &hvc);
hvc 412 arch/um/kernel/tlb.c err = add_mprotect(addr, PAGE_SIZE, 0, &hvc);
hvc 417 arch/um/kernel/tlb.c err = do_ops(&hvc, hvc.index, 1);
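The arch/um/kernel/tlb.c hits above all revolve around one idea: the page-table walkers queue host mmap/munmap/mprotect requests into a fixed-size array inside struct host_vm_change, merge a new request into the previous entry when the ranges are contiguous, flush via do_ops() whenever the array fills, and flush the tail once more with the finished flag set. A minimal userspace sketch of that batching scheme follows; the types are simplified stand-ins and do_ops() only prints, so only the shape matches the kernel code:

/*
 * Simplified sketch of the arch/um/kernel/tlb.c batching pattern:
 * queue ops, merge adjacent ones, flush on overflow and at the end.
 */
#include <stdio.h>

#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))

enum op_type { MMAP, MUNMAP, MPROTECT };

struct host_vm_op {
	enum op_type type;
	unsigned long addr, len;
};

struct host_vm_change {
	struct host_vm_op ops[8];
	int index;
};

/* Stand-in for the real flush, which issues each op to the host. */
static int do_ops(struct host_vm_change *hvc, int end, int finished)
{
	for (int i = 0; i < end; i++)
		printf("op %d: type=%d addr=%#lx len=%#lx\n", i,
		       hvc->ops[i].type, hvc->ops[i].addr, hvc->ops[i].len);
	if (finished)
		printf("batch finished\n");
	return 0;
}

static int add_munmap(unsigned long addr, unsigned long len,
		      struct host_vm_change *hvc)
{
	struct host_vm_op *last;
	int ret = 0;

	/* Merge with the previous op if it is an adjacent munmap. */
	if (hvc->index != 0) {
		last = &hvc->ops[hvc->index - 1];
		if (last->type == MUNMAP && last->addr + last->len == addr) {
			last->len += len;
			return 0;
		}
	}
	/* Flush when the queue is full, then start a new batch. */
	if (hvc->index == ARRAY_SIZE(hvc->ops)) {
		ret = do_ops(hvc, ARRAY_SIZE(hvc->ops), 0);
		hvc->index = 0;
	}
	hvc->ops[hvc->index++] = (struct host_vm_op)
		{ .type = MUNMAP, .addr = addr, .len = len };
	return ret;
}

int main(void)
{
	struct host_vm_change hvc = { .index = 0 };

	/* Two contiguous runs separated by a hole: each run merges
	 * into a single queued op. */
	for (unsigned long a = 0x1000; a < 0x5000; a += 0x1000)
		add_munmap(a, 0x1000, &hvc);
	for (unsigned long a = 0x8000; a < 0xb000; a += 0x1000)
		add_munmap(a, 0x1000, &hvc);
	return do_ops(&hvc, hvc.index, 1);	/* flush the tail */
}

The merge step is what keeps the number of host calls proportional to the number of contiguous ranges rather than to the number of pages touched.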
hvc 76 drivers/char/virtio_console.c struct hvc_struct *hvc;
hvc 333 drivers/char/virtio_console.c if (port->cons.hvc)
hvc 1180 drivers/char/virtio_console.c hvc_resize(port->cons.hvc, port->cons.ws);
hvc 1250 drivers/char/virtio_console.c port->cons.hvc = hvc_alloc(port->cons.vtermno, 0, &hv_ops, PAGE_SIZE);
hvc 1251 drivers/char/virtio_console.c if (IS_ERR(port->cons.hvc)) {
hvc 1252 drivers/char/virtio_console.c ret = PTR_ERR(port->cons.hvc);
hvc 1255 drivers/char/virtio_console.c port->cons.hvc = NULL;
hvc 1379 drivers/char/virtio_console.c port->cons.hvc = NULL;
hvc 1538 drivers/char/virtio_console.c hvc_remove(port->cons.hvc);
hvc 1633 drivers/char/virtio_console.c port->cons.hvc->irq_requested = 1;
hvc 1795 drivers/char/virtio_console.c if (is_console_port(port) && hvc_poll(port->cons.hvc))
hvc 496 drivers/dma/mediatek/mtk-hsdma.c struct mtk_hsdma_vchan *hvc)
hvc 501 drivers/dma/mediatek/mtk-hsdma.c lockdep_assert_held(&hvc->vc.lock);
hvc 503 drivers/dma/mediatek/mtk-hsdma.c list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) {
hvc 529 drivers/dma/mediatek/mtk-hsdma.c list_move_tail(&vd->node, &hvc->desc_hw_processing);
hvc 535 drivers/dma/mediatek/mtk-hsdma.c struct mtk_hsdma_vchan *hvc;
hvc 584 drivers/dma/mediatek/mtk-hsdma.c hvc = to_hsdma_vchan(cb->vd->tx.chan);
hvc 586 drivers/dma/mediatek/mtk-hsdma.c spin_lock(&hvc->vc.lock);
hvc 594 drivers/dma/mediatek/mtk-hsdma.c if (hvc->issue_synchronize &&
hvc 595 drivers/dma/mediatek/mtk-hsdma.c list_empty(&hvc->desc_hw_processing)) {
hvc 596 drivers/dma/mediatek/mtk-hsdma.c complete(&hvc->issue_completion);
hvc 597 drivers/dma/mediatek/mtk-hsdma.c hvc->issue_synchronize = false;
hvc 599 drivers/dma/mediatek/mtk-hsdma.c spin_unlock(&hvc->vc.lock);
hvc 634 drivers/dma/mediatek/mtk-hsdma.c hvc = &hsdma->vc[i];
hvc 635 drivers/dma/mediatek/mtk-hsdma.c spin_lock(&hvc->vc.lock);
hvc 636 drivers/dma/mediatek/mtk-hsdma.c mtk_hsdma_issue_vchan_pending(hsdma, hvc);
hvc 637 drivers/dma/mediatek/mtk-hsdma.c spin_unlock(&hvc->vc.lock);
hvc 663 drivers/dma/mediatek/mtk-hsdma.c struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
hvc 666 drivers/dma/mediatek/mtk-hsdma.c list_for_each_entry(vd, &hvc->desc_hw_processing, node)
hvc 670 drivers/dma/mediatek/mtk-hsdma.c list_for_each_entry(vd, &hvc->vc.desc_issued, node)
hvc 681 drivers/dma/mediatek/mtk-hsdma.c struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
hvc 692 drivers/dma/mediatek/mtk-hsdma.c spin_lock_irqsave(&hvc->vc.lock, flags);
hvc 694 drivers/dma/mediatek/mtk-hsdma.c spin_unlock_irqrestore(&hvc->vc.lock, flags);
hvc 709 drivers/dma/mediatek/mtk-hsdma.c struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
hvc 712 drivers/dma/mediatek/mtk-hsdma.c spin_lock_irqsave(&hvc->vc.lock, flags);
hvc 714 drivers/dma/mediatek/mtk-hsdma.c if (vchan_issue_pending(&hvc->vc))
hvc 715 drivers/dma/mediatek/mtk-hsdma.c mtk_hsdma_issue_vchan_pending(hsdma, hvc);
hvc 717 drivers/dma/mediatek/mtk-hsdma.c spin_unlock_irqrestore(&hvc->vc.lock, flags);
hvc 758 drivers/dma/mediatek/mtk-hsdma.c struct mtk_hsdma_vchan *hvc = to_hsdma_vchan(c);
hvc 766 drivers/dma/mediatek/mtk-hsdma.c spin_lock(&hvc->vc.lock);
hvc 767 drivers/dma/mediatek/mtk-hsdma.c if (!list_empty(&hvc->desc_hw_processing)) {
hvc 768 drivers/dma/mediatek/mtk-hsdma.c hvc->issue_synchronize = true;
hvc 771 drivers/dma/mediatek/mtk-hsdma.c spin_unlock(&hvc->vc.lock);
hvc 774 drivers/dma/mediatek/mtk-hsdma.c wait_for_completion(&hvc->issue_completion);
hvc 779 drivers/dma/mediatek/mtk-hsdma.c WARN_ONCE(!list_empty(&hvc->desc_hw_processing),
hvc 783 drivers/dma/mediatek/mtk-hsdma.c vchan_synchronize(&hvc->vc);
hvc 785 drivers/dma/mediatek/mtk-hsdma.c WARN_ONCE(!list_empty(&hvc->vc.desc_completed),
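The mtk-hsdma hits trace out a small synchronization handshake between the descriptor-completion interrupt and the channel-synchronize path: the waiter sets issue_synchronize under the vchan lock if descriptors are still on desc_hw_processing, then blocks on issue_completion, which the interrupt side fires once that list drains. A hedged kernel-style sketch of just that handshake follows; my_vchan and the two function names are invented for illustration, and the real driver embeds these fields in struct mtk_hsdma_vchan:

/*
 * Sketch of the issue_synchronize/issue_completion handshake seen in
 * the mtk-hsdma hits above. issue_completion is assumed to have been
 * init_completion()'d at channel setup.
 */
#include <linux/spinlock.h>
#include <linux/list.h>
#include <linux/completion.h>

struct my_vchan {
	spinlock_t lock;
	struct list_head desc_hw_processing;
	bool issue_synchronize;
	struct completion issue_completion;
};

/* Called from the terminate/synchronize path; may sleep. */
static void my_vchan_wait_drained(struct my_vchan *vc)
{
	bool wait = false;

	spin_lock(&vc->lock);
	if (!list_empty(&vc->desc_hw_processing)) {
		vc->issue_synchronize = true;
		wait = true;
	}
	spin_unlock(&vc->lock);

	if (wait)
		wait_for_completion(&vc->issue_completion);
}

/*
 * Called from the completion-interrupt path with vc->lock held,
 * after finished descriptors have been moved off desc_hw_processing.
 */
static void my_vchan_maybe_complete(struct my_vchan *vc)
{
	if (vc->issue_synchronize && list_empty(&vc->desc_hw_processing)) {
		complete(&vc->issue_completion);
		vc->issue_synchronize = false;
	}
}

Testing the list and setting the flag under the same lock the interrupt path takes is what makes the "drained" check race-free: the interrupt either sees the flag and completes, or the waiter sees an already-empty list and never blocks.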
hvc 65 drivers/tty/hvc/hvc_iucv.c struct hvc_struct *hvc; /* HVC struct reference */
hvc 284 drivers/tty/hvc/hvc_iucv.c __hvc_resize(priv->hvc, *((struct winsize *) rb->mbuf->data));
hvc 1122 drivers/tty/hvc/hvc_iucv.c priv->hvc = hvc_alloc(HVC_IUCV_MAGIC + id, /* PAGE_SIZE */
hvc 1124 drivers/tty/hvc/hvc_iucv.c if (IS_ERR(priv->hvc)) {
hvc 1125 drivers/tty/hvc/hvc_iucv.c rc = PTR_ERR(priv->hvc);
hvc 1130 drivers/tty/hvc/hvc_iucv.c priv->hvc->irq_requested = 1;
hvc 1160 drivers/tty/hvc/hvc_iucv.c hvc_remove(priv->hvc);
hvc 1173 drivers/tty/hvc/hvc_iucv.c hvc_remove(priv->hvc);
hvc 40 drivers/tty/hvc/hvc_xen.c struct hvc_struct *hvc;
hvc 333 drivers/tty/hvc/hvc_xen.c if (info->hvc != NULL)
hvc 334 drivers/tty/hvc/hvc_xen.c hvc_remove(info->hvc);
hvc 335 drivers/tty/hvc/hvc_xen.c info->hvc = NULL;
hvc 383 drivers/tty/hvc/hvc_xen.c info->hvc = hvc_alloc(xenbus_devid_to_vtermno(devid),
hvc 385 drivers/tty/hvc/hvc_xen.c if (IS_ERR(info->hvc))
hvc 386 drivers/tty/hvc/hvc_xen.c return PTR_ERR(info->hvc);
hvc 549 drivers/tty/hvc/hvc_xen.c info->hvc = hvc_alloc(HVC_COOKIE, info->irq, ops, 256);
hvc 550 drivers/tty/hvc/hvc_xen.c if (IS_ERR(info->hvc)) {
hvc 551 drivers/tty/hvc/hvc_xen.c r = PTR_ERR(info->hvc);
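The virtio_console, hvc_iucv, and hvc_xen hits all exercise the same hvc_console backend contract: fill in a struct hv_ops with get_chars/put_chars callbacks, register with hvc_alloc(vtermno, irq/data, ops, outbuf_size), check the result with IS_ERR()/PTR_ERR(), and unregister with hvc_remove(). A skeletal sketch under those assumptions; MY_VTERMNO and the no-op callbacks are illustrative only, and hv_ops is declared in drivers/tty/hvc/hvc_console.h, which in-tree backends include directly:

/*
 * Sketch of the hvc backend registration pattern common to the
 * hvc_xen/hvc_iucv/virtio_console hits above.
 */
#include <linux/types.h>
#include <linux/err.h>
#include "hvc_console.h"	/* struct hv_ops, hvc_alloc(), hvc_remove() */

#define MY_VTERMNO 0	/* illustrative cookie; real backends pick a
			 * unique vterm number (e.g. HVC_COOKIE) */

static struct hvc_struct *my_hvc;

/* Feed bytes the transport has received to the tty layer. */
static int my_get_chars(uint32_t vtermno, char *buf, int count)
{
	return 0;	/* no data pending in this sketch */
}

/* Push bytes from the tty layer out to the transport. */
static int my_put_chars(uint32_t vtermno, const char *buf, int count)
{
	return count;	/* pretend everything was consumed */
}

static const struct hv_ops my_hv_ops = {
	.get_chars = my_get_chars,
	.put_chars = my_put_chars,
};

static int my_backend_init(void)
{
	/* irq/data of 0 means polled mode, as in the virtio_console
	 * hvc_alloc() call above. */
	my_hvc = hvc_alloc(MY_VTERMNO, 0, &my_hv_ops, 256);
	if (IS_ERR(my_hvc))
		return PTR_ERR(my_hvc);
	return 0;
}

static void my_backend_exit(void)
{
	if (my_hvc)
		hvc_remove(my_hvc);
}

With no interrupt cookie, the hvc core falls back to polling via hvc_poll(), which is why the virtio_console hit at line 1795 calls hvc_poll() by hand when console data arrives.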