hvs               650 drivers/gpu/drm/vc4/vc4_crtc.c 	spin_lock_irqsave(&vc4->hvs->mm_lock, flags);
hvs               651 drivers/gpu/drm/vc4/vc4_crtc.c 	ret = drm_mm_insert_node(&vc4->hvs->dlist_mm, &vc4_state->mm,
hvs               653 drivers/gpu/drm/vc4/vc4_crtc.c 	spin_unlock_irqrestore(&vc4->hvs->mm_lock, flags);
hvs               693 drivers/gpu/drm/vc4/vc4_crtc.c 	u32 __iomem *dlist_start = vc4->hvs->dlist + vc4_state->mm.start;
hvs              1000 drivers/gpu/drm/vc4/vc4_crtc.c 		spin_lock_irqsave(&vc4->hvs->mm_lock, flags);
hvs              1002 drivers/gpu/drm/vc4/vc4_crtc.c 		spin_unlock_irqrestore(&vc4->hvs->mm_lock, flags);
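
In drivers/gpu/drm/vc4, hvs is the Hardware Video Scaler, the block that composites planes by walking a display list (dlist). The vc4_crtc.c hits above are the dlist allocation pattern: the atomic-check path reserves dlist words from the HVS allocator under an irq-safe spinlock (650-653), and the lock/unlock pair at 1000-1002 brackets what is presumably the matching free (the middle line does not use the hvs identifier, so the search omits it). A minimal sketch of the pattern; my_alloc_dlist/my_free_dlist are hypothetical names:

#include <linux/spinlock.h>
#include <drm/drm_mm.h>

/* Reserve dlist words for a CRTC state. mm_lock is the irqsave kind
 * because nodes can also be released from interrupt context. */
static int my_alloc_dlist(struct vc4_hvs *hvs, struct drm_mm_node *node,
			  unsigned long dlist_count)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&hvs->mm_lock, flags);
	ret = drm_mm_insert_node(&hvs->dlist_mm, node, dlist_count);
	spin_unlock_irqrestore(&hvs->mm_lock, flags);

	return ret;
}

static void my_free_dlist(struct vc4_hvs *hvs, struct drm_mm_node *node)
{
	unsigned long flags;

	spin_lock_irqsave(&hvs->mm_lock, flags);
	if (drm_mm_node_allocated(node))
		drm_mm_remove_node(node);
	spin_unlock_irqrestore(&hvs->mm_lock, flags);
}

A drm_mm node only tracks an offset range; line 693 turns the allocation into a usable pointer by adding mm.start to the __iomem dlist base.
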
hvs                75 drivers/gpu/drm/vc4/vc4_drv.h 	struct vc4_hvs *hvs;
hvs               480 drivers/gpu/drm/vc4/vc4_drv.h #define HVS_READ(offset) readl(vc4->hvs->regs + offset)
hvs               481 drivers/gpu/drm/vc4/vc4_drv.h #define HVS_WRITE(offset, val) writel(val, vc4->hvs->regs + offset)
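
vc4_drv.h hangs the HVS state off struct vc4_dev and funnels register access through thin wrappers over readl()/writel(); note that both macros pick up a `vc4` pointer from the calling scope. Typical read-modify-write use, borrowing the real SCALER_DISPCTRL register and its ENABLE bit from vc4_regs.h:

/* Assumes a local "struct vc4_dev *vc4", as the macros require. */
u32 dispctrl = HVS_READ(SCALER_DISPCTRL);

dispctrl |= SCALER_DISPCTRL_ENABLE;
HVS_WRITE(SCALER_DISPCTRL, dispctrl);
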
hvs                67 drivers/gpu/drm/vc4/vc4_hvs.c 	struct drm_printer p = drm_info_printer(&vc4->hvs->pdev->dev);
hvs                70 drivers/gpu/drm/vc4/vc4_hvs.c 	drm_print_regset32(&p, &vc4->hvs->regset);
hvs                76 drivers/gpu/drm/vc4/vc4_hvs.c 			 readl((u32 __iomem *)vc4->hvs->dlist + i + 0),
hvs                77 drivers/gpu/drm/vc4/vc4_hvs.c 			 readl((u32 __iomem *)vc4->hvs->dlist + i + 1),
hvs                78 drivers/gpu/drm/vc4/vc4_hvs.c 			 readl((u32 __iomem *)vc4->hvs->dlist + i + 2),
hvs                79 drivers/gpu/drm/vc4/vc4_hvs.c 			 readl((u32 __iomem *)vc4->hvs->dlist + i + 3));
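
The vc4_hvs.c hits at 67-79 are the state-dump helper: print the register set through a drm_printer bound to the HVS platform device, then read the dlist RAM back four words per row. A sketch of the loop those readl() lines sit in; the bound and the format string are assumptions:

int i;

for (i = 0; i < 64; i += 4) {
	/* dlist is __iomem, so each word comes back via readl(). */
	drm_printf(&p, "dlist %03x: 0x%08x 0x%08x 0x%08x 0x%08x\n", i,
		   readl((u32 __iomem *)vc4->hvs->dlist + i + 0),
		   readl((u32 __iomem *)vc4->hvs->dlist + i + 1),
		   readl((u32 __iomem *)vc4->hvs->dlist + i + 2),
		   readl((u32 __iomem *)vc4->hvs->dlist + i + 3));
}
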
hvs               129 drivers/gpu/drm/vc4/vc4_hvs.c static int vc4_hvs_upload_linear_kernel(struct vc4_hvs *hvs,
hvs               136 drivers/gpu/drm/vc4/vc4_hvs.c 	ret = drm_mm_insert_node(&hvs->dlist_mm, space, VC4_KERNEL_DWORDS);
hvs               143 drivers/gpu/drm/vc4/vc4_hvs.c 	dst_kernel = hvs->dlist + space->start;
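
vc4_hvs_upload_linear_kernel() reserves VC4_KERNEL_DWORDS from the dlist allocator and copies the Mitchell/Netravali scaler coefficients into it. dst_kernel points at device memory, so the copy that follows line 143 has to go word by word; a sketch, with `kernel` standing for the coefficient array passed in:

u32 __iomem *dst_kernel = hvs->dlist + space->start;
int i;

/* memcpy() is not valid on __iomem; write each coefficient with
 * writel() instead. */
for (i = 0; i < VC4_KERNEL_DWORDS; i++)
	writel(kernel[i], &dst_kernel[i]);
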
hvs               223 drivers/gpu/drm/vc4/vc4_hvs.c 	struct vc4_hvs *hvs = NULL;
hvs               227 drivers/gpu/drm/vc4/vc4_hvs.c 	hvs = devm_kzalloc(&pdev->dev, sizeof(*hvs), GFP_KERNEL);
hvs               228 drivers/gpu/drm/vc4/vc4_hvs.c 	if (!hvs)
hvs               231 drivers/gpu/drm/vc4/vc4_hvs.c 	hvs->pdev = pdev;
hvs               233 drivers/gpu/drm/vc4/vc4_hvs.c 	hvs->regs = vc4_ioremap_regs(pdev, 0);
hvs               234 drivers/gpu/drm/vc4/vc4_hvs.c 	if (IS_ERR(hvs->regs))
hvs               235 drivers/gpu/drm/vc4/vc4_hvs.c 		return PTR_ERR(hvs->regs);
hvs               237 drivers/gpu/drm/vc4/vc4_hvs.c 	hvs->regset.base = hvs->regs;
hvs               238 drivers/gpu/drm/vc4/vc4_hvs.c 	hvs->regset.regs = hvs_regs;
hvs               239 drivers/gpu/drm/vc4/vc4_hvs.c 	hvs->regset.nregs = ARRAY_SIZE(hvs_regs);
hvs               241 drivers/gpu/drm/vc4/vc4_hvs.c 	hvs->dlist = hvs->regs + SCALER_DLIST_START;
hvs               243 drivers/gpu/drm/vc4/vc4_hvs.c 	spin_lock_init(&hvs->mm_lock);
hvs               250 drivers/gpu/drm/vc4/vc4_hvs.c 	drm_mm_init(&hvs->dlist_mm,
hvs               259 drivers/gpu/drm/vc4/vc4_hvs.c 	drm_mm_init(&hvs->lbm_mm, 0, 96 * 1024);
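
The detail worth calling out in this bind sequence is the dlist_mm range: the allocator starts at HVS_BOOTLOADER_DLIST_END rather than 0, so the firmware's display list, which is still being scanned out when the driver binds, is never handed out to a client. The size expression below is an assumption for the sketch:

/* Keep the bootloader's dlist (still live at bind time) outside the
 * allocator's range. */
drm_mm_init(&hvs->dlist_mm,
	    HVS_BOOTLOADER_DLIST_END,
	    (SCALER_DLIST_SIZE >> 2) - HVS_BOOTLOADER_DLIST_END);

The 96 KiB lbm_mm on the line above is the second allocator, covering the line-buffer memory that scaled planes borrow at runtime; with both allocators up, bind uploads the scaler kernel (264-270 below) before publishing vc4->hvs.
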
hvs               264 drivers/gpu/drm/vc4/vc4_hvs.c 	ret = vc4_hvs_upload_linear_kernel(hvs,
hvs               265 drivers/gpu/drm/vc4/vc4_hvs.c 					   &hvs->mitchell_netravali_filter,
hvs               270 drivers/gpu/drm/vc4/vc4_hvs.c 	vc4->hvs = hvs;
hvs               305 drivers/gpu/drm/vc4/vc4_hvs.c 	vc4_debugfs_add_regset32(drm, "hvs_regs", &hvs->regset);
hvs               318 drivers/gpu/drm/vc4/vc4_hvs.c 	if (vc4->hvs->mitchell_netravali_filter.allocated)
hvs               319 drivers/gpu/drm/vc4/vc4_hvs.c 		drm_mm_remove_node(&vc4->hvs->mitchell_netravali_filter);
hvs               321 drivers/gpu/drm/vc4/vc4_hvs.c 	drm_mm_takedown(&vc4->hvs->dlist_mm);
hvs               322 drivers/gpu/drm/vc4/vc4_hvs.c 	drm_mm_takedown(&vc4->hvs->lbm_mm);
hvs               324 drivers/gpu/drm/vc4/vc4_hvs.c 	vc4->hvs = NULL;
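
Unbind mirrors bind: drop the filter-kernel node if it was ever allocated, tear down both allocators, and clear the published pointer. Later kernels stopped reading the node's ->allocated field directly in favor of the accessor; the equivalent check, as a sketch:

if (drm_mm_node_allocated(&vc4->hvs->mitchell_netravali_filter))
	drm_mm_remove_node(&vc4->hvs->mitchell_netravali_filter);
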
hvs                33 drivers/gpu/drm/vc4/vc4_plane.c 	u32 hvs; /* HVS_FORMAT_* */
hvs                37 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_XRGB8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
hvs                41 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_ARGB8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
hvs                45 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_ABGR8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
hvs                49 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_XBGR8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
hvs                53 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_RGB565, .hvs = HVS_PIXEL_FORMAT_RGB565,
hvs                57 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_BGR565, .hvs = HVS_PIXEL_FORMAT_RGB565,
hvs                61 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_ARGB1555, .hvs = HVS_PIXEL_FORMAT_RGBA5551,
hvs                65 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_XRGB1555, .hvs = HVS_PIXEL_FORMAT_RGBA5551,
hvs                69 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_RGB888, .hvs = HVS_PIXEL_FORMAT_RGB888,
hvs                73 drivers/gpu/drm/vc4/vc4_plane.c 		.drm = DRM_FORMAT_BGR888, .hvs = HVS_PIXEL_FORMAT_RGB888,
hvs                78 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV422_3PLANE,
hvs                83 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV422_3PLANE,
hvs                88 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV420_3PLANE,
hvs                93 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV420_3PLANE,
hvs                98 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV420_2PLANE,
hvs               103 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV420_2PLANE,
hvs               108 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV422_2PLANE,
hvs               113 drivers/gpu/drm/vc4/vc4_plane.c 		.hvs = HVS_PIXEL_FORMAT_YCBCR_YUV422_2PLANE,
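
In vc4_plane.c, the hits from line 33 down are a translation table from DRM fourcc (.drm) to HVS pixel format (.hvs). Several fourccs collapse onto one HVS format because byte order is expressed by a separate pixel-order field in the same table, which this identifier search does not show. Lookup is a linear scan; a sketch assuming the table is named hvs_formats:

static const struct hvs_format *my_get_hvs_format(u32 drm_format)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(hvs_formats); i++) {
		if (hvs_formats[i].drm == drm_format)
			return &hvs_formats[i];
	}

	return NULL;
}
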
hvs               184 drivers/gpu/drm/vc4/vc4_plane.c 		spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);
hvs               186 drivers/gpu/drm/vc4/vc4_plane.c 		spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);
hvs               563 drivers/gpu/drm/vc4/vc4_plane.c 		spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);
hvs               564 drivers/gpu/drm/vc4/vc4_plane.c 		ret = drm_mm_insert_node_generic(&vc4->hvs->lbm_mm,
hvs               567 drivers/gpu/drm/vc4/vc4_plane.c 		spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);
hvs               599 drivers/gpu/drm/vc4/vc4_plane.c 	u32 hvs_format = format->hvs;
hvs               893 drivers/gpu/drm/vc4/vc4_plane.c 			u32 kernel = VC4_SET_FIELD(vc4->hvs->mitchell_netravali_filter.start,
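
Lines 563-567 allocate line-buffer memory (LBM) for a scaled plane, using drm_mm_insert_node_generic() because LBM placement has an alignment requirement; note it takes the same mm_lock as the dlist allocator. A sketch, with the alignment value assumed:

unsigned long irqflags;
int ret;

spin_lock_irqsave(&vc4->hvs->mm_lock, irqflags);
ret = drm_mm_insert_node_generic(&vc4->hvs->lbm_mm, &vc4_state->lbm,
				 lbm_size, 32 /* assumed alignment */,
				 0, 0);
spin_unlock_irqrestore(&vc4->hvs->mm_lock, irqflags);
if (ret)
	return ret;

Line 893 then closes the loop with bind: the dlist written for a scaled plane embeds the offset of the filter kernel uploaded by vc4_hvs_upload_linear_kernel(). The remaining hits switch subsystems entirely: in net/vmw_vsock/hyperv_transport.c, hvs is a struct hvsock, the per-socket state of the hv_sock (AF_VSOCK over Hyper-V) transport, and the pattern at 238-239 below, fetching the VMBus channel out of vsk->trans, recurs throughout the file.
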
hvs               238 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               239 net/vmw_vsock/hyperv_transport.c 	struct vmbus_channel *chan = hvs->chan;
hvs               290 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = NULL;
hvs               333 net/vmw_vsock/hyperv_transport.c 		hvs = vsock_sk(sk)->trans;
hvs               334 net/vmw_vsock/hyperv_transport.c 		hvs->chan = chan;
hvs               371 net/vmw_vsock/hyperv_transport.c 			hvs->chan = NULL;
hvs               423 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs;
hvs               426 net/vmw_vsock/hyperv_transport.c 	hvs = kzalloc(sizeof(*hvs), GFP_KERNEL);
hvs               427 net/vmw_vsock/hyperv_transport.c 	if (!hvs)
hvs               430 net/vmw_vsock/hyperv_transport.c 	vsk->trans = hvs;
hvs               431 net/vmw_vsock/hyperv_transport.c 	hvs->vsk = vsk;
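
Lines 423-431 create the per-socket state and wire the two-way linkage; the VMBus channel is attached later by the channel-offer callback (the hits at 290-334) and cleared again on the failure path at 371. A sketch of the init, under a hypothetical name:

#include <linux/slab.h>
#include <net/af_vsock.h>

static int my_hvs_sock_init(struct vsock_sock *vsk)
{
	struct hvsock *hvs;

	hvs = kzalloc(sizeof(*hvs), GFP_KERNEL);
	if (!hvs)
		return -ENOMEM;

	/* Two-way linkage: vsock core reaches transport state through
	 * vsk->trans, the transport reaches back through hvs->vsk. */
	vsk->trans = hvs;
	hvs->vsk = vsk;

	/* hvs->chan stays NULL until a VMBus channel offer arrives. */
	return 0;
}
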
hvs               453 net/vmw_vsock/hyperv_transport.c static void hvs_shutdown_lock_held(struct hvsock *hvs, int mode)
hvs               457 net/vmw_vsock/hyperv_transport.c 	if (hvs->fin_sent || !hvs->chan)
hvs               461 net/vmw_vsock/hyperv_transport.c 	(void)hvs_send_data(hvs->chan, (struct hvs_send_buf *)&hdr, 0);
hvs               462 net/vmw_vsock/hyperv_transport.c 	hvs->fin_sent = true;
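
hvs_shutdown_lock_held() is the FIN half-close: hv_sock has no control packets, so a zero-length data message stands in for FIN, and fin_sent makes the operation idempotent. The hvs_send_data() helper it calls never matches this identifier search; a sketch of its likely shape, inferred from the hvs_send_buf cast at line 461 and the stock vmbus_sendpacket() API (header field names are assumptions):

#include <linux/hyperv.h>

static int my_hvs_send_data(struct vmbus_channel *chan,
			    struct hvs_send_buf *send_buf, size_t to_write)
{
	/* Fill in the VMPIPE header, then push header + payload as a
	 * single in-band VMBus packet. */
	send_buf->hdr.pkt_type = 1;
	send_buf->hdr.data_size = to_write;
	return vmbus_sendpacket(chan, &send_buf->hdr,
				sizeof(send_buf->hdr) + to_write,
				0, VM_PKT_DATA_INBAND, 0);
}
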
hvs               531 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               532 net/vmw_vsock/hyperv_transport.c 	struct vmbus_channel *chan = hvs->chan;
hvs               537 net/vmw_vsock/hyperv_transport.c 	kfree(hvs);
hvs               563 net/vmw_vsock/hyperv_transport.c static int hvs_update_recv_data(struct hvsock *hvs)
hvs               568 net/vmw_vsock/hyperv_transport.c 	recv_buf = (struct hvs_recv_buf *)(hvs->recv_desc + 1);
hvs               575 net/vmw_vsock/hyperv_transport.c 		hvs->vsk->peer_shutdown |= SEND_SHUTDOWN;
hvs               577 net/vmw_vsock/hyperv_transport.c 	hvs->recv_data_len = payload_len;
hvs               578 net/vmw_vsock/hyperv_transport.c 	hvs->recv_data_off = 0;
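
hvs_update_recv_data() parses the header of the packet recv_desc points at; the search skips its middle lines, so here is the whole helper as a sketch. The payload starts immediately after the packet descriptor (hence the `+ 1`), a zero-length payload is the peer's FIN (the SEND_SHUTDOWN latch at line 575), and the sanity bound is an assumption:

static int my_hvs_update_recv_data(struct hvsock *hvs)
{
	struct hvs_recv_buf *recv_buf;
	u32 payload_len;

	recv_buf = (struct hvs_recv_buf *)(hvs->recv_desc + 1);
	payload_len = recv_buf->hdr.data_size;

	if (payload_len > HVS_MTU_SIZE)	/* assumed bound */
		return -EIO;

	/* A zero-length data message is the peer's FIN. */
	if (payload_len == 0)
		hvs->vsk->peer_shutdown |= SEND_SHUTDOWN;

	hvs->recv_data_len = payload_len;
	hvs->recv_data_off = 0;

	return 0;
}
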
hvs               586 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               587 net/vmw_vsock/hyperv_transport.c 	bool need_refill = !hvs->recv_desc;
hvs               596 net/vmw_vsock/hyperv_transport.c 		hvs->recv_desc = hv_pkt_iter_first(hvs->chan);
hvs               597 net/vmw_vsock/hyperv_transport.c 		ret = hvs_update_recv_data(hvs);
hvs               602 net/vmw_vsock/hyperv_transport.c 	recv_buf = (struct hvs_recv_buf *)(hvs->recv_desc + 1);
hvs               603 net/vmw_vsock/hyperv_transport.c 	to_read = min_t(u32, len, hvs->recv_data_len);
hvs               604 net/vmw_vsock/hyperv_transport.c 	ret = memcpy_to_msg(msg, recv_buf->data + hvs->recv_data_off, to_read);
hvs               608 net/vmw_vsock/hyperv_transport.c 	hvs->recv_data_len -= to_read;
hvs               609 net/vmw_vsock/hyperv_transport.c 	if (hvs->recv_data_len == 0) {
hvs               610 net/vmw_vsock/hyperv_transport.c 		hvs->recv_desc = hv_pkt_iter_next(hvs->chan, hvs->recv_desc);
hvs               611 net/vmw_vsock/hyperv_transport.c 		if (hvs->recv_desc) {
hvs               612 net/vmw_vsock/hyperv_transport.c 			ret = hvs_update_recv_data(hvs);
hvs               617 net/vmw_vsock/hyperv_transport.c 		hvs->recv_data_off += to_read;
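
The dequeue hits above also show the inbound-ring iterator contract: hv_pkt_iter_first()/hv_pkt_iter_next() walk in-band packets in the VMBus ring, a packet is released back to the ring only once recv_data_len reaches zero, and a partial read merely advances recv_data_off within the current packet.
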
hvs               626 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               627 net/vmw_vsock/hyperv_transport.c 	struct vmbus_channel *chan = hvs->chan;
hvs               656 net/vmw_vsock/hyperv_transport.c 		ret = hvs_send_data(hvs->chan, send_buf, to_write);
hvs               673 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               676 net/vmw_vsock/hyperv_transport.c 	if (hvs->recv_data_len > 0)
hvs               679 net/vmw_vsock/hyperv_transport.c 	switch (hvs_channel_readable_payload(hvs->chan)) {
hvs               697 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               699 net/vmw_vsock/hyperv_transport.c 	return hvs_channel_writable_bytes(hvs->chan);
hvs               709 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               711 net/vmw_vsock/hyperv_transport.c 	return hvs->chan != NULL;
hvs               725 net/vmw_vsock/hyperv_transport.c 	struct hvsock *hvs = vsk->trans;
hvs               727 net/vmw_vsock/hyperv_transport.c 	*readable = hvs_channel_readable(hvs->chan);
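
The remaining hits are the poll-side predicates: dequeue readiness consults buffered leftovers (recv_data_len) before asking the channel, writable space comes straight from the outbound ring, and "stream active" reduces to "a channel is attached". The readable check bottoms out in ring accounting; a sketch, treating HVS_PKT_LEN() as the file's header-plus-payload size macro:

#include <linux/hyperv.h>

static bool my_hvs_channel_readable(struct vmbus_channel *chan)
{
	u32 readable = hv_get_bytes_to_read(&chan->inbound);

	/* The smallest valid message is a zero-payload (FIN) packet. */
	return readable >= HVS_PKT_LEN(0);
}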