nvbe               20 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
nvbe               23 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		ttm_dma_tt_fini(&nvbe->ttm);
nvbe               24 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		kfree(nvbe);
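The first three hits fall in the backend destructor. Below is a minimal reconstruction from the listed lines, not verbatim kernel source, assuming the pre-5.10 TTM API (struct ttm_dma_tt, ttm_dma_tt_fini()) and that struct nouveau_sgdma_be embeds the ttm_dma_tt as its first member so the cast from struct ttm_tt * is valid:

	/* Sketch reconstructed from the xref hits above.
	 * Assumption: ttm is the first member, so a ttm_tt pointer can be
	 * cast back to the containing nouveau_sgdma_be.
	 */
	struct nouveau_sgdma_be {
		struct ttm_dma_tt ttm;
		struct nouveau_mem *mem;
	};

	static void
	nouveau_sgdma_destroy(struct ttm_tt *ttm)
	{
		struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;

		if (ttm) {
			ttm_dma_tt_fini(&nvbe->ttm);	/* release page/DMA state */
			kfree(nvbe);			/* free the wrapper itself */
		}
	}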
nvbe               31 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
nvbe               35 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	ret = nouveau_mem_host(reg, &nvbe->ttm);
nvbe               45 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	nvbe->mem = mem;
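The next three hits are the NV04-family bind callback: pin the pages through nouveau_mem_host(), and record the mem on the backend only once everything succeeded. The gap between the host call and the nvbe->mem assignment suggests a GPU mapping step in between; the nouveau_mem_map() call and the error handling in this sketch are assumptions:

	static int
	nv04_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *reg)
	{
		struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
		struct nouveau_mem *mem = nouveau_mem(reg);	/* assumed helper */
		int ret;

		ret = nouveau_mem_host(reg, &nvbe->ttm);
		if (ret)
			return ret;

		/* Assumed: pre-Tesla parts map into the GPU VMM at bind time. */
		ret = nouveau_mem_map(mem, &mem->cli->vmm.vmm, &mem->vma[0]);
		if (ret) {
			nouveau_mem_fini(mem);
			return ret;
		}

		nvbe->mem = mem;
		return 0;
	}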
nvbe               52 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
nvbe               53 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	nouveau_mem_fini(nvbe->mem);
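These two hits are the matching unbind, which tears down what bind set up. A sketch, assuming the int-returning callback signature of the ttm_backend_func era:

	static int
	nv04_sgdma_unbind(struct ttm_tt *ttm)
	{
		struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;

		nouveau_mem_fini(nvbe->mem);	/* undo the pinning done in bind */
		return 0;
	}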
nvbe               66 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
nvbe               70 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	ret = nouveau_mem_host(reg, &nvbe->ttm);
nvbe               74 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	nvbe->mem = mem;
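This group mirrors the NV04 bind, but with nothing between nouveau_mem_host() and the nvbe->mem assignment, presumably because on NV50 and later the GPU-side mapping is deferred to address-space bind rather than done here. Sketch under the same assumptions as above:

	static int
	nv50_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *reg)
	{
		struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
		struct nouveau_mem *mem = nouveau_mem(reg);	/* assumed helper */
		int ret;

		ret = nouveau_mem_host(reg, &nvbe->ttm);
		if (ret)
			return ret;

		/* No VMM map here: NV50+ maps when the VMA is bound instead. */
		nvbe->mem = mem;
		return 0;
	}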
nvbe               88 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe;
nvbe               90 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	nvbe = kzalloc(sizeof(*nvbe), GFP_KERNEL);
nvbe               91 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	if (!nvbe)
nvbe               95 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		nvbe->ttm.ttm.func = &nv04_sgdma_backend;
nvbe               97 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		nvbe->ttm.ttm.func = &nv50_sgdma_backend;
nvbe               99 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	if (ttm_dma_tt_init(&nvbe->ttm, bo, page_flags))
nvbe              106 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	return &nvbe->ttm.ttm;
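The final group is the constructor: allocate the backend, pick the per-generation ttm_backend_func, and initialise the DMA-aware ttm_tt. The family check and the kfree() on the ttm_dma_tt_init() failure path are assumptions inferred from the two func assignments and the hit layout:

	struct ttm_tt *
	nouveau_sgdma_create_ttm(struct ttm_buffer_object *bo, uint32_t page_flags)
	{
		struct nouveau_drm *drm = nouveau_bdev(bo->bdev);	/* assumed lookup */
		struct nouveau_sgdma_be *nvbe;

		nvbe = kzalloc(sizeof(*nvbe), GFP_KERNEL);
		if (!nvbe)
			return NULL;

		/* Assumed condition: pre-Tesla chips take the NV04 backend. */
		if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA)
			nvbe->ttm.ttm.func = &nv04_sgdma_backend;
		else
			nvbe->ttm.ttm.func = &nv50_sgdma_backend;

		if (ttm_dma_tt_init(&nvbe->ttm, bo, page_flags)) {
			kfree(nvbe);		/* assumed failure path */
			return NULL;
		}

		return &nvbe->ttm.ttm;
	}

Note that these hits predate the TTM rework around Linux 5.10, which dropped struct ttm_dma_tt and struct ttm_backend_func and moved bind/unbind into the driver-side device functions, so current kernels lay this file out quite differently.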