ioatdca           111 drivers/dma/ioat/dca.c 	struct ioat_dca_priv *ioatdca = dca_priv(dca);
ioatdca           116 drivers/dma/ioat/dca.c 	for (i = 0; i < ioatdca->max_requesters; i++) {
ioatdca           117 drivers/dma/ioat/dca.c 		if (ioatdca->req_slots[i].pdev == pdev)
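The hits at kernel lines 111-117 above are the provider's "is this device already managed?" check: a linear scan of the fixed req_slots[] array looking for the given pci_dev. The sketch below is a minimal user-space model of that scan; the struct layout and every name in it are illustrative stand-ins, not the driver's actual definitions.

	#include <stdbool.h>
	#include <stddef.h>

	/* Illustrative stand-ins for the driver's per-slot bookkeeping. */
	struct req_slot_model {
		void *pdev;          /* owning device, NULL while the slot is free */
		unsigned short rid;  /* requester id handed to the hardware */
	};

	struct dca_priv_model {
		int max_requesters;
		int requester_count;
		struct req_slot_model req_slots[];   /* sized at allocation time */
	};

	/* Return true if @pdev already occupies one of the requester slots. */
	static bool model_dev_managed(const struct dca_priv_model *p, const void *pdev)
	{
		for (int i = 0; i < p->max_requesters; i++) {
			if (p->req_slots[i].pdev == pdev)
				return true;
		}
		return false;
	}

A plain linear scan is reasonable here because the table holds only one entry per hardware requester slot, so no index structure is needed.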
ioatdca           125 drivers/dma/ioat/dca.c 	struct ioat_dca_priv *ioatdca = dca_priv(dca);
ioatdca           137 drivers/dma/ioat/dca.c 	if (ioatdca->requester_count == ioatdca->max_requesters)
ioatdca           140 drivers/dma/ioat/dca.c 	for (i = 0; i < ioatdca->max_requesters; i++) {
ioatdca           141 drivers/dma/ioat/dca.c 		if (ioatdca->req_slots[i].pdev == NULL) {
ioatdca           143 drivers/dma/ioat/dca.c 			ioatdca->requester_count++;
ioatdca           144 drivers/dma/ioat/dca.c 			ioatdca->req_slots[i].pdev = pdev;
ioatdca           145 drivers/dma/ioat/dca.c 			ioatdca->req_slots[i].rid = id;
ioatdca           147 drivers/dma/ioat/dca.c 			      readw(ioatdca->dca_base + IOAT3_DCA_GREQID_OFFSET);
ioatdca           149 drivers/dma/ioat/dca.c 			       ioatdca->iobase + global_req_table + (i * 4));
ioatdca           160 drivers/dma/ioat/dca.c 	struct ioat_dca_priv *ioatdca = dca_priv(dca);
ioatdca           170 drivers/dma/ioat/dca.c 	for (i = 0; i < ioatdca->max_requesters; i++) {
ioatdca           171 drivers/dma/ioat/dca.c 		if (ioatdca->req_slots[i].pdev == pdev) {
ioatdca           173 drivers/dma/ioat/dca.c 			      readw(ioatdca->dca_base + IOAT3_DCA_GREQID_OFFSET);
ioatdca           174 drivers/dma/ioat/dca.c 			writel(0, ioatdca->iobase + global_req_table + (i * 4));
ioatdca           175 drivers/dma/ioat/dca.c 			ioatdca->req_slots[i].pdev = NULL;
ioatdca           176 drivers/dma/ioat/dca.c 			ioatdca->req_slots[i].rid = 0;
ioatdca           177 drivers/dma/ioat/dca.c 			ioatdca->requester_count--;
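Kernel lines 125-149 and 160-177 are the two halves of requester bookkeeping. Adding a requester bails out when the table is already full, claims the first slot whose pdev is NULL, records the device and its requester id, then publishes the id into the hardware's global requester-ID table (the table's offset is read back from IOAT3_DCA_GREQID_OFFSET before the writel()). Removing a requester finds the matching slot, clears its table entry with writel(0, ...), and releases the slot. Below is a self-contained user-space model of both paths; the MMIO table is replaced by a plain array, and the type names, sizes, and the "valid" flag are assumptions made for illustration only.

	#include <errno.h>
	#include <stddef.h>
	#include <stdint.h>

	#define MODEL_SLOTS        16
	#define MODEL_GREQID_VALID 0x8000u   /* placeholder "entry valid" flag */

	struct model_slot {
		void *pdev;        /* owning device, NULL when the slot is free */
		uint16_t rid;      /* requester id published to the hardware */
	};

	struct model_priv {
		int max_requesters;
		int requester_count;
		uint32_t greq_table[MODEL_SLOTS];   /* stands in for the MMIO table */
		struct model_slot req_slots[MODEL_SLOTS];
	};

	/* Claim the first free slot for @pdev and publish its requester id. */
	static int model_add_requester(struct model_priv *p, void *pdev, uint16_t id)
	{
		if (p->requester_count == p->max_requesters)
			return -ENODEV;

		for (int i = 0; i < p->max_requesters; i++) {
			if (p->req_slots[i].pdev == NULL) {
				p->requester_count++;
				p->req_slots[i].pdev = pdev;
				p->req_slots[i].rid = id;
				/* the driver does a writel() into BAR space here */
				p->greq_table[i] = (uint32_t)id | MODEL_GREQID_VALID;
				return i;
			}
		}
		return -EFAULT;    /* count said there was room, yet no slot was free */
	}

	/* Drop @pdev's registration: clear the table entry and recycle the slot. */
	static int model_remove_requester(struct model_priv *p, void *pdev)
	{
		for (int i = 0; i < p->max_requesters; i++) {
			if (p->req_slots[i].pdev == pdev) {
				p->greq_table[i] = 0;    /* writel(0, ...) in the driver */
				p->req_slots[i].pdev = NULL;
				p->req_slots[i].rid = 0;
				p->requester_count--;
				return i;
			}
		}
		return -ENODEV;
	}

Returning the slot index from both paths mirrors how the hits above use the same index i both for the req_slots[] entry and for the 4-byte stride into the global table (the (i * 4) in the writel() addresses).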
ioatdca           190 drivers/dma/ioat/dca.c 	struct ioat_dca_priv *ioatdca = dca_priv(dca);
ioatdca           198 drivers/dma/ioat/dca.c 		entry = ioatdca->tag_map[i];
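The two hits at kernel lines 190 and 198 come from the provider's tag computation, where each entry of tag_map[] contributes one bit of the DCA tag for a CPU. The exact entry encoding is not visible in these lines, so the sketch below only shows the general shape and simply assumes each entry selects one APIC-ID bit; treat the constants and the selection rule as placeholders, not the driver's real semantics.

	#include <stdint.h>

	#define MODEL_TAG_MAP_LEN 8

	/*
	 * Build a DCA tag for a CPU from a per-provider tag map.
	 * Assumed encoding (illustration only): each tag_map entry names the
	 * APIC-ID bit that supplies the corresponding bit of the tag.
	 */
	static uint8_t model_get_tag(const uint8_t tag_map[MODEL_TAG_MAP_LEN],
				     uint32_t apic_id)
	{
		uint8_t tag = 0;

		for (int i = 0; i < MODEL_TAG_MAP_LEN; i++) {
			uint8_t entry = tag_map[i];   /* the line-198 lookup */
			int value = (apic_id >> (entry & 0x1f)) & 1;

			tag |= (uint8_t)(value << i);
		}
		return tag;
	}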
ioatdca           260 drivers/dma/ioat/dca.c 	struct ioat_dca_priv *ioatdca;
ioatdca           289 drivers/dma/ioat/dca.c 				 struct_size(ioatdca, req_slots, slots));
ioatdca           293 drivers/dma/ioat/dca.c 	ioatdca = dca_priv(dca);
ioatdca           294 drivers/dma/ioat/dca.c 	ioatdca->iobase = iobase;
ioatdca           295 drivers/dma/ioat/dca.c 	ioatdca->dca_base = iobase + dca_offset;
ioatdca           296 drivers/dma/ioat/dca.c 	ioatdca->max_requesters = slots;
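Kernel lines 260-296 are provider setup: the private structure is allocated with struct_size(ioatdca, req_slots, slots), i.e. sized so its trailing req_slots[] flexible array holds one element per requester slot, and the MMIO base, the DCA register block base (iobase + dca_offset), and the slot count are stored in it. The standalone sketch below spells out that sizing with offsetof() for a user-space build (minus the overflow protection the kernel helper provides); the names are again placeholders.

	#include <stddef.h>
	#include <stdint.h>
	#include <stdlib.h>

	struct slot_model {
		void *pdev;
		uint16_t rid;
	};

	struct init_priv_model {
		void *iobase;     /* start of the device's MMIO window */
		void *dca_base;   /* iobase + offset of the DCA register block */
		int max_requesters;
		int requester_count;
		struct slot_model req_slots[];   /* one entry per requester slot */
	};

	/*
	 * Allocate the private struct with room for @slots trailing slot entries,
	 * a user-space equivalent of struct_size(ioatdca, req_slots, slots).
	 */
	static struct init_priv_model *model_alloc(void *iobase, size_t dca_offset,
						   int slots)
	{
		struct init_priv_model *p;

		p = calloc(1, offsetof(struct init_priv_model, req_slots) +
			      (size_t)slots * sizeof(p->req_slots[0]));
		if (!p)
			return NULL;

		p->iobase = iobase;
		p->dca_base = (char *)iobase + dca_offset;
		p->max_requesters = slots;
		return p;
	}

Sizing the allocation from the slot count at init time is what lets max_requesters drive all of the loops shown earlier without a separately allocated slot array.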
ioatdca           299 drivers/dma/ioat/dca.c 	csi_fsb_control = readw(ioatdca->dca_base + IOAT3_CSI_CONTROL_OFFSET);
ioatdca           303 drivers/dma/ioat/dca.c 		       ioatdca->dca_base + IOAT3_CSI_CONTROL_OFFSET);
ioatdca           305 drivers/dma/ioat/dca.c 	pcie_control = readw(ioatdca->dca_base + IOAT3_PCI_CONTROL_OFFSET);
ioatdca           309 drivers/dma/ioat/dca.c 		       ioatdca->dca_base + IOAT3_PCI_CONTROL_OFFSET);
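Kernel lines 299-309 are two read-modify-write sequences on 16-bit control registers (csi_fsb_control and pcie_control): each value is read with readw(), adjusted in the lines between the hits (not shown in this listing), and written back with writew() to the same offset. With the MMIO accessors replaced by a pointer to ordinary memory, the pattern reduces to the helper below; the bit argument stands in for whatever control flag the driver actually sets.

	#include <stdbool.h>
	#include <stdint.h>

	/*
	 * Set @bit in the 16-bit control word at @reg if it is not already set.
	 * In the driver the load and store would be readw()/writew() on MMIO;
	 * here @reg is ordinary memory so the example stands alone.
	 */
	static bool model_enable_bit(volatile uint16_t *reg, uint16_t bit)
	{
		uint16_t val = *reg;        /* readw() */

		if (val & bit)
			return false;       /* already enabled, nothing to write */

		*reg = val | bit;           /* writew() of the updated value */
		return true;
	}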
ioatdca           317 drivers/dma/ioat/dca.c 		readl(ioatdca->dca_base + IOAT3_APICID_TAG_MAP_OFFSET_LOW);
ioatdca           319 drivers/dma/ioat/dca.c 		readl(ioatdca->dca_base + IOAT3_APICID_TAG_MAP_OFFSET_HIGH);
ioatdca           322 drivers/dma/ioat/dca.c 		ioatdca->tag_map[i] = bit & DCA_TAG_MAP_MASK;
ioatdca           325 drivers/dma/ioat/dca.c 	if (dca3_tag_map_invalid(ioatdca->tag_map)) {
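The final hits (kernel lines 317-325) copy the APIC-ID-to-tag mapping out of a low/high pair of 32-bit registers into the provider's tag_map[] array, masking each byte with DCA_TAG_MAP_MASK, and line 325 then rejects the map if dca3_tag_map_invalid() says it looks bogus. Below is a self-contained model of the unpack-and-mask loop, assuming the two registers form one 64-bit value read byte by byte; the mask value is a placeholder, not the real DCA_TAG_MAP_MASK.

	#include <stdint.h>

	#define MODEL_TAG_MAP_MASK 0x3fu   /* placeholder for DCA_TAG_MAP_MASK */

	/* Unpack eight tag-map bytes from a low/high 32-bit register pair. */
	static void model_copy_tag_map(uint8_t tag_map[8], uint32_t low, uint32_t high)
	{
		uint64_t full = ((uint64_t)high << 32) | low;

		for (int i = 0; i < 8; i++)
			tag_map[i] = (uint8_t)(full >> (8 * i)) & MODEL_TAG_MAP_MASK;
	}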