arena 356 arch/alpha/kernel/core_cia.c struct pci_iommu_arena *arena = pci_isa_hose->sg_isa;
arena 375 arch/alpha/kernel/core_cia.c addr0 = arena->dma_base;
arena 449 arch/alpha/kernel/core_cia.c cia_pci_tbi(arena->hose, 0, -1);
arena 464 arch/alpha/kernel/core_cia.c arena->ptes[4] = pte0;
arena 488 arch/alpha/kernel/core_cia.c arena->ptes[5] = pte0;
arena 501 arch/alpha/kernel/core_cia.c arena->align_entry = 4;
arena 524 arch/alpha/kernel/core_cia.c arena->ptes[4] = 0;
arena 525 arch/alpha/kernel/core_cia.c arena->ptes[5] = 0;
arena 539 arch/alpha/kernel/core_cia.c alpha_mv.mv_pci_tbi(arena->hose, 0, -1);

arena 902 arch/alpha/kernel/core_marvel.c struct pci_iommu_arena *arena;
arena 918 arch/alpha/kernel/core_marvel.c aper->arena = agp->hose->sg_pci;
arena 920 arch/alpha/kernel/core_marvel.c aper->pg_start = iommu_reserve(aper->arena, aper->pg_count,
arena 930 arch/alpha/kernel/core_marvel.c aper->arena->dma_base + aper->pg_start * PAGE_SIZE;
arena 943 arch/alpha/kernel/core_marvel.c status = iommu_release(aper->arena, aper->pg_start, aper->pg_count);
arena 947 arch/alpha/kernel/core_marvel.c iommu_unbind(aper->arena, aper->pg_start, aper->pg_count);
arena 948 arch/alpha/kernel/core_marvel.c status = iommu_release(aper->arena, aper->pg_start,
arena 1026 arch/alpha/kernel/core_marvel.c return iommu_bind(aper->arena, aper->pg_start + pg_start,
arena 1034 arch/alpha/kernel/core_marvel.c return iommu_unbind(aper->arena, aper->pg_start + pg_start,
arena 1042 arch/alpha/kernel/core_marvel.c unsigned long baddr = addr - aper->arena->dma_base;
arena 1051 arch/alpha/kernel/core_marvel.c pte = aper->arena->ptes[baddr >> PAGE_SHIFT];

arena 585 arch/alpha/kernel/core_titan.c struct pci_iommu_arena *arena;
arena 602 arch/alpha/kernel/core_titan.c aper->arena = agp->hose->sg_pci;
arena 604 arch/alpha/kernel/core_titan.c aper->pg_start = iommu_reserve(aper->arena, aper->pg_count,
arena 613 arch/alpha/kernel/core_titan.c aper->arena->dma_base + aper->pg_start * PAGE_SIZE;
arena 626 arch/alpha/kernel/core_titan.c status = iommu_release(aper->arena, aper->pg_start, aper->pg_count);
arena 630 arch/alpha/kernel/core_titan.c iommu_unbind(aper->arena, aper->pg_start, aper->pg_count);
arena 631 arch/alpha/kernel/core_titan.c status = iommu_release(aper->arena, aper->pg_start,
arena 687 arch/alpha/kernel/core_titan.c return iommu_bind(aper->arena, aper->pg_start + pg_start,
arena 695 arch/alpha/kernel/core_titan.c return iommu_unbind(aper->arena, aper->pg_start + pg_start,
arena 703 arch/alpha/kernel/core_titan.c unsigned long baddr = addr - aper->arena->dma_base;
arena 712 arch/alpha/kernel/core_titan.c pte = aper->arena->ptes[baddr >> PAGE_SHIFT];

arena 63 arch/alpha/kernel/pci_iommu.c struct pci_iommu_arena *arena;
arena 77 arch/alpha/kernel/pci_iommu.c arena = memblock_alloc_node(sizeof(*arena), align, nid);
arena 78 arch/alpha/kernel/pci_iommu.c if (!NODE_DATA(nid) || !arena) {
arena 82 arch/alpha/kernel/pci_iommu.c arena = memblock_alloc(sizeof(*arena), SMP_CACHE_BYTES);
arena 83 arch/alpha/kernel/pci_iommu.c if (!arena)
arena 85 arch/alpha/kernel/pci_iommu.c sizeof(*arena));
arena 88 arch/alpha/kernel/pci_iommu.c arena->ptes = memblock_alloc_node(sizeof(*arena), align, nid);
arena 89 arch/alpha/kernel/pci_iommu.c if (!NODE_DATA(nid) || !arena->ptes) {
arena 93 arch/alpha/kernel/pci_iommu.c arena->ptes = memblock_alloc(mem_size, align);
arena 94 arch/alpha/kernel/pci_iommu.c if (!arena->ptes)
arena 101 arch/alpha/kernel/pci_iommu.c arena = memblock_alloc(sizeof(*arena), SMP_CACHE_BYTES);
arena 102 arch/alpha/kernel/pci_iommu.c if (!arena)
arena 104 arch/alpha/kernel/pci_iommu.c sizeof(*arena));
arena 105 arch/alpha/kernel/pci_iommu.c arena->ptes = memblock_alloc(mem_size, align);
arena 106 arch/alpha/kernel/pci_iommu.c if (!arena->ptes)
arena 112 arch/alpha/kernel/pci_iommu.c spin_lock_init(&arena->lock);
arena 113 arch/alpha/kernel/pci_iommu.c arena->hose = hose;
arena 114 arch/alpha/kernel/pci_iommu.c arena->dma_base = base;
arena 115 arch/alpha/kernel/pci_iommu.c arena->size = window_size;
arena 116 arch/alpha/kernel/pci_iommu.c arena->next_entry = 0;
arena 120 arch/alpha/kernel/pci_iommu.c arena->align_entry = 1;
arena 122 arch/alpha/kernel/pci_iommu.c return arena;
arena 134 arch/alpha/kernel/pci_iommu.c iommu_arena_find_pages(struct device *dev, struct pci_iommu_arena *arena,
arena 143 arch/alpha/kernel/pci_iommu.c base = arena->dma_base >> PAGE_SHIFT;
arena 152 arch/alpha/kernel/pci_iommu.c ptes = arena->ptes;
arena 153 arch/alpha/kernel/pci_iommu.c nent = arena->size >> PAGE_SHIFT;
arena 154 arch/alpha/kernel/pci_iommu.c p = ALIGN(arena->next_entry, mask + 1);
arena 176 arch/alpha/kernel/pci_iommu.c alpha_mv.mv_pci_tbi(arena->hose, 0, -1);
arena 192 arch/alpha/kernel/pci_iommu.c iommu_arena_alloc(struct device *dev, struct pci_iommu_arena *arena, long n,
arena 199 arch/alpha/kernel/pci_iommu.c spin_lock_irqsave(&arena->lock, flags);
arena 202 arch/alpha/kernel/pci_iommu.c ptes = arena->ptes;
arena 203 arch/alpha/kernel/pci_iommu.c mask = max(align, arena->align_entry) - 1;
arena 204 arch/alpha/kernel/pci_iommu.c p = iommu_arena_find_pages(dev, arena, n, mask);
arena 206 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 217 arch/alpha/kernel/pci_iommu.c arena->next_entry = p + n;
arena 218 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 224 arch/alpha/kernel/pci_iommu.c iommu_arena_free(struct pci_iommu_arena *arena, long ofs, long n)
arena 229 arch/alpha/kernel/pci_iommu.c p = arena->ptes + ofs;
arena 269 arch/alpha/kernel/pci_iommu.c struct pci_iommu_arena *arena;
arena 309 arch/alpha/kernel/pci_iommu.c arena = hose->sg_pci;
arena 310 arch/alpha/kernel/pci_iommu.c if (!arena || arena->dma_base + arena->size - 1 > max_dma)
arena 311 arch/alpha/kernel/pci_iommu.c arena = hose->sg_isa;
arena 318 arch/alpha/kernel/pci_iommu.c dma_ofs = iommu_arena_alloc(dev, arena, npages, align);
arena 327 arch/alpha/kernel/pci_iommu.c arena->ptes[i + dma_ofs] = mk_iommu_pte(paddr);
arena 329 arch/alpha/kernel/pci_iommu.c ret = arena->dma_base + dma_ofs * PAGE_SIZE;
arena 390 arch/alpha/kernel/pci_iommu.c struct pci_iommu_arena *arena;
arena 411 arch/alpha/kernel/pci_iommu.c arena = hose->sg_pci;
arena 412 arch/alpha/kernel/pci_iommu.c if (!arena || dma_addr < arena->dma_base)
arena 413 arch/alpha/kernel/pci_iommu.c arena = hose->sg_isa;
arena 415 arch/alpha/kernel/pci_iommu.c dma_ofs = (dma_addr - arena->dma_base) >> PAGE_SHIFT;
arena 416 arch/alpha/kernel/pci_iommu.c if (dma_ofs * PAGE_SIZE >= arena->size) {
arena 419 arch/alpha/kernel/pci_iommu.c dma_addr, arena->dma_base, arena->size);
arena 426 arch/alpha/kernel/pci_iommu.c spin_lock_irqsave(&arena->lock, flags);
arena 428 arch/alpha/kernel/pci_iommu.c iommu_arena_free(arena, dma_ofs, npages);
arena 433 arch/alpha/kernel/pci_iommu.c if (dma_ofs >= arena->next_entry)
arena 436 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 568 arch/alpha/kernel/pci_iommu.c struct scatterlist *out, struct pci_iommu_arena *arena,
arena 609 arch/alpha/kernel/pci_iommu.c dma_ofs = iommu_arena_alloc(dev, arena, npages, 0);
arena 618 arch/alpha/kernel/pci_iommu.c return sg_fill(dev, leader, end, out, arena, max_dma, dac_allowed);
arena 621 arch/alpha/kernel/pci_iommu.c out->dma_address = arena->dma_base + dma_ofs*PAGE_SIZE + paddr;
arena 629 arch/alpha/kernel/pci_iommu.c ptes = &arena->ptes[dma_ofs];
arena 672 arch/alpha/kernel/pci_iommu.c struct pci_iommu_arena *arena;
arena 699 arch/alpha/kernel/pci_iommu.c arena = hose->sg_pci;
arena 700 arch/alpha/kernel/pci_iommu.c if (!arena || arena->dma_base + arena->size - 1 > max_dma)
arena 701 arch/alpha/kernel/pci_iommu.c arena = hose->sg_isa;
arena 704 arch/alpha/kernel/pci_iommu.c arena = NULL;
arena 713 arch/alpha/kernel/pci_iommu.c if (sg_fill(dev, sg, end, out, arena, max_dma, dac_allowed) < 0)
arena 750 arch/alpha/kernel/pci_iommu.c struct pci_iommu_arena *arena;
arena 762 arch/alpha/kernel/pci_iommu.c arena = hose->sg_pci;
arena 763 arch/alpha/kernel/pci_iommu.c if (!arena || arena->dma_base + arena->size - 1 > max_dma)
arena 764 arch/alpha/kernel/pci_iommu.c arena = hose->sg_isa;
arena 768 arch/alpha/kernel/pci_iommu.c spin_lock_irqsave(&arena->lock, flags);
arena 800 arch/alpha/kernel/pci_iommu.c ofs = (addr - arena->dma_base) >> PAGE_SHIFT;
arena 801 arch/alpha/kernel/pci_iommu.c iommu_arena_free(arena, ofs, npages);
arena 811 arch/alpha/kernel/pci_iommu.c if ((fend - arena->dma_base) >> PAGE_SHIFT >= arena->next_entry)
arena 814 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 826 arch/alpha/kernel/pci_iommu.c struct pci_iommu_arena *arena;
arena 838 arch/alpha/kernel/pci_iommu.c arena = hose->sg_isa;
arena 839 arch/alpha/kernel/pci_iommu.c if (arena && arena->dma_base + arena->size - 1 <= mask)
arena 841 arch/alpha/kernel/pci_iommu.c arena = hose->sg_pci;
arena 842 arch/alpha/kernel/pci_iommu.c if (arena && arena->dma_base + arena->size - 1 <= mask)
arena 857 arch/alpha/kernel/pci_iommu.c iommu_reserve(struct pci_iommu_arena *arena, long pg_count, long align_mask)
arena 863 arch/alpha/kernel/pci_iommu.c if (!arena) return -EINVAL;
arena 865 arch/alpha/kernel/pci_iommu.c spin_lock_irqsave(&arena->lock, flags);
arena 868 arch/alpha/kernel/pci_iommu.c ptes = arena->ptes;
arena 869 arch/alpha/kernel/pci_iommu.c p = iommu_arena_find_pages(NULL, arena, pg_count, align_mask);
arena 871 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 881 arch/alpha/kernel/pci_iommu.c arena->next_entry = p + pg_count;
arena 882 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 888 arch/alpha/kernel/pci_iommu.c iommu_release(struct pci_iommu_arena *arena, long pg_start, long pg_count)
arena 893 arch/alpha/kernel/pci_iommu.c if (!arena) return -EINVAL;
arena 895 arch/alpha/kernel/pci_iommu.c ptes = arena->ptes;
arena 902 arch/alpha/kernel/pci_iommu.c iommu_arena_free(arena, pg_start, pg_count);
arena 907 arch/alpha/kernel/pci_iommu.c iommu_bind(struct pci_iommu_arena *arena, long pg_start, long pg_count,
arena 914 arch/alpha/kernel/pci_iommu.c if (!arena) return -EINVAL;
arena 916 arch/alpha/kernel/pci_iommu.c spin_lock_irqsave(&arena->lock, flags);
arena 918 arch/alpha/kernel/pci_iommu.c ptes = arena->ptes;
arena 922 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 930 arch/alpha/kernel/pci_iommu.c spin_unlock_irqrestore(&arena->lock, flags);
arena 936 arch/alpha/kernel/pci_iommu.c iommu_unbind(struct pci_iommu_arena *arena, long pg_start, long pg_count)
arena 941 arch/alpha/kernel/pci_iommu.c if (!arena) return -EINVAL;
arena 943 arch/alpha/kernel/pci_iommu.c p = arena->ptes + pg_start;
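The usages above pin down the shape of the Alpha scatter-gather arena. As a reading aid, here is a hedged reconstruction of struct pci_iommu_arena; the authoritative definition lives in arch/alpha/kernel/pci_impl.h, and the field types below are inferred from the call sites in this listing rather than copied from it:

    /* Sketch only: fields inferred from the usages listed above. */
    struct pci_iommu_arena {
        spinlock_t lock;                /* spin_lock_irqsave(&arena->lock, ...) */
        struct pci_controller *hose;    /* passed to alpha_mv.mv_pci_tbi() */
        unsigned long *ptes;            /* one translation entry per arena page */
        dma_addr_t dma_base;            /* first bus address of the DMA window */
        unsigned long size;             /* window size in bytes */
        unsigned long next_entry;       /* next-fit allocation cursor (pages) */
        unsigned long align_entry;      /* minimum allocation alignment (pages) */
    };

Allocation is a next-fit scan: iommu_arena_find_pages() starts at ALIGN(arena->next_entry, mask + 1), and a successful iommu_arena_alloc() advances next_entry past the pages it handed out, flushing the TLB via mv_pci_tbi() when the scan wraps.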
arena 27 drivers/nvdimm/btt.c static struct device *to_dev(struct arena_info *arena)
arena 29 drivers/nvdimm/btt.c return &arena->nd_btt->dev;
arena 37 drivers/nvdimm/btt.c static int arena_read_bytes(struct arena_info *arena, resource_size_t offset,
arena 40 drivers/nvdimm/btt.c struct nd_btt *nd_btt = arena->nd_btt;
arena 48 drivers/nvdimm/btt.c static int arena_write_bytes(struct arena_info *arena, resource_size_t offset,
arena 51 drivers/nvdimm/btt.c struct nd_btt *nd_btt = arena->nd_btt;
arena 59 drivers/nvdimm/btt.c static int btt_info_write(struct arena_info *arena, struct btt_sb *super)
arena 68 drivers/nvdimm/btt.c dev_WARN_ONCE(to_dev(arena), !IS_ALIGNED(arena->infooff, 512),
arena 69 drivers/nvdimm/btt.c "arena->infooff: %#llx is unaligned\n", arena->infooff);
arena 70 drivers/nvdimm/btt.c dev_WARN_ONCE(to_dev(arena), !IS_ALIGNED(arena->info2off, 512),
arena 71 drivers/nvdimm/btt.c "arena->info2off: %#llx is unaligned\n", arena->info2off);
arena 73 drivers/nvdimm/btt.c ret = arena_write_bytes(arena, arena->info2off, super,
arena 78 drivers/nvdimm/btt.c return arena_write_bytes(arena, arena->infooff, super,
arena 82 drivers/nvdimm/btt.c static int btt_info_read(struct arena_info *arena, struct btt_sb *super)
arena 84 drivers/nvdimm/btt.c return arena_read_bytes(arena, arena->infooff, super,
arena 94 drivers/nvdimm/btt.c static int __btt_map_write(struct arena_info *arena, u32 lba, __le32 mapping,
arena 97 drivers/nvdimm/btt.c u64 ns_off = arena->mapoff + (lba * MAP_ENT_SIZE);
arena 99 drivers/nvdimm/btt.c if (unlikely(lba >= arena->external_nlba))
arena 100 drivers/nvdimm/btt.c dev_err_ratelimited(to_dev(arena),
arena 102 drivers/nvdimm/btt.c __func__, lba, arena->external_nlba);
arena 103 drivers/nvdimm/btt.c return arena_write_bytes(arena, ns_off, &mapping, MAP_ENT_SIZE, flags);
arena 106 drivers/nvdimm/btt.c static int btt_map_write(struct arena_info *arena, u32 lba, u32 mapping,
arena 141 drivers/nvdimm/btt.c dev_err_ratelimited(to_dev(arena),
arena 147 drivers/nvdimm/btt.c return __btt_map_write(arena, lba, mapping_le, rwb_flags);
arena 150 drivers/nvdimm/btt.c static int btt_map_read(struct arena_info *arena, u32 lba, u32 *mapping,
arena 156 drivers/nvdimm/btt.c u64 ns_off = arena->mapoff + (lba * MAP_ENT_SIZE);
arena 158 drivers/nvdimm/btt.c if (unlikely(lba >= arena->external_nlba))
arena 159 drivers/nvdimm/btt.c dev_err_ratelimited(to_dev(arena),
arena 161 drivers/nvdimm/btt.c __func__, lba, arena->external_nlba);
arena 163 drivers/nvdimm/btt.c ret = arena_read_bytes(arena, ns_off, &in, MAP_ENT_SIZE, rwb_flags);
arena 206 drivers/nvdimm/btt.c static int btt_log_group_read(struct arena_info *arena, u32 lane,
arena 209 drivers/nvdimm/btt.c return arena_read_bytes(arena,
arena 210 drivers/nvdimm/btt.c arena->logoff + (lane * LOG_GRP_SIZE), log,
arena 258 drivers/nvdimm/btt.c struct arena_info *arena;
arena 265 drivers/nvdimm/btt.c list_for_each_entry(arena, &btt->arena_list, list) {
arena 266 drivers/nvdimm/btt.c arena_debugfs_init(arena, btt->debugfs_dir, i);
arena 327 drivers/nvdimm/btt.c static int btt_log_read(struct arena_info *arena, u32 lane,
arena 334 drivers/nvdimm/btt.c ret = btt_log_group_read(arena, lane, &log);
arena 338 drivers/nvdimm/btt.c old_ent = btt_log_get_old(arena, &log);
arena 340 drivers/nvdimm/btt.c dev_err(to_dev(arena),
arena 342 drivers/nvdimm/btt.c old_ent, lane, log.ent[arena->log_index[0]].seq,
arena 343 drivers/nvdimm/btt.c log.ent[arena->log_index[1]].seq);
arena 351 drivers/nvdimm/btt.c memcpy(ent, &log.ent[arena->log_index[ret_ent]], LOG_ENT_SIZE);
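btt_log_read() above relies on btt_log_get_old() to decide which of a lane's two log slots (arena->log_index[0] and [1]) is the older one. Sequence numbers cycle 1 -> 2 -> 3 -> 1 (see the "++seq == 4" wraparound under btt_flog_write below), so "older" cannot be a plain numeric comparison. A hedged sketch of the idea, using a hypothetical helper name; the in-tree helper also validates the pair and handles the all-zero initial state:

    /* Sketch: assumes both slots hold valid seq numbers in {1, 2, 3}. */
    static int pick_old_slot(u32 seq0, u32 seq1)
    {
        if (seq0 == seq1)
            return -EINVAL;     /* corrupt: both slots claim the same seq */
        /* slot 1 is "one ahead" of slot 0 on the 1,2,3,1,... cycle */
        if (seq0 + 1 == seq1 || (seq0 == 3 && seq1 == 1))
            return 0;           /* slot 0 is the older entry */
        return 1;               /* otherwise slot 1 is the older entry */
    }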
arena 361 drivers/nvdimm/btt.c static int __btt_log_write(struct arena_info *arena, u32 lane,
arena 365 drivers/nvdimm/btt.c u32 group_slot = arena->log_index[sub];
arena 370 drivers/nvdimm/btt.c ns_off = arena->logoff + (lane * LOG_GRP_SIZE) +
arena 373 drivers/nvdimm/btt.c ret = arena_write_bytes(arena, ns_off, src, log_half, flags);
arena 379 drivers/nvdimm/btt.c return arena_write_bytes(arena, ns_off, src, log_half, flags);
arena 382 drivers/nvdimm/btt.c static int btt_flog_write(struct arena_info *arena, u32 lane, u32 sub,
arena 387 drivers/nvdimm/btt.c ret = __btt_log_write(arena, lane, sub, ent, NVDIMM_IO_ATOMIC);
arena 392 drivers/nvdimm/btt.c arena->freelist[lane].sub = 1 - arena->freelist[lane].sub;
arena 393 drivers/nvdimm/btt.c if (++(arena->freelist[lane].seq) == 4)
arena 394 drivers/nvdimm/btt.c arena->freelist[lane].seq = 1;
arena 396 drivers/nvdimm/btt.c arena->freelist[lane].has_err = 1;
arena 397 drivers/nvdimm/btt.c arena->freelist[lane].block = ent_lba(le32_to_cpu(ent->old_map));
arena 406 drivers/nvdimm/btt.c static int btt_map_init(struct arena_info *arena)
arena 412 drivers/nvdimm/btt.c size_t mapsize = arena->logoff - arena->mapoff;
arena 423 drivers/nvdimm/btt.c dev_WARN_ONCE(to_dev(arena), !IS_ALIGNED(arena->mapoff, 512),
arena 424 drivers/nvdimm/btt.c "arena->mapoff: %#llx is unaligned\n", arena->mapoff);
arena 429 drivers/nvdimm/btt.c dev_WARN_ONCE(to_dev(arena), size < 512,
arena 431 drivers/nvdimm/btt.c ret = arena_write_bytes(arena, arena->mapoff + offset, zerobuf,
arena 450 drivers/nvdimm/btt.c static int btt_log_init(struct arena_info *arena)
arena 452 drivers/nvdimm/btt.c size_t logsize = arena->info2off - arena->logoff;
arena 467 drivers/nvdimm/btt.c dev_WARN_ONCE(to_dev(arena), !IS_ALIGNED(arena->logoff, 512),
arena 468 drivers/nvdimm/btt.c "arena->logoff: %#llx is unaligned\n", arena->logoff);
arena 473 drivers/nvdimm/btt.c dev_WARN_ONCE(to_dev(arena), size < 512,
arena 475 drivers/nvdimm/btt.c ret = arena_write_bytes(arena, arena->logoff + offset, zerobuf,
arena 485 drivers/nvdimm/btt.c for (i = 0; i < arena->nfree; i++) {
arena 487 drivers/nvdimm/btt.c ent.old_map = cpu_to_le32(arena->external_nlba + i);
arena 488 drivers/nvdimm/btt.c ent.new_map = cpu_to_le32(arena->external_nlba + i);
arena 490 drivers/nvdimm/btt.c ret = __btt_log_write(arena, i, 0, &ent, 0);
arena 500 drivers/nvdimm/btt.c static u64 to_namespace_offset(struct arena_info *arena, u64 lba)
arena 502 drivers/nvdimm/btt.c return arena->dataoff + ((u64)lba * arena->internal_lbasize);
arena 505 drivers/nvdimm/btt.c static int arena_clear_freelist_error(struct arena_info *arena, u32 lane)
arena 509 drivers/nvdimm/btt.c if (arena->freelist[lane].has_err) {
arena 511 drivers/nvdimm/btt.c u32 lba = arena->freelist[lane].block;
arena 512 drivers/nvdimm/btt.c u64 nsoff = to_namespace_offset(arena, lba);
arena 513 drivers/nvdimm/btt.c unsigned long len = arena->sector_size;
arena 515 drivers/nvdimm/btt.c mutex_lock(&arena->err_lock);
arena 520 drivers/nvdimm/btt.c ret = arena_write_bytes(arena, nsoff, zero_page,
arena 527 drivers/nvdimm/btt.c arena->freelist[lane].has_err = 0;
arena 529 drivers/nvdimm/btt.c mutex_unlock(&arena->err_lock);
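The freelist fields manipulated above (sub, seq, block, has_err) outline the per-lane bookkeeping record. A hedged sketch of struct free_entry, with meanings inferred from these call sites (the definition proper is in drivers/nvdimm/btt.h); one such entry per lane is kcalloc'd as arena->freelist in btt_freelist_init(), listed next:

    /* Sketch of the per-lane free-block record, inferred from usage. */
    struct free_entry {
        u32 block;      /* free postmap block to use for the lane's next write */
        u8 sub;         /* which log slot (log_index[0] or [1]) to write next */
        u8 seq;         /* next sequence number; cycles 1, 2, 3, 1, ... */
        u8 has_err;     /* block overlaps a known bad range; zero it first */
    };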
arena 534 drivers/nvdimm/btt.c static int btt_freelist_init(struct arena_info *arena)
arena 540 drivers/nvdimm/btt.c arena->freelist = kcalloc(arena->nfree, sizeof(struct free_entry),
arena 542 drivers/nvdimm/btt.c if (!arena->freelist)
arena 545 drivers/nvdimm/btt.c for (i = 0; i < arena->nfree; i++) {
arena 546 drivers/nvdimm/btt.c new = btt_log_read(arena, i, &log_new, LOG_NEW_ENT);
arena 555 drivers/nvdimm/btt.c arena->freelist[i].sub = 1 - new;
arena 556 drivers/nvdimm/btt.c arena->freelist[i].seq = nd_inc_seq(le32_to_cpu(log_new.seq));
arena 557 drivers/nvdimm/btt.c arena->freelist[i].block = log_oldmap;
arena 565 drivers/nvdimm/btt.c arena->freelist[i].has_err = 1;
arena 566 drivers/nvdimm/btt.c ret = arena_clear_freelist_error(arena, i);
arena 568 drivers/nvdimm/btt.c dev_err_ratelimited(to_dev(arena),
arena 577 drivers/nvdimm/btt.c ret = btt_map_read(arena, le32_to_cpu(log_new.lba), &map_entry,
arena 594 drivers/nvdimm/btt.c ret = btt_map_write(arena, le32_to_cpu(log_new.lba),
arena 623 drivers/nvdimm/btt.c static int log_set_indices(struct arena_info *arena)
arena 631 drivers/nvdimm/btt.c for (i = 0; i < arena->nfree; i++) {
arena 632 drivers/nvdimm/btt.c ret = btt_log_group_read(arena, i, &log);
arena 710 drivers/nvdimm/btt.c dev_err(to_dev(arena), "Found an unknown padding scheme\n");
arena 714 drivers/nvdimm/btt.c arena->log_index[0] = log_index[0];
arena 715 drivers/nvdimm/btt.c arena->log_index[1] = log_index[1];
arena 716 drivers/nvdimm/btt.c dev_dbg(to_dev(arena), "log_index_0 = %d\n", log_index[0]);
arena 717 drivers/nvdimm/btt.c dev_dbg(to_dev(arena), "log_index_1 = %d\n", log_index[1]);
arena 721 drivers/nvdimm/btt.c static int btt_rtt_init(struct arena_info *arena)
arena 723 drivers/nvdimm/btt.c arena->rtt = kcalloc(arena->nfree, sizeof(u32), GFP_KERNEL);
arena 724 drivers/nvdimm/btt.c if (arena->rtt == NULL)
arena 730 drivers/nvdimm/btt.c static int btt_maplocks_init(struct arena_info *arena)
arena 734 drivers/nvdimm/btt.c arena->map_locks = kcalloc(arena->nfree, sizeof(struct aligned_lock),
arena 736 drivers/nvdimm/btt.c if (!arena->map_locks)
arena 739 drivers/nvdimm/btt.c for (i = 0; i < arena->nfree; i++)
arena 740 drivers/nvdimm/btt.c spin_lock_init(&arena->map_locks[i].lock);
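Taken together, the initializers above (btt_freelist_init, btt_rtt_init, btt_maplocks_init) and the field accesses throughout this listing outline the in-memory arena descriptor. A hedged partial sketch of struct arena_info, assembled from these usages; the full definition is in drivers/nvdimm/btt.h:

    /* Partial sketch, inferred from the usages in this listing. */
    struct arena_info {
        u64 size;                       /* arena footprint on media */
        u64 external_lba_start;         /* first external LBA in this arena */
        u32 internal_nlba, internal_lbasize;
        u32 external_nlba, external_lbasize;
        u32 nfree;                      /* lanes == free blocks == log groups */
        u16 version_major, version_minor;
        u32 sector_size;
        u64 nextoff, infooff, dataoff, mapoff, logoff, info2off;
        struct free_entry *freelist;    /* per-lane free block (sketched above) */
        u32 *rtt;                       /* read tracking table, one slot per lane */
        struct aligned_lock *map_locks; /* striped locks for map updates */
        struct nd_btt *nd_btt;
        struct list_head list;          /* linkage on btt->arena_list */
        struct dentry *debugfs_dir;
        u32 flags;                      /* info-block flags (IB_FLAG_*) */
        struct mutex err_lock;          /* serializes freelist error clearing */
        int log_index[2];               /* which two log slots are in use */
    };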
arena 748 drivers/nvdimm/btt.c struct arena_info *arena;
arena 752 drivers/nvdimm/btt.c arena = kzalloc(sizeof(struct arena_info), GFP_KERNEL);
arena 753 drivers/nvdimm/btt.c if (!arena)
arena 755 drivers/nvdimm/btt.c arena->nd_btt = btt->nd_btt;
arena 756 drivers/nvdimm/btt.c arena->sector_size = btt->sector_size;
arena 757 drivers/nvdimm/btt.c mutex_init(&arena->err_lock);
arena 760 drivers/nvdimm/btt.c return arena;
arena 762 drivers/nvdimm/btt.c arena->size = size;
arena 763 drivers/nvdimm/btt.c arena->external_lba_start = start;
arena 764 drivers/nvdimm/btt.c arena->external_lbasize = btt->lbasize;
arena 765 drivers/nvdimm/btt.c arena->internal_lbasize = roundup(arena->external_lbasize,
arena 767 drivers/nvdimm/btt.c arena->nfree = BTT_DEFAULT_NFREE;
arena 768 drivers/nvdimm/btt.c arena->version_major = btt->nd_btt->version_major;
arena 769 drivers/nvdimm/btt.c arena->version_minor = btt->nd_btt->version_minor;
arena 778 drivers/nvdimm/btt.c logsize = roundup(arena->nfree * LOG_GRP_SIZE, BTT_PG_SIZE);
arena 782 drivers/nvdimm/btt.c arena->internal_nlba = div_u64(available - BTT_PG_SIZE,
arena 783 drivers/nvdimm/btt.c arena->internal_lbasize + MAP_ENT_SIZE);
arena 784 drivers/nvdimm/btt.c arena->external_nlba = arena->internal_nlba - arena->nfree;
arena 786 drivers/nvdimm/btt.c mapsize = roundup((arena->external_nlba * MAP_ENT_SIZE), BTT_PG_SIZE);
arena 790 drivers/nvdimm/btt.c arena->infooff = arena_off;
arena 791 drivers/nvdimm/btt.c arena->dataoff = arena->infooff + BTT_PG_SIZE;
arena 792 drivers/nvdimm/btt.c arena->mapoff = arena->dataoff + datasize;
arena 793 drivers/nvdimm/btt.c arena->logoff = arena->mapoff + mapsize;
arena 794 drivers/nvdimm/btt.c arena->info2off = arena->logoff + logsize;
arena 797 drivers/nvdimm/btt.c arena->log_index[0] = 0;
arena 798 drivers/nvdimm/btt.c arena->log_index[1] = 1;
arena 799 drivers/nvdimm/btt.c return arena;
arena 804 drivers/nvdimm/btt.c struct arena_info *arena, *next;
arena 806 drivers/nvdimm/btt.c list_for_each_entry_safe(arena, next, &btt->arena_list, list) {
arena 807 drivers/nvdimm/btt.c list_del(&arena->list);
arena 808 drivers/nvdimm/btt.c kfree(arena->rtt);
arena 809 drivers/nvdimm/btt.c kfree(arena->map_locks);
arena 810 drivers/nvdimm/btt.c kfree(arena->freelist);
arena 811 drivers/nvdimm/btt.c debugfs_remove_recursive(arena->debugfs_dir);
arena 812 drivers/nvdimm/btt.c kfree(arena);
arena 820 drivers/nvdimm/btt.c static void parse_arena_meta(struct arena_info *arena, struct btt_sb *super,
arena 823 drivers/nvdimm/btt.c arena->internal_nlba = le32_to_cpu(super->internal_nlba);
arena 824 drivers/nvdimm/btt.c arena->internal_lbasize = le32_to_cpu(super->internal_lbasize);
arena 825 drivers/nvdimm/btt.c arena->external_nlba = le32_to_cpu(super->external_nlba);
arena 826 drivers/nvdimm/btt.c arena->external_lbasize = le32_to_cpu(super->external_lbasize);
arena 827 drivers/nvdimm/btt.c arena->nfree = le32_to_cpu(super->nfree);
arena 828 drivers/nvdimm/btt.c arena->version_major = le16_to_cpu(super->version_major);
arena 829 drivers/nvdimm/btt.c arena->version_minor = le16_to_cpu(super->version_minor);
arena 831 drivers/nvdimm/btt.c arena->nextoff = (super->nextoff == 0) ? 0 : (arena_off +
arena 833 drivers/nvdimm/btt.c arena->infooff = arena_off;
arena 834 drivers/nvdimm/btt.c arena->dataoff = arena_off + le64_to_cpu(super->dataoff);
arena 835 drivers/nvdimm/btt.c arena->mapoff = arena_off + le64_to_cpu(super->mapoff);
arena 836 drivers/nvdimm/btt.c arena->logoff = arena_off + le64_to_cpu(super->logoff);
arena 837 drivers/nvdimm/btt.c arena->info2off = arena_off + le64_to_cpu(super->info2off);
arena 839 drivers/nvdimm/btt.c arena->size = (le64_to_cpu(super->nextoff) > 0)
arena 841 drivers/nvdimm/btt.c : (arena->info2off - arena->infooff + BTT_PG_SIZE);
arena 843 drivers/nvdimm/btt.c arena->flags = le32_to_cpu(super->flags);
arena 849 drivers/nvdimm/btt.c struct arena_info *arena;
arena 862 drivers/nvdimm/btt.c arena = alloc_arena(btt, 0, 0, 0);
arena 863 drivers/nvdimm/btt.c if (!arena) {
arena 868 drivers/nvdimm/btt.c arena->infooff = cur_off;
arena 869 drivers/nvdimm/btt.c ret = btt_info_read(arena, super);
arena 876 drivers/nvdimm/btt.c dev_info(to_dev(arena), "No existing arenas\n");
arena 879 drivers/nvdimm/btt.c dev_err(to_dev(arena),
arena 886 drivers/nvdimm/btt.c arena->external_lba_start = cur_nlba;
arena 887 drivers/nvdimm/btt.c parse_arena_meta(arena, super, cur_off);
arena 889 drivers/nvdimm/btt.c ret = log_set_indices(arena);
arena 891 drivers/nvdimm/btt.c dev_err(to_dev(arena),
arena 896 drivers/nvdimm/btt.c ret = btt_freelist_init(arena);
arena 900 drivers/nvdimm/btt.c ret = btt_rtt_init(arena);
arena 904 drivers/nvdimm/btt.c ret = btt_maplocks_init(arena);
arena 908 drivers/nvdimm/btt.c list_add_tail(&arena->list, &btt->arena_list);
arena 910 drivers/nvdimm/btt.c remaining -= arena->size;
arena 911 drivers/nvdimm/btt.c cur_off += arena->size;
arena 912 drivers/nvdimm/btt.c cur_nlba += arena->external_nlba;
arena 915 drivers/nvdimm/btt.c if (arena->nextoff == 0)
arena 926 drivers/nvdimm/btt.c kfree(arena);
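alloc_arena() lays each arena out as five consecutive on-media regions, and parse_arena_meta() recovers the same offsets from the info block (which stores them relative to infooff; see btt_arena_write_layout below). The assignments above imply this layout:

    /*
     * On-media arena layout implied by alloc_arena():
     *
     *   infooff  = arena_off                 primary info block (BTT_PG_SIZE)
     *   dataoff  = infooff + BTT_PG_SIZE     data blocks (internal_lbasize each)
     *   mapoff   = dataoff + datasize        map: one MAP_ENT_SIZE entry per
     *                                        external LBA
     *   logoff   = mapoff + mapsize          log: one LOG_GRP_SIZE group per lane
     *   info2off = logoff + logsize          backup info block
     */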
arena 939 drivers/nvdimm/btt.c struct arena_info *arena;
arena 946 drivers/nvdimm/btt.c arena = alloc_arena(btt, arena_size, btt->nlba, cur_off);
arena 947 drivers/nvdimm/btt.c if (!arena) {
arena 951 drivers/nvdimm/btt.c btt->nlba += arena->external_nlba;
arena 953 drivers/nvdimm/btt.c arena->nextoff = arena->size;
arena 955 drivers/nvdimm/btt.c arena->nextoff = 0;
arena 957 drivers/nvdimm/btt.c list_add_tail(&arena->list, &btt->arena_list);
arena 969 drivers/nvdimm/btt.c static int btt_arena_write_layout(struct arena_info *arena)
arena 974 drivers/nvdimm/btt.c struct nd_btt *nd_btt = arena->nd_btt;
arena 977 drivers/nvdimm/btt.c ret = btt_map_init(arena);
arena 981 drivers/nvdimm/btt.c ret = btt_log_init(arena);
arena 992 drivers/nvdimm/btt.c super->flags = cpu_to_le32(arena->flags);
arena 993 drivers/nvdimm/btt.c super->version_major = cpu_to_le16(arena->version_major);
arena 994 drivers/nvdimm/btt.c super->version_minor = cpu_to_le16(arena->version_minor);
arena 995 drivers/nvdimm/btt.c super->external_lbasize = cpu_to_le32(arena->external_lbasize);
arena 996 drivers/nvdimm/btt.c super->external_nlba = cpu_to_le32(arena->external_nlba);
arena 997 drivers/nvdimm/btt.c super->internal_lbasize = cpu_to_le32(arena->internal_lbasize);
arena 998 drivers/nvdimm/btt.c super->internal_nlba = cpu_to_le32(arena->internal_nlba);
arena 999 drivers/nvdimm/btt.c super->nfree = cpu_to_le32(arena->nfree);
arena 1001 drivers/nvdimm/btt.c super->nextoff = cpu_to_le64(arena->nextoff);
arena 1006 drivers/nvdimm/btt.c super->dataoff = cpu_to_le64(arena->dataoff - arena->infooff);
arena 1007 drivers/nvdimm/btt.c super->mapoff = cpu_to_le64(arena->mapoff - arena->infooff);
arena 1008 drivers/nvdimm/btt.c super->logoff = cpu_to_le64(arena->logoff - arena->infooff);
arena 1009 drivers/nvdimm/btt.c super->info2off = cpu_to_le64(arena->info2off - arena->infooff);
arena 1015 drivers/nvdimm/btt.c ret = btt_info_write(arena, super);
arena 1028 drivers/nvdimm/btt.c struct arena_info *arena;
arena 1031 drivers/nvdimm/btt.c list_for_each_entry(arena, &btt->arena_list, list) {
arena 1032 drivers/nvdimm/btt.c ret = btt_arena_write_layout(arena);
arena 1036 drivers/nvdimm/btt.c ret = btt_freelist_init(arena);
arena 1040 drivers/nvdimm/btt.c ret = btt_rtt_init(arena);
arena 1044 drivers/nvdimm/btt.c ret = btt_maplocks_init(arena);
arena 1069 drivers/nvdimm/btt.c struct arena_info **arena)
arena 1076 drivers/nvdimm/btt.c *arena = arena_list;
arena 1090 drivers/nvdimm/btt.c static void lock_map(struct arena_info *arena, u32 premap)
arena 1091 drivers/nvdimm/btt.c __acquires(&arena->map_locks[idx].lock)
arena 1093 drivers/nvdimm/btt.c u32 idx = (premap * MAP_ENT_SIZE / L1_CACHE_BYTES) % arena->nfree;
arena 1095 drivers/nvdimm/btt.c spin_lock(&arena->map_locks[idx].lock);
arena 1098 drivers/nvdimm/btt.c static void unlock_map(struct arena_info *arena, u32 premap)
arena 1099 drivers/nvdimm/btt.c __releases(&arena->map_locks[idx].lock)
arena 1101 drivers/nvdimm/btt.c u32 idx = (premap * MAP_ENT_SIZE / L1_CACHE_BYTES) % arena->nfree;
arena 1103 drivers/nvdimm/btt.c spin_unlock(&arena->map_locks[idx].lock);
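lock_map() and unlock_map() above stripe map updates across arena->nfree spinlocks, keyed so that map entries sharing an L1 cache line always hash to the same lock. The striping rule, factored into a hypothetical helper for illustration:

    /* Sketch: the index computation used by lock_map()/unlock_map() above.
     * Entries within one cache line of the map share a lock. */
    static inline u32 map_lock_idx(struct arena_info *arena, u32 premap)
    {
        return (premap * MAP_ENT_SIZE / L1_CACHE_BYTES) % arena->nfree;
    }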
arena 1106 drivers/nvdimm/btt.c static int btt_data_read(struct arena_info *arena, struct page *page,
arena 1110 drivers/nvdimm/btt.c u64 nsoff = to_namespace_offset(arena, lba);
arena 1113 drivers/nvdimm/btt.c ret = arena_read_bytes(arena, nsoff, mem + off, len, NVDIMM_IO_ATOMIC);
arena 1119 drivers/nvdimm/btt.c static int btt_data_write(struct arena_info *arena, u32 lba,
arena 1123 drivers/nvdimm/btt.c u64 nsoff = to_namespace_offset(arena, lba);
arena 1126 drivers/nvdimm/btt.c ret = arena_write_bytes(arena, nsoff, mem + off, len, NVDIMM_IO_ATOMIC);
arena 1142 drivers/nvdimm/btt.c struct arena_info *arena, u32 postmap, int rw)
arena 1151 drivers/nvdimm/btt.c meta_nsoff = to_namespace_offset(arena, postmap) + btt->sector_size;
arena 1168 drivers/nvdimm/btt.c ret = arena_write_bytes(arena, meta_nsoff,
arena 1172 drivers/nvdimm/btt.c ret = arena_read_bytes(arena, meta_nsoff,
arena 1191 drivers/nvdimm/btt.c struct arena_info *arena, u32 postmap, int rw)
arena 1203 drivers/nvdimm/btt.c struct arena_info *arena = NULL;
arena 1211 drivers/nvdimm/btt.c ret = lba_to_arena(btt, sector, &premap, &arena);
arena 1217 drivers/nvdimm/btt.c ret = btt_map_read(arena, premap, &postmap, &t_flag, &e_flag,
arena 1241 drivers/nvdimm/btt.c arena->rtt[lane] = RTT_VALID | postmap;
arena 1248 drivers/nvdimm/btt.c ret = btt_map_read(arena, premap, &new_map, &new_t,
arena 1262 drivers/nvdimm/btt.c ret = btt_data_read(arena, page, off, postmap, cur_len);
arena 1265 drivers/nvdimm/btt.c if (btt_map_write(arena, premap, postmap, 0, 1, NVDIMM_IO_ATOMIC))
arena 1266 drivers/nvdimm/btt.c dev_warn_ratelimited(to_dev(arena),
arena 1273 drivers/nvdimm/btt.c ret = btt_rw_integrity(btt, bip, arena, postmap, READ);
arena 1278 drivers/nvdimm/btt.c arena->rtt[lane] = RTT_INVALID;
arena 1289 drivers/nvdimm/btt.c arena->rtt[lane] = RTT_INVALID;
arena 1300 drivers/nvdimm/btt.c static bool btt_is_badblock(struct btt *btt, struct arena_info *arena,
arena 1303 drivers/nvdimm/btt.c u64 nsoff = adjust_initial_offset(arena->nd_btt,
arena 1304 drivers/nvdimm/btt.c to_namespace_offset(arena, postmap));
arena 1307 drivers/nvdimm/btt.c return is_bad_pmem(btt->phys_bb, phys_sector, arena->internal_lbasize);
arena 1315 drivers/nvdimm/btt.c struct arena_info *arena = NULL;
arena 1327 drivers/nvdimm/btt.c ret = lba_to_arena(btt, sector, &premap, &arena);
arena 1332 drivers/nvdimm/btt.c if ((arena->flags & IB_FLAG_ERROR_MASK) != 0) {
arena 1337 drivers/nvdimm/btt.c if (btt_is_badblock(btt, arena, arena->freelist[lane].block))
arena 1338 drivers/nvdimm/btt.c arena->freelist[lane].has_err = 1;
arena 1340 drivers/nvdimm/btt.c if (mutex_is_locked(&arena->err_lock)
arena 1341 drivers/nvdimm/btt.c || arena->freelist[lane].has_err) {
arena 1344 drivers/nvdimm/btt.c ret = arena_clear_freelist_error(arena, lane);
arena 1352 drivers/nvdimm/btt.c new_postmap = arena->freelist[lane].block;
arena 1355 drivers/nvdimm/btt.c for (i = 0; i < arena->nfree; i++)
arena 1356 drivers/nvdimm/btt.c while (arena->rtt[i] == (RTT_VALID | new_postmap))
arena 1360 drivers/nvdimm/btt.c if (new_postmap >= arena->internal_nlba) {
arena 1365 drivers/nvdimm/btt.c ret = btt_data_write(arena, new_postmap, page, off, cur_len);
arena 1370 drivers/nvdimm/btt.c ret = btt_rw_integrity(btt, bip, arena, new_postmap,
arena 1376 drivers/nvdimm/btt.c lock_map(arena, premap);
arena 1377 drivers/nvdimm/btt.c ret = btt_map_read(arena, premap, &old_postmap, NULL, &e_flag,
arena 1381 drivers/nvdimm/btt.c if (old_postmap >= arena->internal_nlba) {
arena 1391 drivers/nvdimm/btt.c log.seq = cpu_to_le32(arena->freelist[lane].seq);
arena 1392 drivers/nvdimm/btt.c sub = arena->freelist[lane].sub;
arena 1393 drivers/nvdimm/btt.c ret = btt_flog_write(arena, lane, sub, &log);
arena 1397 drivers/nvdimm/btt.c ret = btt_map_write(arena, premap, new_postmap, 0, 0,
arena 1402 drivers/nvdimm/btt.c unlock_map(arena, premap);
arena 1406 drivers/nvdimm/btt.c ret = arena_clear_freelist_error(arena, lane);
arena 1419 drivers/nvdimm/btt.c unlock_map(arena, premap);
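Read together, the btt_write_pg() usages above trace the BTT shadow-write protocol: take the lane's free block, wait out in-flight readers of that block, write the data there, then commit the transition through the flog and the map under the striped lock. A hedged, condensed summary of one sector's worth of that path (helper names as in the listing; error handling and integrity metadata elided):

    /* Condensed from the btt_write_pg() call sites above; not the full
     * function. */
    new_postmap = arena->freelist[lane].block;      /* lane's free block */
    for (i = 0; i < arena->nfree; i++)              /* wait out readers */
        while (arena->rtt[i] == (RTT_VALID | new_postmap))
            cpu_relax();
    btt_data_write(arena, new_postmap, page, off, cur_len);
    lock_map(arena, premap);
    btt_map_read(arena, premap, &old_postmap, NULL, &e_flag, NVDIMM_IO_ATOMIC);
    /* log {lba: premap, old_map: old_postmap, new_map: new_postmap,
     * seq: freelist seq}, then flip the map: log-before-map ordering is
     * what makes the write recoverable after a crash */
    log.seq = cpu_to_le32(arena->freelist[lane].seq);
    btt_flog_write(arena, lane, arena->freelist[lane].sub, &log);
    btt_map_write(arena, premap, new_postmap, 0, 0, NVDIMM_IO_ATOMIC);
    unlock_map(arena, premap);

btt_flog_write() then recycles old_postmap as the lane's next free block and advances the lane's sub/seq state, as its usages (arena 392-397) show.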