Lines Matching refs:offset in fs/btrfs/free-space-cache.c

48 u64 offset) in __lookup_free_space_inode() argument
59 key.offset = offset; in __lookup_free_space_inode()
134 u64 ino, u64 offset) in __create_free_space_inode() argument
167 btrfs_set_inode_block_group(leaf, inode_item, offset); in __create_free_space_inode()
172 key.offset = offset; in __create_free_space_inode()
469 unsigned offset = 0; in io_ctl_set_crc() local
477 offset = sizeof(u32) * io_ctl->num_pages; in io_ctl_set_crc()
479 crc = btrfs_csum_data(io_ctl->orig + offset, crc, in io_ctl_set_crc()
480 PAGE_CACHE_SIZE - offset); in io_ctl_set_crc()
492 unsigned offset = 0; in io_ctl_check_crc() local
500 offset = sizeof(u32) * io_ctl->num_pages; in io_ctl_check_crc()
507 crc = btrfs_csum_data(io_ctl->orig + offset, crc, in io_ctl_check_crc()
508 PAGE_CACHE_SIZE - offset); in io_ctl_check_crc()
520 static int io_ctl_add_entry(struct btrfs_io_ctl *io_ctl, u64 offset, u64 bytes, in io_ctl_add_entry() argument
529 entry->offset = cpu_to_le64(offset); in io_ctl_add_entry()
603 entry->offset = le64_to_cpu(e->offset); in io_ctl_read_entry()
654 if (prev->offset + prev->bytes == e->offset) { in merge_space_tree()
672 struct btrfs_path *path, u64 offset) in __load_free_space_cache() argument
691 key.offset = offset; in __load_free_space_cache()
715 offset); in __load_free_space_cache()
878 matched = (ctl->free_space == (block_group->key.offset - used - in load_free_space_cache()
938 ret = io_ctl_add_entry(io_ctl, e->offset, e->bytes, in write_cache_extent_entries()
985 struct btrfs_path *path, u64 offset, in update_cache_item() argument
994 key.offset = offset; in update_cache_item()
1011 found_key.offset != offset) { in update_cache_item()
1060 while (start < block_group->key.objectid + block_group->key.offset) { in write_pinned_extent_entries()
1069 block_group->key.offset) in write_pinned_extent_entries()
1074 block_group->key.offset, extent_end + 1); in write_pinned_extent_entries()
1149 struct btrfs_path *path, u64 offset) in btrfs_wait_cache_io() argument
1166 ret = update_cache_item(trans, root, inode, path, offset, in btrfs_wait_cache_io()
1228 struct btrfs_path *path, u64 offset) in __btrfs_write_out_cache() argument
1410 u64 offset) in offset_to_bit() argument
1412 ASSERT(offset >= bitmap_start); in offset_to_bit()
1413 offset -= bitmap_start; in offset_to_bit()
1414 return (unsigned long)(div_u64(offset, unit)); in offset_to_bit()
1423 u64 offset) in offset_to_bitmap()
1429 bitmap_start = offset - ctl->start; in offset_to_bitmap()
1437 static int tree_insert_offset(struct rb_root *root, u64 offset, in tree_insert_offset() argument
1448 if (offset < info->offset) { in tree_insert_offset()
1450 } else if (offset > info->offset) { in tree_insert_offset()
1497 u64 offset, int bitmap_only, int fuzzy) in tree_search_offset() argument
1512 if (offset < entry->offset) in tree_search_offset()
1514 else if (offset > entry->offset) in tree_search_offset()
1534 if (entry->offset != offset) in tree_search_offset()
1550 prev->offset + prev->bytes > offset) in tree_search_offset()
1562 if (entry->offset > offset) { in tree_search_offset()
1567 ASSERT(entry->offset <= offset); in tree_search_offset()
1582 prev->offset + prev->bytes > offset) in tree_search_offset()
1585 if (entry->offset + BITS_PER_BITMAP * ctl->unit > offset) in tree_search_offset()
1587 } else if (entry->offset + entry->bytes > offset) in tree_search_offset()
1595 if (entry->offset + BITS_PER_BITMAP * in tree_search_offset()
1596 ctl->unit > offset) in tree_search_offset()
1599 if (entry->offset + entry->bytes > offset) in tree_search_offset()
1632 ret = tree_insert_offset(&ctl->free_space_offset, info->offset, in link_free_space()
1648 u64 size = block_group->key.offset; in recalculate_thresholds()
1692 u64 offset, u64 bytes) in __bitmap_clear_bits() argument
1696 start = offset_to_bit(info->offset, ctl->unit, offset); in __bitmap_clear_bits()
1706 struct btrfs_free_space *info, u64 offset, in bitmap_clear_bits() argument
1709 __bitmap_clear_bits(ctl, info, offset, bytes); in bitmap_clear_bits()
1714 struct btrfs_free_space *info, u64 offset, in bitmap_set_bits() argument
1719 start = offset_to_bit(info->offset, ctl->unit, offset); in bitmap_set_bits()
1734 struct btrfs_free_space *bitmap_info, u64 *offset, in search_bitmap() argument
1743 i = offset_to_bit(bitmap_info->offset, ctl->unit, in search_bitmap()
1744 max_t(u64, *offset, bitmap_info->offset)); in search_bitmap()
1761 *offset = (u64)(i * ctl->unit) + bitmap_info->offset; in search_bitmap()
1772 find_free_space(struct btrfs_free_space_ctl *ctl, u64 *offset, u64 *bytes, in find_free_space() argument
1784 entry = tree_search_offset(ctl, offset_to_bitmap(ctl, *offset), 0, 1); in find_free_space()
1800 tmp = entry->offset - ctl->start + align - 1; in find_free_space()
1803 align_off = tmp - entry->offset; in find_free_space()
1806 tmp = entry->offset; in find_free_space()
1820 *offset = tmp; in find_free_space()
1829 *offset = tmp; in find_free_space()
1838 struct btrfs_free_space *info, u64 offset) in add_new_bitmap() argument
1840 info->offset = offset_to_bitmap(ctl, offset); in add_new_bitmap()
1861 u64 *offset, u64 *bytes) in remove_from_bitmap() argument
1868 end = bitmap_info->offset + (u64)(BITS_PER_BITMAP * ctl->unit) - 1; in remove_from_bitmap()
1876 search_start = *offset; in remove_from_bitmap()
1880 if (ret < 0 || search_start != *offset) in remove_from_bitmap()
1890 *offset += search_bytes; in remove_from_bitmap()
1921 search_start = *offset; in remove_from_bitmap()
1925 if (ret < 0 || search_start != *offset) in remove_from_bitmap()
1936 struct btrfs_free_space *info, u64 offset, in add_bytes_to_bitmap() argument
1942 end = info->offset + (u64)(BITS_PER_BITMAP * ctl->unit); in add_bytes_to_bitmap()
1944 bytes_to_set = min(end - offset, bytes); in add_bytes_to_bitmap()
1946 bitmap_set_bits(ctl, info, offset, bytes_to_set); in add_bytes_to_bitmap()
1985 if (((BITS_PER_BITMAP * ctl->unit) >> 1) > block_group->key.offset) in use_bitmap()
2002 u64 bytes, offset, bytes_added; in insert_into_bitmap() local
2006 offset = info->offset; in insert_into_bitmap()
2040 if (entry->offset == offset_to_bitmap(ctl, offset)) { in insert_into_bitmap()
2042 offset, bytes); in insert_into_bitmap()
2044 offset += bytes_added; in insert_into_bitmap()
2054 bitmap_info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in insert_into_bitmap()
2061 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes); in insert_into_bitmap()
2063 offset += bytes_added; in insert_into_bitmap()
2074 add_new_bitmap(ctl, info, offset); in insert_into_bitmap()
2118 u64 offset = info->offset; in try_merge_free_space() local
2126 right_info = tree_search_offset(ctl, offset + bytes, 0, 0); in try_merge_free_space()
2131 left_info = tree_search_offset(ctl, offset - 1, 0, 0); in try_merge_free_space()
2144 left_info->offset + left_info->bytes == offset) { in try_merge_free_space()
2149 info->offset = left_info->offset; in try_merge_free_space()
2165 const u64 end = info->offset + info->bytes; in steal_from_bitmap_to_end()
2173 i = offset_to_bit(bitmap->offset, ctl->unit, end); in steal_from_bitmap_to_end()
2202 bitmap_offset = offset_to_bitmap(ctl, info->offset); in steal_from_bitmap_to_front()
2204 if (bitmap_offset == info->offset) { in steal_from_bitmap_to_front()
2205 if (info->offset == 0) in steal_from_bitmap_to_front()
2207 bitmap_offset = offset_to_bitmap(ctl, info->offset - 1); in steal_from_bitmap_to_front()
2214 i = offset_to_bit(bitmap->offset, ctl->unit, info->offset) - 1; in steal_from_bitmap_to_front()
2230 info->offset -= bytes; in steal_from_bitmap_to_front()
2234 bitmap_clear_bits(ctl, bitmap, info->offset, bytes); in steal_from_bitmap_to_front()
2236 __bitmap_clear_bits(ctl, bitmap, info->offset, bytes); in steal_from_bitmap_to_front()
2281 u64 offset, u64 bytes) in __btrfs_add_free_space() argument
2290 info->offset = offset; in __btrfs_add_free_space()
2335 u64 offset, u64 bytes) in btrfs_remove_free_space() argument
2349 info = tree_search_offset(ctl, offset, 0, 0); in btrfs_remove_free_space()
2355 info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in btrfs_remove_free_space()
2371 if (offset == info->offset) { in btrfs_remove_free_space()
2375 info->offset += to_free; in btrfs_remove_free_space()
2383 offset += to_free; in btrfs_remove_free_space()
2387 u64 old_end = info->bytes + info->offset; in btrfs_remove_free_space()
2389 info->bytes = offset - info->offset; in btrfs_remove_free_space()
2396 if (old_end < offset + bytes) { in btrfs_remove_free_space()
2397 bytes -= old_end - offset; in btrfs_remove_free_space()
2398 offset = old_end; in btrfs_remove_free_space()
2400 } else if (old_end == offset + bytes) { in btrfs_remove_free_space()
2406 ret = btrfs_add_free_space(block_group, offset + bytes, in btrfs_remove_free_space()
2407 old_end - (offset + bytes)); in btrfs_remove_free_space()
2413 ret = remove_from_bitmap(ctl, info, &offset, &bytes); in btrfs_remove_free_space()
2438 info->offset, info->bytes, in btrfs_dump_free_space()
2506 entry->offset, &entry->offset_index, bitmap); in __btrfs_return_cluster_to_free_space()
2565 u64 offset, u64 bytes, u64 empty_size, in btrfs_find_space_for_alloc() argument
2576 entry = find_free_space(ctl, &offset, &bytes_search, in btrfs_find_space_for_alloc()
2581 ret = offset; in btrfs_find_space_for_alloc()
2583 bitmap_clear_bits(ctl, entry, offset, bytes); in btrfs_find_space_for_alloc()
2588 align_gap_len = offset - entry->offset; in btrfs_find_space_for_alloc()
2589 align_gap = entry->offset; in btrfs_find_space_for_alloc()
2591 entry->offset = offset + bytes; in btrfs_find_space_for_alloc()
2710 (!entry->bitmap && entry->offset < min_start)) { in btrfs_alloc_from_cluster()
2734 ret = entry->offset; in btrfs_alloc_from_cluster()
2736 entry->offset += bytes; in btrfs_alloc_from_cluster()
2771 u64 offset, u64 bytes, in btrfs_bitmap_cluster() argument
2784 i = offset_to_bit(entry->offset, ctl->unit, in btrfs_bitmap_cluster()
2785 max_t(u64, offset, entry->offset)); in btrfs_bitmap_cluster()
2819 cluster->window_start = start * ctl->unit + entry->offset; in btrfs_bitmap_cluster()
2821 ret = tree_insert_offset(&cluster->root, entry->offset, in btrfs_bitmap_cluster()
2838 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_no_bitmap() argument
2850 entry = tree_search_offset(ctl, offset, 0, 1); in setup_cluster_no_bitmap()
2894 cluster->window_start = first->offset; in setup_cluster_no_bitmap()
2911 ret = tree_insert_offset(&cluster->root, entry->offset, in setup_cluster_no_bitmap()
2929 struct list_head *bitmaps, u64 offset, u64 bytes, in setup_cluster_bitmap() argument
2935 u64 bitmap_offset = offset_to_bitmap(ctl, offset); in setup_cluster_bitmap()
2945 if (entry->offset != bitmap_offset) { in setup_cluster_bitmap()
2954 ret = btrfs_bitmap_cluster(block_group, entry, cluster, offset, in setup_cluster_bitmap()
2978 u64 offset, u64 bytes, u64 empty_size) in btrfs_find_space_cluster() argument
3022 trace_btrfs_find_cluster(block_group, offset, bytes, empty_size, in btrfs_find_space_cluster()
3025 ret = setup_cluster_no_bitmap(block_group, cluster, &bitmaps, offset, in btrfs_find_space_cluster()
3030 offset, bytes + empty_size, in btrfs_find_space_cluster()
3153 if (entry->offset >= end) { in trim_no_bitmap()
3159 extent_start = entry->offset; in trim_no_bitmap()
3204 u64 offset = offset_to_bitmap(ctl, start); in trim_bitmaps() local
3206 while (offset < end) { in trim_bitmaps()
3219 entry = tree_search_offset(ctl, offset, 1, 0); in trim_bitmaps()
3259 offset += BITS_PER_BITMAP * ctl->unit; in trim_bitmaps()
3262 if (start >= offset + BITS_PER_BITMAP * ctl->unit) in trim_bitmaps()
3263 offset += BITS_PER_BITMAP * ctl->unit; in trim_bitmaps()
3358 ino = entry->offset; in btrfs_find_ino_for_alloc()
3361 entry->offset++; in btrfs_find_ino_for_alloc()
3368 u64 offset = 0; in btrfs_find_ino_for_alloc() local
3372 ret = search_bitmap(ctl, entry, &offset, &count); in btrfs_find_ino_for_alloc()
3376 ino = offset; in btrfs_find_ino_for_alloc()
3377 bitmap_clear_bits(ctl, entry, offset, 1); in btrfs_find_ino_for_alloc()
3509 u64 offset, u64 bytes, bool bitmap) in test_add_free_space_entry() argument
3526 info->offset = offset; in test_add_free_space_entry()
3544 bitmap_info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in test_add_free_space_entry()
3549 add_new_bitmap(ctl, info, offset); in test_add_free_space_entry()
3554 bytes_added = add_bytes_to_bitmap(ctl, bitmap_info, offset, bytes); in test_add_free_space_entry()
3556 offset += bytes_added; in test_add_free_space_entry()
3575 u64 offset, u64 bytes) in test_check_exists() argument
3582 info = tree_search_offset(ctl, offset, 0, 0); in test_check_exists()
3584 info = tree_search_offset(ctl, offset_to_bitmap(ctl, offset), in test_check_exists()
3596 bit_off = offset; in test_check_exists()
3600 if (bit_off == offset) { in test_check_exists()
3603 } else if (bit_off > offset && in test_check_exists()
3604 offset + bytes > bit_off) { in test_check_exists()
3614 if (tmp->offset + tmp->bytes < offset) in test_check_exists()
3616 if (offset + bytes < tmp->offset) { in test_check_exists()
3628 if (offset + bytes < tmp->offset) in test_check_exists()
3630 if (tmp->offset + tmp->bytes < offset) { in test_check_exists()
3642 if (info->offset == offset) { in test_check_exists()
3647 if (offset > info->offset && offset < info->offset + info->bytes) in test_check_exists()
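The offset_to_bit() and offset_to_bitmap() helpers matched above carry the core arithmetic behind most of these references: they map a byte offset inside a block group to a bit index within one free-space bitmap and to the starting offset of the bitmap that covers it, while add_bytes_to_bitmap() clamps an extent to the end of its bitmap. The sketch below is a simplified userspace model of that arithmetic under stated assumptions, not the kernel code itself: BITS_PER_BITMAP_SKETCH, ctl_start, bytes_in_bitmap() and the demo values in main() are illustrative stand-ins (in the kernel the granularity comes from ctl->unit and BITS_PER_BITMAP is derived from the page size).

/*
 * Simplified model of the offset <-> bitmap arithmetic referenced above.
 * Illustrative only; constants and demo values are assumptions.
 */
#include <assert.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define BITS_PER_BITMAP_SKETCH (4096ULL * 8)	/* assume 4 KiB bitmap pages */

/* Bit index of 'offset' inside the bitmap that starts at 'bitmap_start'. */
static unsigned long offset_to_bit(uint64_t bitmap_start, uint32_t unit,
				   uint64_t offset)
{
	assert(offset >= bitmap_start);
	return (unsigned long)((offset - bitmap_start) / unit);
}

/*
 * Starting offset of the bitmap covering 'offset' for space that begins at
 * 'ctl_start' with a granularity of 'unit' bytes: round (offset - ctl_start)
 * down to a whole-bitmap boundary, then shift back to absolute offsets.
 */
static uint64_t offset_to_bitmap(uint64_t ctl_start, uint32_t unit,
				 uint64_t offset)
{
	uint64_t bytes_per_bitmap = BITS_PER_BITMAP_SKETCH * unit;
	uint64_t rel = offset - ctl_start;

	return ctl_start + (rel / bytes_per_bitmap) * bytes_per_bitmap;
}

/*
 * Bytes of the extent [offset, offset + bytes) that fall inside the bitmap
 * starting at 'bmap_start' (the clamp add_bytes_to_bitmap() performs before
 * setting bits; the caller would advance offset by the returned amount).
 */
static uint64_t bytes_in_bitmap(uint64_t bmap_start, uint32_t unit,
				uint64_t offset, uint64_t bytes)
{
	uint64_t end = bmap_start + BITS_PER_BITMAP_SKETCH * unit;

	assert(offset >= bmap_start && offset < end);
	return (end - offset < bytes) ? end - offset : bytes;
}

int main(void)
{
	uint64_t ctl_start = 1024ULL * 1024 * 1024;	/* demo block group start */
	uint32_t unit = 4096;				/* demo sector size */
	uint64_t offset = ctl_start + 300ULL * 1024 * 1024;
	uint64_t bytes = 200ULL * 1024 * 1024;

	uint64_t bmap = offset_to_bitmap(ctl_start, unit, offset);
	unsigned long bit = offset_to_bit(bmap, unit, offset);
	uint64_t in_first = bytes_in_bitmap(bmap, unit, offset, bytes);

	printf("offset %" PRIu64 ": bitmap @ %" PRIu64 ", bit %lu, "
	       "%" PRIu64 " of %" PRIu64 " bytes fit in this bitmap\n",
	       offset, bmap, bit, in_first, bytes);
	return 0;
}

In the kernel source the divisions go through div_u64()/div64_u64() helpers so the arithmetic also works on 32-bit architectures, which is why the offset_to_bit() match above shows div_u64(offset, unit) rather than a plain division.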