ex 112 arch/arm/include/asm/elf.h #define elf_read_implies_exec(ex,stk) arm_elf_read_implies_exec(stk) ex 137 arch/arm/include/asm/elf.h #define SET_PERSONALITY(ex) elf_set_personality(&(ex)) ex 99 arch/arm64/include/asm/elf.h #define elf_read_implies_exec(ex,stk) (stk != EXSTACK_DISABLE_X) ex 136 arch/arm64/include/asm/elf.h #define SET_PERSONALITY(ex) \ ex 201 arch/arm64/include/asm/elf.h #define COMPAT_SET_PERSONALITY(ex) \ ex 86 arch/csky/include/asm/elf.h #define SET_PERSONALITY(ex) set_personality(PER_LINUX) ex 43 arch/hexagon/kernel/traps.c static const char *ex_name(int ex) ex 45 arch/hexagon/kernel/traps.c switch (ex) { ex 205 arch/ia64/include/asm/elf.h #define elf_read_implies_exec(ex, executable_stack) \ ex 206 arch/ia64/include/asm/elf.h ((executable_stack!=EXSTACK_DISABLE_X) && ((ex).e_flags & EF_IA_64_LINUX_EXECUTABLE_STACK) != 0) ex 444 arch/ia64/include/asm/pal.h ex : 1, /* MC is expected */ ex 696 arch/ia64/include/asm/pal.h #define pmci_proc_machine_check_expected pme_processor.ex ex 27 arch/microblaze/include/asm/elf.h #define SET_PERSONALITY(ex) \ ex 270 arch/mips/boot/elf2ecoff.c Elf32_Ehdr ex; ex 307 arch/mips/boot/elf2ecoff.c i = read(infile, &ex, sizeof ex); ex 308 arch/mips/boot/elf2ecoff.c if (i != sizeof ex) { ex 315 arch/mips/boot/elf2ecoff.c if (ex.e_ident[EI_DATA] == ELFDATA2MSB) ex 326 arch/mips/boot/elf2ecoff.c convert_elf_hdr(&ex); ex 329 arch/mips/boot/elf2ecoff.c ph = (Elf32_Phdr *) saveRead(infile, ex.e_phoff, ex 330 arch/mips/boot/elf2ecoff.c ex.e_phnum * sizeof(Elf32_Phdr), ex 333 arch/mips/boot/elf2ecoff.c convert_elf_phdrs(ph, ex.e_phnum); ex 335 arch/mips/boot/elf2ecoff.c sh = (Elf32_Shdr *) saveRead(infile, ex.e_shoff, ex 336 arch/mips/boot/elf2ecoff.c ex.e_shnum * sizeof(Elf32_Shdr), ex 339 arch/mips/boot/elf2ecoff.c convert_elf_shdrs(sh, ex.e_shnum); ex 347 arch/mips/boot/elf2ecoff.c qsort(ph, ex.e_phnum, sizeof(Elf32_Phdr), phcmp); ex 349 arch/mips/boot/elf2ecoff.c for (i = 0; i < ex.e_phnum; i++) { ex 388 arch/mips/boot/elf2ecoff.c ex.e_phnum, ph[i].p_type); ex 425 arch/mips/boot/elf2ecoff.c eah.entry = ex.e_entry; ex 564 arch/mips/boot/elf2ecoff.c for (i = 0; i < ex.e_phnum; i++) { ex 345 arch/mips/include/asm/elf.h #define SET_PERSONALITY2(ex, state) \ ex 376 arch/mips/include/asm/elf.h #define __SET_PERSONALITY32_O32(ex, state) \ ex 388 arch/mips/include/asm/elf.h #define __SET_PERSONALITY32_O32(ex, state) \ ex 393 arch/mips/include/asm/elf.h #define __SET_PERSONALITY32(ex, state) \ ex 395 arch/mips/include/asm/elf.h if ((((ex).e_flags & EF_MIPS_ABI2) != 0) && \ ex 396 arch/mips/include/asm/elf.h ((ex).e_flags & EF_MIPS_ABI) == 0) \ ex 399 arch/mips/include/asm/elf.h __SET_PERSONALITY32_O32(ex, state); \ ex 402 arch/mips/include/asm/elf.h #define __SET_PERSONALITY32(ex, state) do { } while (0) ex 405 arch/mips/include/asm/elf.h #define SET_PERSONALITY2(ex, state) \ ex 414 arch/mips/include/asm/elf.h if ((ex).e_ident[EI_CLASS] == ELFCLASS32) \ ex 415 arch/mips/include/asm/elf.h __SET_PERSONALITY32(ex, state); \ ex 531 arch/mips/include/asm/elf.h #define elf_read_implies_exec(ex, stk) mips_elf_read_implies_exec(&(ex), stk) ex 158 arch/nds32/include/asm/elf.h #define SET_PERSONALITY(ex) set_personality(PER_LINUX) ex 235 arch/parisc/include/asm/elf.h #define SET_PERSONALITY(ex) \ ex 245 arch/parisc/include/asm/elf.h #define COMPAT_SET_PERSONALITY(ex) \ ex 247 arch/parisc/include/asm/elf.h if ((ex).e_ident[EI_CLASS] == ELFCLASS32) { \ ex 85 arch/powerpc/include/asm/elf.h # define SET_PERSONALITY(ex) \ ex 87 arch/powerpc/include/asm/elf.h if (((ex).e_flags & 0x3) == 2) \ ex 91 arch/powerpc/include/asm/elf.h if ((ex).e_ident[EI_CLASS] == ELFCLASS32) \ ex 106 arch/powerpc/include/asm/elf.h # define elf_read_implies_exec(ex, exec_stk) (is_32bit_task() ? \ ex 109 arch/powerpc/include/asm/elf.h # define elf_read_implies_exec(ex, exec_stk) (exec_stk == EXSTACK_DEFAULT) ex 232 arch/s390/include/asm/elf.h #define SET_PERSONALITY(ex) \ ex 240 arch/s390/include/asm/elf.h #define SET_PERSONALITY(ex) \ ex 245 arch/s390/include/asm/elf.h if ((ex).e_ident[EI_CLASS] == ELFCLASS32) { \ ex 122 arch/s390/include/asm/nospec-insn.h ex 0,0(\ruse) ex 135 arch/s390/include/asm/nospec-insn.h ex 0,0(\ruse) ex 187 arch/sh/include/asm/elf.h #define SET_PERSONALITY(ex) \ ex 101 arch/sh/kernel/cpu/sh5/fpu.c asmlinkage void do_fpu_error(unsigned long ex, struct pt_regs *regs) ex 360 arch/sh/kernel/hw_breakpoint.c unsigned long ex = lookup_exception_vector(); ex 363 arch/sh/kernel/hw_breakpoint.c notify_die(DIE_BREAKPOINT, "breakpoint", regs, 0, ex, SIGTRAP); ex 723 arch/sh/kernel/traps_32.c long ex; ex 725 arch/sh/kernel/traps_32.c ex = lookup_exception_vector(); ex 726 arch/sh/kernel/traps_32.c die_if_kernel("exception", current_pt_regs(), ex); ex 760 arch/sh/kernel/traps_64.c asmlinkage void do_exception_error(unsigned long ex, struct pt_regs *regs) ex 762 arch/sh/kernel/traps_64.c die_if_kernel("exception", regs, ex); ex 204 arch/sparc/include/asm/elf_64.h #define SET_PERSONALITY(ex) \ ex 205 arch/sparc/include/asm/elf_64.h do { if ((ex).e_ident[EI_CLASS] == ELFCLASS32) \ ex 80 arch/unicore32/include/asm/elf.h #define SET_PERSONALITY(ex) elf_set_personality(&(ex)) ex 109 arch/x86/ia32/ia32_aout.c struct exec ex; ex 112 arch/x86/ia32/ia32_aout.c ex = *((struct exec *) bprm->buf); /* exec-header */ ex 113 arch/x86/ia32/ia32_aout.c if ((N_MAGIC(ex) != ZMAGIC && N_MAGIC(ex) != OMAGIC && ex 114 arch/x86/ia32/ia32_aout.c N_MAGIC(ex) != QMAGIC && N_MAGIC(ex) != NMAGIC) || ex 115 arch/x86/ia32/ia32_aout.c N_TRSIZE(ex) || N_DRSIZE(ex) || ex 117 arch/x86/ia32/ia32_aout.c ex.a_text+ex.a_data+N_SYMSIZE(ex)+N_TXTOFF(ex)) { ex 121 arch/x86/ia32/ia32_aout.c fd_offset = N_TXTOFF(ex); ex 130 arch/x86/ia32/ia32_aout.c if (ex.a_data + ex.a_bss > rlim) ex 148 arch/x86/ia32/ia32_aout.c current->mm->end_code = ex.a_text + ex 149 arch/x86/ia32/ia32_aout.c (current->mm->start_code = N_TXTADDR(ex)); ex 150 arch/x86/ia32/ia32_aout.c current->mm->end_data = ex.a_data + ex 151 arch/x86/ia32/ia32_aout.c (current->mm->start_data = N_DATADDR(ex)); ex 152 arch/x86/ia32/ia32_aout.c current->mm->brk = ex.a_bss + ex 153 arch/x86/ia32/ia32_aout.c (current->mm->start_brk = N_BSSADDR(ex)); ex 161 arch/x86/ia32/ia32_aout.c if (N_MAGIC(ex) == OMAGIC) { ex 164 arch/x86/ia32/ia32_aout.c text_addr = N_TXTADDR(ex); ex 165 arch/x86/ia32/ia32_aout.c map_size = ex.a_text+ex.a_data; ex 173 arch/x86/ia32/ia32_aout.c ex.a_text + ex.a_data); ex 179 arch/x86/ia32/ia32_aout.c if ((ex.a_text & 0xfff || ex.a_data & 0xfff) && ex 180 arch/x86/ia32/ia32_aout.c (N_MAGIC(ex) != NMAGIC) && ex 197 arch/x86/ia32/ia32_aout.c error = vm_brk(N_TXTADDR(ex), ex.a_text+ex.a_data); ex 201 arch/x86/ia32/ia32_aout.c read_code(bprm->file, N_TXTADDR(ex), fd_offset, ex 202 arch/x86/ia32/ia32_aout.c ex.a_text+ex.a_data); ex 206 arch/x86/ia32/ia32_aout.c error = vm_mmap(bprm->file, N_TXTADDR(ex), ex.a_text, ex 212 arch/x86/ia32/ia32_aout.c if (error != N_TXTADDR(ex)) ex 215 arch/x86/ia32/ia32_aout.c error = vm_mmap(bprm->file, N_DATADDR(ex), ex.a_data, ex 219 arch/x86/ia32/ia32_aout.c fd_offset + ex.a_text); ex 220 arch/x86/ia32/ia32_aout.c if (error != N_DATADDR(ex)) ex 238 arch/x86/ia32/ia32_aout.c (regs)->ip = ex.a_entry; ex 253 arch/x86/ia32/ia32_aout.c struct exec ex; ex 257 arch/x86/ia32/ia32_aout.c error = kernel_read(file, &ex, sizeof(ex), &pos); ex 258 arch/x86/ia32/ia32_aout.c if (error != sizeof(ex)) ex 262 arch/x86/ia32/ia32_aout.c if ((N_MAGIC(ex) != ZMAGIC && N_MAGIC(ex) != QMAGIC) || N_TRSIZE(ex) || ex 263 arch/x86/ia32/ia32_aout.c N_DRSIZE(ex) || ((ex.a_entry & 0xfff) && N_MAGIC(ex) == ZMAGIC) || ex 265 arch/x86/ia32/ia32_aout.c ex.a_text+ex.a_data+N_SYMSIZE(ex)+N_TXTOFF(ex)) { ex 269 arch/x86/ia32/ia32_aout.c if (N_FLAGS(ex)) ex 275 arch/x86/ia32/ia32_aout.c start_addr = ex.a_entry & 0xfffff000; ex 277 arch/x86/ia32/ia32_aout.c if ((N_TXTOFF(ex) & ~PAGE_MASK) != 0) { ex 288 arch/x86/ia32/ia32_aout.c retval = vm_brk(start_addr, ex.a_text + ex.a_data + ex.a_bss); ex 292 arch/x86/ia32/ia32_aout.c read_code(file, start_addr, N_TXTOFF(ex), ex 293 arch/x86/ia32/ia32_aout.c ex.a_text + ex.a_data); ex 298 arch/x86/ia32/ia32_aout.c error = vm_mmap(file, start_addr, ex.a_text + ex.a_data, ex 301 arch/x86/ia32/ia32_aout.c N_TXTOFF(ex)); ex 306 arch/x86/ia32/ia32_aout.c len = PAGE_ALIGN(ex.a_text + ex.a_data); ex 307 arch/x86/ia32/ia32_aout.c bss = ex.a_text + ex.a_data + ex.a_bss; ex 195 arch/x86/include/asm/elf.h #define COMPAT_SET_PERSONALITY(ex) \ ex 196 arch/x86/include/asm/elf.h set_personality_ia32((ex).e_machine == EM_X86_64) ex 279 arch/x86/include/asm/elf.h #define SET_PERSONALITY(ex) set_personality_64bit() ex 285 arch/x86/include/asm/elf.h #define elf_read_implies_exec(ex, executable_stack) \ ex 1353 arch/x86/kvm/hyperv.c u16 rep_cnt, bool ex) ex 1367 arch/x86/kvm/hyperv.c if (!ex) { ex 1456 arch/x86/kvm/hyperv.c bool ex, bool fast) ex 1470 arch/x86/kvm/hyperv.c if (!ex) { ex 4692 arch/x86/kvm/vmx/vmx.c kvm_run->ex.exception = ex_no; ex 4693 arch/x86/kvm/vmx/vmx.c kvm_run->ex.error_code = error_code; ex 215 arch/x86/um/asm/elf.h #define SET_PERSONALITY(ex) do ; while(0) ex 177 arch/xtensa/include/asm/elf.h #define SET_PERSONALITY(ex) \ ex 287 drivers/block/rbd.c struct ceph_object_extent ex; ex 356 drivers/block/rbd.c list_for_each_entry(oreq, &(ireq)->object_extents, ex.oe_item) ex 358 drivers/block/rbd.c list_for_each_entry_safe(oreq, n, &(ireq)->object_extents, ex.oe_item) ex 1439 drivers/block/rbd.c list_del(&obj_request->ex.oe_item); ex 1449 drivers/block/rbd.c __func__, osd_req, obj_req, obj_req->ex.oe_objno, ex 1450 drivers/block/rbd.c obj_req->ex.oe_off, obj_req->ex.oe_len); ex 1481 drivers/block/rbd.c return !obj_req->ex.oe_off && ex 1482 drivers/block/rbd.c obj_req->ex.oe_len == rbd_dev->layout.object_size; ex 1489 drivers/block/rbd.c return obj_req->ex.oe_off + obj_req->ex.oe_len == ex 1561 drivers/block/rbd.c osd_req->r_data_offset = obj_request->ex.oe_off; ex 1592 drivers/block/rbd.c obj_req->ex.oe_objno); ex 1614 drivers/block/rbd.c ceph_object_extent_init(&obj_request->ex); ex 2112 drivers/block/rbd.c rbd_assert(objno == obj_req->ex.oe_objno); ex 2202 drivers/block/rbd.c if (!update_needed(rbd_dev, obj_req->ex.oe_objno, new_state)) ex 2232 drivers/block/rbd.c ret = rbd_cls_object_map_update(req, which, obj_req->ex.oe_objno, ex 2255 drivers/block/rbd.c struct ceph_file_extent *ex = &img_extents[cnt - 1]; ex 2258 drivers/block/rbd.c if (ex->fe_off + ex->fe_len > overlap) ex 2259 drivers/block/rbd.c ex->fe_len = overlap - ex->fe_off; ex 2278 drivers/block/rbd.c ret = ceph_extent_to_file(&rbd_dev->layout, obj_req->ex.oe_objno, ex 2279 drivers/block/rbd.c entire ? 0 : obj_req->ex.oe_off, ex 2281 drivers/block/rbd.c obj_req->ex.oe_len, ex 2300 drivers/block/rbd.c obj_req->ex.oe_len); ex 2305 drivers/block/rbd.c obj_req->ex.oe_len); ex 2379 drivers/block/rbd.c obj_req->ex.oe_off, obj_req->ex.oe_len, 0, 0); ex 2416 drivers/block/rbd.c obj_req->ex.oe_off, obj_req->ex.oe_len, ex 2437 drivers/block/rbd.c off = round_up(obj_req->ex.oe_off, rbd_dev->opts->alloc_size); ex 2438 drivers/block/rbd.c next_off = round_down(obj_req->ex.oe_off + obj_req->ex.oe_len, ex 2444 drivers/block/rbd.c obj_req, obj_req->ex.oe_off, obj_req->ex.oe_len, ex 2446 drivers/block/rbd.c obj_req->ex.oe_off = off; ex 2447 drivers/block/rbd.c obj_req->ex.oe_len = next_off - off; ex 2487 drivers/block/rbd.c obj_req->ex.oe_off, obj_req->ex.oe_len, ex 2619 drivers/block/rbd.c return &obj_req->ex; ex 2741 drivers/block/rbd.c struct ceph_file_extent ex = { off, len }; ex 2748 drivers/block/rbd.c return rbd_img_fill_request(img_req, &ex, 1, &fctx); ex 2751 drivers/block/rbd.c static void set_bio_pos(struct ceph_object_extent *ex, u32 bytes, void *arg) ex 2754 drivers/block/rbd.c container_of(ex, struct rbd_obj_request, ex); ex 2757 drivers/block/rbd.c dout("%s objno %llu bytes %u\n", __func__, ex->oe_objno, bytes); ex 2762 drivers/block/rbd.c static void count_bio_bvecs(struct ceph_object_extent *ex, u32 bytes, void *arg) ex 2765 drivers/block/rbd.c container_of(ex, struct rbd_obj_request, ex); ex 2768 drivers/block/rbd.c dout("%s objno %llu bytes %u\n", __func__, ex->oe_objno, bytes); ex 2775 drivers/block/rbd.c static void copy_bio_bvecs(struct ceph_object_extent *ex, u32 bytes, void *arg) ex 2778 drivers/block/rbd.c container_of(ex, struct rbd_obj_request, ex); ex 2781 drivers/block/rbd.c dout("%s objno %llu bytes %u\n", __func__, ex->oe_objno, bytes); ex 2808 drivers/block/rbd.c struct ceph_file_extent ex = { off, len }; ex 2811 drivers/block/rbd.c return __rbd_img_fill_from_bio(img_req, &ex, 1, &it); ex 2814 drivers/block/rbd.c static void set_bvec_pos(struct ceph_object_extent *ex, u32 bytes, void *arg) ex 2817 drivers/block/rbd.c container_of(ex, struct rbd_obj_request, ex); ex 2825 drivers/block/rbd.c static void count_bvecs(struct ceph_object_extent *ex, u32 bytes, void *arg) ex 2828 drivers/block/rbd.c container_of(ex, struct rbd_obj_request, ex); ex 2836 drivers/block/rbd.c static void copy_bvecs(struct ceph_object_extent *ex, u32 bytes, void *arg) ex 2839 drivers/block/rbd.c container_of(ex, struct rbd_obj_request, ex); ex 2899 drivers/block/rbd.c if (rbd_object_map_may_exist(rbd_dev, obj_req->ex.oe_objno)) { ex 2905 drivers/block/rbd.c obj_req->ex.oe_objno); ex 2919 drivers/block/rbd.c obj_req->ex.oe_off, obj_req->ex.oe_len, 0, 0); ex 3030 drivers/block/rbd.c rbd_obj_zero_range(obj_req, 0, obj_req->ex.oe_len); ex 3033 drivers/block/rbd.c if (*result < obj_req->ex.oe_len) ex 3035 drivers/block/rbd.c obj_req->ex.oe_len - *result); ex 3037 drivers/block/rbd.c rbd_assert(*result == obj_req->ex.oe_len); ex 3049 drivers/block/rbd.c if (obj_overlap < obj_req->ex.oe_len) ex 3051 drivers/block/rbd.c obj_req->ex.oe_len - obj_overlap); ex 3063 drivers/block/rbd.c if (rbd_object_map_may_exist(rbd_dev, obj_req->ex.oe_objno)) ex 3535 drivers/block/rbd.c obj_op_name(img_req->op_type), obj_req->ex.oe_objno, ex 3536 drivers/block/rbd.c obj_req->ex.oe_off, obj_req->ex.oe_len, *result); ex 74 drivers/infiniband/core/rw.c reg->inv_wr.ex.invalidate_rkey = reg->mr->lkey; ex 163 drivers/infiniband/core/rw.c reg->wr.wr.ex.invalidate_rkey = reg->mr->lkey; ex 1149 drivers/infiniband/core/uverbs_cmd.c tmp.ex.imm_data = wc->ex.imm_data; ex 2130 drivers/infiniband/core/uverbs_cmd.c next->ex.imm_data = ex 2131 drivers/infiniband/core/uverbs_cmd.c (__be32 __force) user_wr->ex.imm_data; ex 2133 drivers/infiniband/core/uverbs_cmd.c next->ex.invalidate_rkey = user_wr->ex.invalidate_rkey; ex 2073 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->send.imm_data = wr->ex.imm_data; ex 2077 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->send.inv_key = wr->ex.invalidate_rkey; ex 2103 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->rdma.imm_data = wr->ex.imm_data; ex 2107 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->rdma.inv_key = wr->ex.invalidate_rkey; ex 2157 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->local_inv.inv_l_key = wr->ex.invalidate_rkey; ex 3040 drivers/infiniband/hw/bnxt_re/ib_verbs.c wc->ex.imm_data = orig_cqe->immdata; ex 3173 drivers/infiniband/hw/bnxt_re/ib_verbs.c wc->ex.imm_data = cqe->immdata; ex 78 drivers/infiniband/hw/cxgb3/iwch_cq.c wc->ex.invalidate_rkey = CQE_WRID_STAG(cqe); ex 61 drivers/infiniband/hw/cxgb3/iwch_qp.c wqe->send.rem_stag = cpu_to_be32(wr->ex.invalidate_rkey); ex 103 drivers/infiniband/hw/cxgb3/iwch_qp.c wqe->write.sgl[0].stag = wr->ex.imm_data; ex 195 drivers/infiniband/hw/cxgb3/iwch_qp.c wqe->local_inv.stag = cpu_to_be32(wr->ex.invalidate_rkey); ex 801 drivers/infiniband/hw/cxgb4/cq.c wc->ex.invalidate_rkey = CQE_WRID_STAG(&cqe); ex 803 drivers/infiniband/hw/cxgb4/cq.c c4iw_invalidate_mr(qhp->rhp, wc->ex.invalidate_rkey); ex 807 drivers/infiniband/hw/cxgb4/cq.c wc->ex.imm_data = CQE_IMM_DATA(&cqe); ex 514 drivers/infiniband/hw/cxgb4/qp.c wqe->send.stag_inv = cpu_to_be32(wr->ex.invalidate_rkey); ex 570 drivers/infiniband/hw/cxgb4/qp.c wqe->write.iw_imm_data.ib_imm_data.imm_data32 = wr->ex.imm_data; ex 638 drivers/infiniband/hw/cxgb4/qp.c wcwr->stag_inv = cpu_to_be32(wr->next->ex.invalidate_rkey); ex 886 drivers/infiniband/hw/cxgb4/qp.c wqe->inv.stag_inv = cpu_to_be32(wr->ex.invalidate_rkey); ex 1217 drivers/infiniband/hw/cxgb4/qp.c c4iw_invalidate_mr(rhp, wr->ex.invalidate_rkey); ex 562 drivers/infiniband/hw/hfi1/rc.c wqe->wr.ex.invalidate_rkey); ex 610 drivers/infiniband/hw/hfi1/rc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 616 drivers/infiniband/hw/hfi1/rc.c wqe->wr.ex.invalidate_rkey); ex 653 drivers/infiniband/hw/hfi1/rc.c ohdr->u.rc.imm_data = wqe->wr.ex.imm_data; ex 967 drivers/infiniband/hw/hfi1/rc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 972 drivers/infiniband/hw/hfi1/rc.c ohdr->u.ieth = cpu_to_be32(wqe->wr.ex.invalidate_rkey); ex 1012 drivers/infiniband/hw/hfi1/rc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 2947 drivers/infiniband/hw/hfi1/rc.c wc.ex.imm_data = ohdr->u.imm_data; ex 2955 drivers/infiniband/hw/hfi1/rc.c wc.ex.invalidate_rkey = rkey; ex 2964 drivers/infiniband/hw/hfi1/rc.c wc.ex.imm_data = 0; ex 3053 drivers/infiniband/hw/hfi1/rc.c wc.ex.imm_data = ohdr->u.rc.imm_data; ex 140 drivers/infiniband/hw/hfi1/uc.c qp, wqe->wr.ex.invalidate_rkey); ex 173 drivers/infiniband/hw/hfi1/uc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 202 drivers/infiniband/hw/hfi1/uc.c ohdr->u.rc.imm_data = wqe->wr.ex.imm_data; ex 232 drivers/infiniband/hw/hfi1/uc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 258 drivers/infiniband/hw/hfi1/uc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 435 drivers/infiniband/hw/hfi1/uc.c wc.ex.imm_data = ohdr->u.imm_data; ex 440 drivers/infiniband/hw/hfi1/uc.c wc.ex.imm_data = 0; ex 515 drivers/infiniband/hw/hfi1/uc.c wc.ex.imm_data = ohdr->u.rc.imm_data; ex 530 drivers/infiniband/hw/hfi1/uc.c wc.ex.imm_data = ohdr->u.imm_data; ex 154 drivers/infiniband/hw/hfi1/ud.c wc.ex.imm_data = swqe->wr.ex.imm_data; ex 275 drivers/infiniband/hw/hfi1/ud.c ohdr->u.ud.imm_data = wqe->wr.ex.imm_data; ex 968 drivers/infiniband/hw/hfi1/ud.c wc.ex.imm_data = packet->ohdr->u.ud.imm_data; ex 971 drivers/infiniband/hw/hfi1/ud.c wc.ex.imm_data = 0; ex 231 drivers/infiniband/hw/hns/hns_roce_hw_v1.c ctrl->imm_data = wr->ex.imm_data; ex 235 drivers/infiniband/hw/hns/hns_roce_hw_v1.c cpu_to_le32(wr->ex.invalidate_rkey); ex 2347 drivers/infiniband/hw/hns/hns_roce_hw_v1.c wc->ex.imm_data = ex 2355 drivers/infiniband/hw/hns/hns_roce_hw_v1.c wc->ex.imm_data = cpu_to_be32( ex 352 drivers/infiniband/hw/hns/hns_roce_hw_v2.c cpu_to_le32(be32_to_cpu(wr->ex.imm_data)); ex 447 drivers/infiniband/hw/hns/hns_roce_hw_v2.c cpu_to_le32(be32_to_cpu(wr->ex.imm_data)); ex 451 drivers/infiniband/hw/hns/hns_roce_hw_v2.c cpu_to_le32(wr->ex.invalidate_rkey); ex 510 drivers/infiniband/hw/hns/hns_roce_hw_v2.c cpu_to_le32(wr->ex.invalidate_rkey); ex 2900 drivers/infiniband/hw/hns/hns_roce_hw_v2.c wc->ex.imm_data = ex 2910 drivers/infiniband/hw/hns/hns_roce_hw_v2.c wc->ex.imm_data = ex 2916 drivers/infiniband/hw/hns/hns_roce_hw_v2.c wc->ex.invalidate_rkey = le32_to_cpu(cqe->rkey); ex 2165 drivers/infiniband/hw/i40iw/i40iw_verbs.c ret = ukqp->ops.iw_inline_send(ukqp, &info, ib_wr->ex.invalidate_rkey, false); ex 2169 drivers/infiniband/hw/i40iw/i40iw_verbs.c ret = ukqp->ops.iw_send(ukqp, &info, ib_wr->ex.invalidate_rkey, false); ex 2227 drivers/infiniband/hw/i40iw/i40iw_verbs.c info.op.inv_local_stag.target_stag = ib_wr->ex.invalidate_rkey; ex 2401 drivers/infiniband/hw/i40iw/i40iw_verbs.c entry->ex.imm_data = 0; ex 821 drivers/infiniband/hw/mlx4/cq.c wc->ex.imm_data = cqe->immed_rss_invalid; ex 826 drivers/infiniband/hw/mlx4/cq.c wc->ex.invalidate_rkey = be32_to_cpu(cqe->immed_rss_invalid); ex 835 drivers/infiniband/hw/mlx4/cq.c wc->ex.imm_data = cqe->immed_rss_invalid; ex 3175 drivers/infiniband/hw/mlx4/qp.c sqp->ud_header.immediate_data = wr->wr.ex.imm_data; ex 3516 drivers/infiniband/hw/mlx4/qp.c return wr->ex.imm_data; ex 3519 drivers/infiniband/hw/mlx4/qp.c return cpu_to_be32(wr->ex.invalidate_rkey); ex 3661 drivers/infiniband/hw/mlx4/qp.c set_local_inv_seg(wqe, wr->ex.invalidate_rkey); ex 204 drivers/infiniband/hw/mlx5/cq.c wc->ex.imm_data = cqe->imm_inval_pkey; ex 216 drivers/infiniband/hw/mlx5/cq.c wc->ex.imm_data = cqe->imm_inval_pkey; ex 221 drivers/infiniband/hw/mlx5/cq.c wc->ex.invalidate_rkey = be32_to_cpu(cqe->imm_inval_pkey); ex 1163 drivers/infiniband/hw/mlx5/cq.c goto ex; ex 1169 drivers/infiniband/hw/mlx5/cq.c ex: ex 1279 drivers/infiniband/hw/mlx5/cq.c goto ex; ex 1359 drivers/infiniband/hw/mlx5/cq.c ex: ex 139 drivers/infiniband/hw/mlx5/ib_virt.c goto ex; ex 147 drivers/infiniband/hw/mlx5/ib_virt.c ex: ex 4408 drivers/infiniband/hw/mlx5/qp.c return wr->ex.imm_data; ex 4411 drivers/infiniband/hw/mlx5/qp.c return cpu_to_be32(wr->ex.invalidate_rkey); ex 5069 drivers/infiniband/hw/mlx5/qp.c ctrl->imm = cpu_to_be32(wr->ex.invalidate_rkey); ex 616 drivers/infiniband/hw/mthca/mthca_cq.c entry->ex.imm_data = cqe->imm_etype_pkey_eec; ex 622 drivers/infiniband/hw/mthca/mthca_cq.c entry->ex.imm_data = cqe->imm_etype_pkey_eec; ex 1531 drivers/infiniband/hw/mthca/mthca_qp.c sqp->ud_header.immediate_data = wr->wr.ex.imm_data; ex 1678 drivers/infiniband/hw/mthca/mthca_qp.c ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; ex 2008 drivers/infiniband/hw/mthca/mthca_qp.c ((struct mthca_next_seg *) wqe)->imm = wr->ex.imm_data; ex 2144 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c hdr->immdt = ntohl(wr->ex.imm_data); ex 2153 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c hdr->lkey = wr->ex.invalidate_rkey; ex 2158 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c hdr->immdt = ntohl(wr->ex.imm_data); ex 2173 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c hdr->lkey = wr->ex.invalidate_rkey; ex 2711 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ibwc->ex.imm_data = htonl(le32_to_cpu(cqe->rq.lkey_immdt)); ex 2715 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ibwc->ex.imm_data = htonl(le32_to_cpu(cqe->rq.lkey_immdt)); ex 2718 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ibwc->ex.invalidate_rkey = le32_to_cpu(cqe->rq.lkey_immdt); ex 3228 drivers/infiniband/hw/qedr/verbs.c swqe->inv_key_or_imm_data = cpu_to_le32(be32_to_cpu(wr->ex.imm_data)); ex 3254 drivers/infiniband/hw/qedr/verbs.c swqe->inv_key_or_imm_data = cpu_to_le32(wr->ex.invalidate_rkey); ex 3273 drivers/infiniband/hw/qedr/verbs.c rwqe->imm_data = htonl(cpu_to_le32(wr->ex.imm_data)); ex 3347 drivers/infiniband/hw/qedr/verbs.c iwqe->inv_l_key = wr->ex.invalidate_rkey; ex 3886 drivers/infiniband/hw/qedr/verbs.c wc->ex.imm_data = cpu_to_be32(le32_to_cpu(resp->imm_data_or_inv_r_Key)); ex 3896 drivers/infiniband/hw/qedr/verbs.c wc->ex.imm_data = le32_to_cpu(resp->imm_data_or_inv_r_Key); ex 328 drivers/infiniband/hw/qib/qib_rc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 364 drivers/infiniband/hw/qib/qib_rc.c wqe->rdma_wr.wr.ex.imm_data; ex 491 drivers/infiniband/hw/qib/qib_rc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 530 drivers/infiniband/hw/qib/qib_rc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 1845 drivers/infiniband/hw/qib/qib_rc.c wc.ex.imm_data = ohdr->u.imm_data; ex 1853 drivers/infiniband/hw/qib/qib_rc.c wc.ex.imm_data = 0; ex 1931 drivers/infiniband/hw/qib/qib_rc.c wc.ex.imm_data = ohdr->u.rc.imm_data; ex 118 drivers/infiniband/hw/qib/qib_uc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 147 drivers/infiniband/hw/qib/qib_uc.c ohdr->u.rc.imm_data = wqe->wr.ex.imm_data; ex 176 drivers/infiniband/hw/qib/qib_uc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 201 drivers/infiniband/hw/qib/qib_uc.c ohdr->u.imm_data = wqe->wr.ex.imm_data; ex 367 drivers/infiniband/hw/qib/qib_uc.c wc.ex.imm_data = ohdr->u.imm_data; ex 373 drivers/infiniband/hw/qib/qib_uc.c wc.ex.imm_data = 0; ex 440 drivers/infiniband/hw/qib/qib_uc.c wc.ex.imm_data = ohdr->u.rc.imm_data; ex 455 drivers/infiniband/hw/qib/qib_uc.c wc.ex.imm_data = ohdr->u.imm_data; ex 130 drivers/infiniband/hw/qib/qib_ud.c wc.ex.imm_data = swqe->wr.ex.imm_data; ex 340 drivers/infiniband/hw/qib/qib_ud.c ohdr->u.ud.imm_data = wqe->wr.ex.imm_data; ex 510 drivers/infiniband/hw/qib/qib_ud.c wc.ex.imm_data = ohdr->u.ud.imm_data; ex 513 drivers/infiniband/hw/qib/qib_ud.c wc.ex.imm_data = 0; ex 357 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c wc->ex.imm_data = cqe->imm_data; ex 713 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c wqe_hdr->ex.imm_data = wr->ex.imm_data; ex 754 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c wqe_hdr->ex.invalidate_rkey = ex 755 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c wr->ex.invalidate_rkey; ex 127 drivers/infiniband/sw/rdmavt/cq.c uqueue[head].ex.imm_data = entry->ex.imm_data; ex 2055 drivers/infiniband/sw/rdmavt/qp.c qp, wr->ex.invalidate_rkey); ex 3052 drivers/infiniband/sw/rdmavt/qp.c wqe->wr.ex.invalidate_rkey)) ex 3071 drivers/infiniband/sw/rdmavt/qp.c wqe->wr.ex.invalidate_rkey)) { ex 3073 drivers/infiniband/sw/rdmavt/qp.c wc.ex.invalidate_rkey = ex 3074 drivers/infiniband/sw/rdmavt/qp.c wqe->wr.ex.invalidate_rkey; ex 3079 drivers/infiniband/sw/rdmavt/qp.c wc.ex.imm_data = wqe->wr.ex.imm_data; ex 3090 drivers/infiniband/sw/rdmavt/qp.c wc.ex.imm_data = wqe->wr.ex.imm_data; ex 134 drivers/infiniband/sw/rdmavt/trace_cq.h __entry->imm = be32_to_cpu(wc->ex.imm_data); ex 445 drivers/infiniband/sw/rxe/rxe_req.c immdt_set_imm(pkt, ibwr->ex.imm_data); ex 448 drivers/infiniband/sw/rxe/rxe_req.c ieth_set_rkey(pkt, ibwr->ex.invalidate_rkey); ex 633 drivers/infiniband/sw/rxe/rxe_req.c wqe->wr.ex.invalidate_rkey >> 8); ex 636 drivers/infiniband/sw/rxe/rxe_req.c wqe->wr.ex.invalidate_rkey); ex 879 drivers/infiniband/sw/rxe/rxe_resp.c uwc->ex.imm_data = immdt_imm(pkt); ex 884 drivers/infiniband/sw/rxe/rxe_resp.c uwc->ex.invalidate_rkey = ieth_rkey(pkt); ex 909 drivers/infiniband/sw/rxe/rxe_resp.c wc->ex.imm_data = immdt_imm(pkt); ex 916 drivers/infiniband/sw/rxe/rxe_resp.c wc->ex.invalidate_rkey = ieth_rkey(pkt); ex 919 drivers/infiniband/sw/rxe/rxe_resp.c wc->ex.invalidate_rkey >> 8); ex 922 drivers/infiniband/sw/rxe/rxe_resp.c wc->ex.invalidate_rkey); ex 546 drivers/infiniband/sw/rxe/rxe_verbs.c wr->ex.imm_data = ibwr->ex.imm_data; ex 550 drivers/infiniband/sw/rxe/rxe_verbs.c wr->ex.imm_data = ibwr->ex.imm_data; ex 558 drivers/infiniband/sw/rxe/rxe_verbs.c wr->ex.imm_data = ibwr->ex.imm_data; ex 561 drivers/infiniband/sw/rxe/rxe_verbs.c wr->ex.invalidate_rkey = ibwr->ex.invalidate_rkey; ex 573 drivers/infiniband/sw/rxe/rxe_verbs.c wr->ex.invalidate_rkey = ibwr->ex.invalidate_rkey; ex 70 drivers/infiniband/sw/siw/siw_cq.c wc->ex.invalidate_rkey = cqe->inval_stag; ex 844 drivers/infiniband/sw/siw/siw_verbs.c sqe->rkey = wr->ex.invalidate_rkey; ex 902 drivers/infiniband/sw/siw/siw_verbs.c sqe->rkey = wr->ex.invalidate_rkey; ex 613 drivers/infiniband/ulp/iser/iser_initiator.c u32 rkey = wc->ex.invalidate_rkey; ex 373 drivers/infiniband/ulp/iser/iser_memory.c inv_wr->ex.invalidate_rkey = mr->rkey; ex 949 drivers/infiniband/ulp/isert/ib_isert.c send_wr->ex.invalidate_rkey = isert_cmd->inv_rkey; ex 1234 drivers/infiniband/ulp/srp/ib_srp.c .ex.invalidate_rkey = rkey, ex 675 drivers/md/dm-snap.c struct dm_exception *ex; ex 683 drivers/md/dm-snap.c hlist_bl_for_each_entry_safe(ex, pos, n, slot, hash_list) ex 684 drivers/md/dm-snap.c kmem_cache_free(mem, ex); ex 63 drivers/media/pci/solo6x10/solo6x10-v4l2.c int sx, int sy, int ex, int ey, int scale) ex 72 drivers/media/pci/solo6x10/solo6x10-v4l2.c SOLO_VI_WIN_EX(ex) | ex 78 drivers/net/ethernet/freescale/fman/mac.c static void mac_exception(void *handle, enum fman_mac_exceptions ex) ex 86 drivers/net/ethernet/freescale/fman/mac.c if (ex == FM_MAC_EX_10G_RX_FIFO_OVFL) { ex 90 drivers/net/ethernet/freescale/fman/mac.c dev_err(priv->dev, "10G MAC got RX FIFO Error = %x\n", ex); ex 94 drivers/net/ethernet/freescale/fman/mac.c __func__, ex); ex 687 drivers/net/ethernet/mellanox/mlx5/core/vport.c goto ex; ex 696 drivers/net/ethernet/mellanox/mlx5/core/vport.c goto ex; ex 733 drivers/net/ethernet/mellanox/mlx5/core/vport.c ex: ex 1058 drivers/net/ethernet/mellanox/mlx5/core/vport.c goto ex; ex 1088 drivers/net/ethernet/mellanox/mlx5/core/vport.c ex: ex 356 drivers/net/ethernet/nvidia/forcedeth.c struct ring_desc_ex *ex; ex 1030 drivers/net/ethernet/nvidia/forcedeth.c if (np->rx_ring.ex) ex 1035 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex, np->ring_addr); ex 1873 drivers/net/ethernet/nvidia/forcedeth.c less_rx = np->get_rx.ex; ex 1874 drivers/net/ethernet/nvidia/forcedeth.c if (less_rx-- == np->rx_ring.ex) ex 1875 drivers/net/ethernet/nvidia/forcedeth.c less_rx = np->last_rx.ex; ex 1877 drivers/net/ethernet/nvidia/forcedeth.c while (np->put_rx.ex != less_rx) { ex 1891 drivers/net/ethernet/nvidia/forcedeth.c np->put_rx.ex->bufhigh = cpu_to_le32(dma_high(np->put_rx_ctx->dma)); ex 1892 drivers/net/ethernet/nvidia/forcedeth.c np->put_rx.ex->buflow = cpu_to_le32(dma_low(np->put_rx_ctx->dma)); ex 1894 drivers/net/ethernet/nvidia/forcedeth.c np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); ex 1895 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(np->put_rx.ex++ == np->last_rx.ex)) ex 1896 drivers/net/ethernet/nvidia/forcedeth.c np->put_rx.ex = np->rx_ring.ex; ex 1930 drivers/net/ethernet/nvidia/forcedeth.c np->last_rx.ex = &np->rx_ring.ex[np->rx_ring_size-1]; ex 1940 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].flaglen = 0; ex 1941 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].txvlan = 0; ex 1942 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].bufhigh = 0; ex 1943 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].buflow = 0; ex 1961 drivers/net/ethernet/nvidia/forcedeth.c np->last_tx.ex = &np->tx_ring.ex[np->tx_ring_size-1]; ex 1976 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].flaglen = 0; ex 1977 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].txvlan = 0; ex 1978 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].bufhigh = 0; ex 1979 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].buflow = 0; ex 2039 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].flaglen = 0; ex 2040 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].txvlan = 0; ex 2041 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].bufhigh = 0; ex 2042 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[i].buflow = 0; ex 2070 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].flaglen = 0; ex 2071 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].txvlan = 0; ex 2072 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].bufhigh = 0; ex 2073 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex[i].buflow = 0; ex 2403 drivers/net/ethernet/nvidia/forcedeth.c start_tx = put_tx = np->put_tx.ex; ex 2430 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx++ == np->last_tx.ex)) ex 2431 drivers/net/ethernet/nvidia/forcedeth.c put_tx = np->tx_ring.ex; ex 2476 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx++ == np->last_tx.ex)) ex 2477 drivers/net/ethernet/nvidia/forcedeth.c put_tx = np->tx_ring.ex; ex 2483 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx == np->tx_ring.ex)) ex 2484 drivers/net/ethernet/nvidia/forcedeth.c prev_tx = np->last_tx.ex; ex 2541 drivers/net/ethernet/nvidia/forcedeth.c np->put_tx.ex = put_tx; ex 2647 drivers/net/ethernet/nvidia/forcedeth.c struct ring_desc_ex *orig_get_tx = np->get_tx.ex; ex 2650 drivers/net/ethernet/nvidia/forcedeth.c while ((np->get_tx.ex != np->put_tx.ex) && ex 2651 drivers/net/ethernet/nvidia/forcedeth.c !((flags = le32_to_cpu(np->get_tx.ex->flaglen)) & NV_TX2_VALID) && ex 2684 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(np->get_tx.ex++ == np->last_tx.ex)) ex 2685 drivers/net/ethernet/nvidia/forcedeth.c np->get_tx.ex = np->tx_ring.ex; ex 2692 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely((np->tx_stop == 1) && (np->get_tx.ex != orig_get_tx))) { ex 2755 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i].bufhigh), ex 2756 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i].buflow), ex 2757 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i].flaglen), ex 2758 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+1].bufhigh), ex 2759 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+1].buflow), ex 2760 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+1].flaglen), ex 2761 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+2].bufhigh), ex 2762 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+2].buflow), ex 2763 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+2].flaglen), ex 2764 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+3].bufhigh), ex 2765 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+3].buflow), ex 2766 drivers/net/ethernet/nvidia/forcedeth.c le32_to_cpu(np->tx_ring.ex[i+3].flaglen)); ex 2787 drivers/net/ethernet/nvidia/forcedeth.c put_tx.ex = np->tx_change_owner->first_tx_desc; ex 2967 drivers/net/ethernet/nvidia/forcedeth.c while ((np->get_rx.ex != np->put_rx.ex) && ex 2968 drivers/net/ethernet/nvidia/forcedeth.c !((flags = le32_to_cpu(np->get_rx.ex->flaglen)) & NV_RX2_AVAIL) && ex 3014 drivers/net/ethernet/nvidia/forcedeth.c vlanflags = le32_to_cpu(np->get_rx.ex->buflow); ex 3036 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(np->get_rx.ex++ == np->last_rx.ex)) ex 3037 drivers/net/ethernet/nvidia/forcedeth.c np->get_rx.ex = np->rx_ring.ex; ex 4715 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex = (struct ring_desc_ex *)rxtx_ring; ex 4716 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size]; ex 5160 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[0].bufhigh = cpu_to_le32(dma_high(test_dma_addr)); ex 5161 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[0].buflow = cpu_to_le32(dma_low(test_dma_addr)); ex 5162 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); ex 5175 drivers/net/ethernet/nvidia/forcedeth.c flags = le32_to_cpu(np->rx_ring.ex[0].flaglen); ex 5176 drivers/net/ethernet/nvidia/forcedeth.c len = nv_descr_getlength_ex(&np->rx_ring.ex[0], np->desc_ver); ex 5823 drivers/net/ethernet/nvidia/forcedeth.c np->rx_ring.ex = dma_alloc_coherent(&pci_dev->dev, ex 5828 drivers/net/ethernet/nvidia/forcedeth.c if (!np->rx_ring.ex) ex 5830 drivers/net/ethernet/nvidia/forcedeth.c np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size]; ex 74 drivers/net/team/team_mode_loadbalance.c struct lb_priv_ex *ex; /* priv extension */ ex 93 drivers/net/team/team_mode_loadbalance.c (lb_priv)->ex->tx_hash_to_port_mapping[hash].port ex 96 drivers/net/team/team_mode_loadbalance.c (lb_priv)->ex->tx_hash_to_port_mapping[hash].opt_inst_info ex 108 drivers/net/team/team_mode_loadbalance.c pm = &lb_priv->ex->tx_hash_to_port_mapping[i]; ex 249 drivers/net/team/team_mode_loadbalance.c if (!lb_priv->ex->orig_fprog) { ex 254 drivers/net/team/team_mode_loadbalance.c ctx->data.bin_val.len = lb_priv->ex->orig_fprog->len * ex 256 drivers/net/team/team_mode_loadbalance.c ctx->data.bin_val.ptr = lb_priv->ex->orig_fprog->filter; ex 307 drivers/net/team/team_mode_loadbalance.c if (lb_priv->ex->orig_fprog) { ex 309 drivers/net/team/team_mode_loadbalance.c __fprog_destroy(lb_priv->ex->orig_fprog); ex 315 drivers/net/team/team_mode_loadbalance.c lb_priv->ex->orig_fprog = fprog; ex 329 drivers/net/team/team_mode_loadbalance.c if (!lb_priv->ex->orig_fprog) ex 332 drivers/net/team/team_mode_loadbalance.c __fprog_destroy(lb_priv->ex->orig_fprog); ex 410 drivers/net/team/team_mode_loadbalance.c lb_priv->ex->stats.info[hash].opt_inst_info = info; ex 419 drivers/net/team/team_mode_loadbalance.c ctx->data.bin_val.ptr = &lb_priv->ex->stats.info[hash].stats; ex 500 drivers/net/team/team_mode_loadbalance.c s_info = &lb_priv->ex->stats.info[j]; ex 539 drivers/net/team/team_mode_loadbalance.c ctx->data.u32_val = lb_priv->ex->stats.refresh_interval; ex 550 drivers/net/team/team_mode_loadbalance.c if (lb_priv->ex->stats.refresh_interval == interval) ex 552 drivers/net/team/team_mode_loadbalance.c lb_priv->ex->stats.refresh_interval = interval; ex 554 drivers/net/team/team_mode_loadbalance.c schedule_delayed_work(&lb_priv->ex->stats.refresh_dw, 0); ex 556 drivers/net/team/team_mode_loadbalance.c cancel_delayed_work(&lb_priv->ex->stats.refresh_dw); ex 614 drivers/net/team/team_mode_loadbalance.c lb_priv->ex = kzalloc(sizeof(*lb_priv->ex), GFP_KERNEL); ex 615 drivers/net/team/team_mode_loadbalance.c if (!lb_priv->ex) ex 617 drivers/net/team/team_mode_loadbalance.c lb_priv->ex->team = team; ex 632 drivers/net/team/team_mode_loadbalance.c INIT_DELAYED_WORK(&lb_priv->ex->stats.refresh_dw, lb_stats_refresh); ex 642 drivers/net/team/team_mode_loadbalance.c kfree(lb_priv->ex); ex 653 drivers/net/team/team_mode_loadbalance.c cancel_delayed_work_sync(&lb_priv->ex->stats.refresh_dw); ex 655 drivers/net/team/team_mode_loadbalance.c kfree(lb_priv->ex); ex 1140 drivers/nvme/host/rdma.c .ex.invalidate_rkey = req->mr->rkey, ex 1461 drivers/nvme/host/rdma.c if (unlikely(wc->ex.invalidate_rkey != req->mr->rkey)) { ex 559 drivers/nvme/target/rdma.c rsp->send_wr.ex.invalidate_rkey = rsp->invalidate_rkey; ex 203 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 204 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[phy_id]; ex 392 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 409 drivers/scsi/libsas/sas_expander.c if (0 <= single && single < ex->num_phys) { ex 414 drivers/scsi/libsas/sas_expander.c for (i = 0; i < ex->num_phys; i++) { ex 429 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 432 drivers/scsi/libsas/sas_expander.c ex->ex_phy = kcalloc(ex->num_phys, sizeof(*ex->ex_phy), GFP_KERNEL); ex 433 drivers/scsi/libsas/sas_expander.c if (!ex->ex_phy) ex 442 drivers/scsi/libsas/sas_expander.c kfree(ex->ex_phy); ex 443 drivers/scsi/libsas/sas_expander.c ex->ex_phy = NULL; ex 619 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 620 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[phy_id]; ex 628 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 631 drivers/scsi/libsas/sas_expander.c for (i = 0; i < ex->num_phys; i++) { ex 632 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[i]; ex 1018 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1019 drivers/scsi/libsas/sas_expander.c struct ex_phy *ex_phy = &ex->ex_phy[phy_id]; ex 1115 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1118 drivers/scsi/libsas/sas_expander.c for (i = 0; i < ex->num_phys; i++) { ex 1119 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[i]; ex 1138 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1142 drivers/scsi/libsas/sas_expander.c list_for_each_entry(child, &ex->children, siblings) { ex 1176 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1177 drivers/scsi/libsas/sas_expander.c int i = 0, end = ex->num_phys; ex 1186 drivers/scsi/libsas/sas_expander.c struct ex_phy *ex_phy = &ex->ex_phy[i]; ex 1214 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1221 drivers/scsi/libsas/sas_expander.c for (i = 0; i < ex->num_phys; i++) { ex 1222 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[i]; ex 1390 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1391 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[phy_id]; ex 1411 drivers/scsi/libsas/sas_expander.c for (i = 0; i < ex->max_route_indexes ; i++) { ex 1621 drivers/scsi/libsas/sas_expander.c struct sas_expander_device *ex = ex 1624 drivers/scsi/libsas/sas_expander.c if (level == ex->level) ex 1652 drivers/scsi/libsas/sas_expander.c struct sas_expander_device *ex = rphy_to_expander_device(dev->rphy); ex 1658 drivers/scsi/libsas/sas_expander.c ex->level = dev->port->disc.max_level; /* 0 */ ex 1746 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1750 drivers/scsi/libsas/sas_expander.c for (i = from_phy; i < ex->num_phys; i++) { ex 1764 drivers/scsi/libsas/sas_expander.c if (phy_change_count != ex->ex_phy[i].phy_change_count) { ex 1766 drivers/scsi/libsas/sas_expander.c ex->ex_phy[i].phy_change_count = ex 1826 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1835 drivers/scsi/libsas/sas_expander.c if (ex_change_count != -1 && ex_change_count != ex->ex_change_count) { ex 1843 drivers/scsi/libsas/sas_expander.c ex->ex_change_count = ex_change_count; ex 1851 drivers/scsi/libsas/sas_expander.c list_for_each_entry(ch, &ex->children, siblings) { ex 1864 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1867 drivers/scsi/libsas/sas_expander.c list_for_each_entry_safe(child, n, &ex->children, siblings) { ex 1919 drivers/scsi/libsas/sas_expander.c struct sas_expander_device *ex = ex 1922 drivers/scsi/libsas/sas_expander.c if (level > ex->level) ex 1925 drivers/scsi/libsas/sas_expander.c else if (level == ex->level) ex 1935 drivers/scsi/libsas/sas_expander.c struct sas_expander_device *ex = rphy_to_expander_device(dev->rphy); ex 1936 drivers/scsi/libsas/sas_expander.c int level = ex->level+1; ex 1997 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 1998 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[phy_id]; ex 2077 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 2078 drivers/scsi/libsas/sas_expander.c struct ex_phy *changed_phy = &ex->ex_phy[phy_id]; ex 2087 drivers/scsi/libsas/sas_expander.c for (i = 0; i < ex->num_phys; i++) { ex 2088 drivers/scsi/libsas/sas_expander.c struct ex_phy *phy = &ex->ex_phy[i]; ex 2120 drivers/scsi/libsas/sas_expander.c struct expander_device *ex = &dev->ex_dev; ex 2130 drivers/scsi/libsas/sas_expander.c } while (i < ex->num_phys); ex 160 drivers/scsi/libsas/sas_internal.h struct expander_device *ex = &dev->ex_dev; ex 161 drivers/scsi/libsas/sas_internal.h struct ex_phy *ex_phy = &ex->ex_phy[phy_id]; ex 163 drivers/scsi/libsas/sas_internal.h if (!ex->parent_port) { ex 164 drivers/scsi/libsas/sas_internal.h ex->parent_port = sas_port_alloc(&dev->rphy->dev, phy_id); ex 166 drivers/scsi/libsas/sas_internal.h BUG_ON(!ex->parent_port); ex 167 drivers/scsi/libsas/sas_internal.h BUG_ON(sas_port_add(ex->parent_port)); ex 168 drivers/scsi/libsas/sas_internal.h sas_port_mark_backlink(ex->parent_port); ex 170 drivers/scsi/libsas/sas_internal.h sas_port_add_phy(ex->parent_port, ex_phy->phy); ex 872 drivers/thermal/tegra/soctherm.c u32 st, ex = 0, cp = 0, gp = 0, pl = 0, me = 0; ex 889 drivers/thermal/tegra/soctherm.c ex |= cp | gp | pl | me; ex 890 drivers/thermal/tegra/soctherm.c if (ex) { ex 891 drivers/thermal/tegra/soctherm.c writel(ex, ts->regs + THERMCTL_INTR_STATUS); ex 892 drivers/thermal/tegra/soctherm.c st &= ~ex; ex 920 drivers/thermal/tegra/soctherm.c ex |= TH_INTR_IGNORE_MASK; ex 921 drivers/thermal/tegra/soctherm.c st &= ~ex; ex 1032 drivers/thermal/tegra/soctherm.c u32 st, ex, oc1, oc2, oc3, oc4; ex 1041 drivers/thermal/tegra/soctherm.c ex = oc1 | oc2 | oc3 | oc4; ex 1043 drivers/thermal/tegra/soctherm.c pr_err("soctherm: OC ALARM 0x%08x\n", ex); ex 1044 drivers/thermal/tegra/soctherm.c if (ex) { ex 1046 drivers/thermal/tegra/soctherm.c st &= ~ex; ex 120 fs/binfmt_aout.c struct exec ex; ex 126 fs/binfmt_aout.c ex = *((struct exec *) bprm->buf); /* exec-header */ ex 127 fs/binfmt_aout.c if ((N_MAGIC(ex) != ZMAGIC && N_MAGIC(ex) != OMAGIC && ex 128 fs/binfmt_aout.c N_MAGIC(ex) != QMAGIC && N_MAGIC(ex) != NMAGIC) || ex 129 fs/binfmt_aout.c N_TRSIZE(ex) || N_DRSIZE(ex) || ex 130 fs/binfmt_aout.c i_size_read(file_inode(bprm->file)) < ex.a_text+ex.a_data+N_SYMSIZE(ex)+N_TXTOFF(ex)) { ex 141 fs/binfmt_aout.c fd_offset = N_TXTOFF(ex); ex 150 fs/binfmt_aout.c if (ex.a_data + ex.a_bss > rlim) ex 160 fs/binfmt_aout.c SET_AOUT_PERSONALITY(bprm, ex); ex 166 fs/binfmt_aout.c current->mm->end_code = ex.a_text + ex 167 fs/binfmt_aout.c (current->mm->start_code = N_TXTADDR(ex)); ex 168 fs/binfmt_aout.c current->mm->end_data = ex.a_data + ex 169 fs/binfmt_aout.c (current->mm->start_data = N_DATADDR(ex)); ex 170 fs/binfmt_aout.c current->mm->brk = ex.a_bss + ex 171 fs/binfmt_aout.c (current->mm->start_brk = N_BSSADDR(ex)); ex 179 fs/binfmt_aout.c if (N_MAGIC(ex) == OMAGIC) { ex 183 fs/binfmt_aout.c text_addr = N_TXTADDR(ex); ex 187 fs/binfmt_aout.c map_size = ex.a_text+ex.a_data + PAGE_SIZE - 1; ex 190 fs/binfmt_aout.c map_size = ex.a_text+ex.a_data; ex 197 fs/binfmt_aout.c ex.a_text+ex.a_data); ex 201 fs/binfmt_aout.c if ((ex.a_text & 0xfff || ex.a_data & 0xfff) && ex 202 fs/binfmt_aout.c (N_MAGIC(ex) != NMAGIC) && printk_ratelimit()) ex 215 fs/binfmt_aout.c error = vm_brk(N_TXTADDR(ex), ex.a_text+ex.a_data); ex 219 fs/binfmt_aout.c read_code(bprm->file, N_TXTADDR(ex), fd_offset, ex 220 fs/binfmt_aout.c ex.a_text + ex.a_data); ex 224 fs/binfmt_aout.c error = vm_mmap(bprm->file, N_TXTADDR(ex), ex.a_text, ex 229 fs/binfmt_aout.c if (error != N_TXTADDR(ex)) ex 232 fs/binfmt_aout.c error = vm_mmap(bprm->file, N_DATADDR(ex), ex.a_data, ex 235 fs/binfmt_aout.c fd_offset + ex.a_text); ex 236 fs/binfmt_aout.c if (error != N_DATADDR(ex)) ex 249 fs/binfmt_aout.c regs->gp = ex.a_gpvalue; ex 252 fs/binfmt_aout.c start_thread(regs, ex.a_entry, current->mm->start_stack); ex 262 fs/binfmt_aout.c struct exec ex; ex 268 fs/binfmt_aout.c error = kernel_read(file, &ex, sizeof(ex), &pos); ex 269 fs/binfmt_aout.c if (error != sizeof(ex)) ex 273 fs/binfmt_aout.c if ((N_MAGIC(ex) != ZMAGIC && N_MAGIC(ex) != QMAGIC) || N_TRSIZE(ex) || ex 274 fs/binfmt_aout.c N_DRSIZE(ex) || ((ex.a_entry & 0xfff) && N_MAGIC(ex) == ZMAGIC) || ex 275 fs/binfmt_aout.c i_size_read(inode) < ex.a_text+ex.a_data+N_SYMSIZE(ex)+N_TXTOFF(ex)) { ex 286 fs/binfmt_aout.c if (N_FLAGS(ex)) ex 292 fs/binfmt_aout.c start_addr = ex.a_entry & 0xfffff000; ex 294 fs/binfmt_aout.c if ((N_TXTOFF(ex) & ~PAGE_MASK) != 0) { ex 301 fs/binfmt_aout.c retval = vm_brk(start_addr, ex.a_text + ex.a_data + ex.a_bss); ex 305 fs/binfmt_aout.c read_code(file, start_addr, N_TXTOFF(ex), ex 306 fs/binfmt_aout.c ex.a_text + ex.a_data); ex 311 fs/binfmt_aout.c error = vm_mmap(file, start_addr, ex.a_text + ex.a_data, ex 314 fs/binfmt_aout.c N_TXTOFF(ex)); ex 319 fs/binfmt_aout.c len = PAGE_ALIGN(ex.a_text + ex.a_data); ex 320 fs/binfmt_aout.c bss = ex.a_text + ex.a_data + ex.a_bss; ex 57 fs/binfmt_elf.c #define elf_check_fdpic(ex) false ex 3579 fs/ceph/caps.c static void handle_cap_export(struct inode *inode, struct ceph_mds_caps *ex, ex 3588 fs/ceph/caps.c unsigned mseq = le32_to_cpu(ex->migrate_seq); ex 3608 fs/ceph/caps.c if (!cap || cap->cap_id != le64_to_cpu(ex->cap_id)) ex 2583 fs/cifs/smbdirect.c wr->ex.invalidate_rkey = smbdirect_mr->mr->rkey; ex 122 fs/dlm/plock.c op->info.ex = (fl->fl_type == F_WRLCK); ex 334 fs/dlm/plock.c op->info.ex = (fl->fl_type == F_WRLCK); ex 365 fs/dlm/plock.c fl->fl_type = (op->info.ex) ? F_WRLCK : F_RDLCK; ex 222 fs/ext4/ext4_extents.h static inline ext4_fsblk_t ext4_ext_pblock(struct ext4_extent *ex) ex 226 fs/ext4/ext4_extents.h block = le32_to_cpu(ex->ee_start_lo); ex 227 fs/ext4/ext4_extents.h block |= ((ext4_fsblk_t) le16_to_cpu(ex->ee_start_hi) << 31) << 1; ex 249 fs/ext4/ext4_extents.h static inline void ext4_ext_store_pblock(struct ext4_extent *ex, ex 252 fs/ext4/ext4_extents.h ex->ee_start_lo = cpu_to_le32((unsigned long) (pb & 0xffffffff)); ex 253 fs/ext4/ext4_extents.h ex->ee_start_hi = cpu_to_le16((unsigned long) ((pb >> 31) >> 1) & ex 176 fs/ext4/extents.c struct ext4_extent *ex; ex 195 fs/ext4/extents.c ex = path[depth].p_ext; ex 196 fs/ext4/extents.c if (ex) { ex 197 fs/ext4/extents.c ext4_fsblk_t ext_pblk = ext4_ext_pblock(ex); ex 198 fs/ext4/extents.c ext4_lblk_t ext_block = le32_to_cpu(ex->ee_block); ex 222 fs/ext4/extents.c struct ext4_extent *ex, int *err, unsigned int flags) ex 226 fs/ext4/extents.c goal = ext4_ext_find_goal(inode, path, le32_to_cpu(ex->ee_block)); ex 504 fs/ext4/extents.c struct ext4_extent *ex = EXT_FIRST_EXTENT(eh); ex 508 fs/ext4/extents.c for (i = le16_to_cpu(eh->eh_entries); i > 0; i--, ex++) { ex 510 fs/ext4/extents.c ext4_lblk_t lblk = le32_to_cpu(ex->ee_block); ex 511 fs/ext4/extents.c int len = ext4_ext_get_actual_len(ex); ex 517 fs/ext4/extents.c if (ext4_ext_is_unwritten(ex)) ex 520 fs/ext4/extents.c ext4_ext_pblock(ex), status); ex 663 fs/ext4/extents.c struct ext4_extent *ex; ex 670 fs/ext4/extents.c ex = EXT_FIRST_EXTENT(eh); ex 674 fs/ext4/extents.c for (i = 0; i < le16_to_cpu(eh->eh_entries); i++, ex++) { ex 675 fs/ext4/extents.c ext_debug("%d:[%d]%d:%llu ", le32_to_cpu(ex->ee_block), ex 676 fs/ext4/extents.c ext4_ext_is_unwritten(ex), ex 677 fs/ext4/extents.c ext4_ext_get_actual_len(ex), ext4_ext_pblock(ex)); ex 686 fs/ext4/extents.c struct ext4_extent *ex; ex 702 fs/ext4/extents.c ex = path[depth].p_ext; ex 703 fs/ext4/extents.c while (ex <= EXT_MAX_EXTENT(path[depth].p_hdr)) { ex 705 fs/ext4/extents.c le32_to_cpu(ex->ee_block), ex 706 fs/ext4/extents.c ext4_ext_pblock(ex), ex 707 fs/ext4/extents.c ext4_ext_is_unwritten(ex), ex 708 fs/ext4/extents.c ext4_ext_get_actual_len(ex), ex 710 fs/ext4/extents.c ex++; ex 839 fs/ext4/extents.c struct ext4_extent *chex, *ex; ex 842 fs/ext4/extents.c chex = ex = EXT_FIRST_EXTENT(eh); ex 843 fs/ext4/extents.c for (k = 0; k < le16_to_cpu(eh->eh_entries); k++, ex++) { ex 844 fs/ext4/extents.c BUG_ON(k && le32_to_cpu(ex->ee_block) ex 845 fs/ext4/extents.c <= le32_to_cpu(ex[-1].ee_block)); ex 846 fs/ext4/extents.c if (block < le32_to_cpu(ex->ee_block)) ex 848 fs/ext4/extents.c chex = ex; ex 1135 fs/ext4/extents.c struct ext4_extent *ex; ex 1136 fs/ext4/extents.c ex = EXT_FIRST_EXTENT(neh); ex 1137 fs/ext4/extents.c memmove(ex, path[depth].p_ext, sizeof(struct ext4_extent) * m); ex 1448 fs/ext4/extents.c struct ext4_extent *ex; ex 1465 fs/ext4/extents.c ex = path[depth].p_ext; ex 1466 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 1467 fs/ext4/extents.c if (*logical < le32_to_cpu(ex->ee_block)) { ex 1468 fs/ext4/extents.c if (unlikely(EXT_FIRST_EXTENT(path[depth].p_hdr) != ex)) { ex 1471 fs/ext4/extents.c *logical, le32_to_cpu(ex->ee_block)); ex 1489 fs/ext4/extents.c if (unlikely(*logical < (le32_to_cpu(ex->ee_block) + ee_len))) { ex 1492 fs/ext4/extents.c *logical, le32_to_cpu(ex->ee_block), ee_len); ex 1496 fs/ext4/extents.c *logical = le32_to_cpu(ex->ee_block) + ee_len - 1; ex 1497 fs/ext4/extents.c *phys = ext4_ext_pblock(ex) + ee_len - 1; ex 1516 fs/ext4/extents.c struct ext4_extent *ex; ex 1535 fs/ext4/extents.c ex = path[depth].p_ext; ex 1536 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 1537 fs/ext4/extents.c if (*logical < le32_to_cpu(ex->ee_block)) { ex 1538 fs/ext4/extents.c if (unlikely(EXT_FIRST_EXTENT(path[depth].p_hdr) != ex)) { ex 1556 fs/ext4/extents.c if (unlikely(*logical < (le32_to_cpu(ex->ee_block) + ee_len))) { ex 1559 fs/ext4/extents.c *logical, le32_to_cpu(ex->ee_block), ee_len); ex 1563 fs/ext4/extents.c if (ex != EXT_LAST_EXTENT(path[depth].p_hdr)) { ex 1565 fs/ext4/extents.c ex++; ex 1601 fs/ext4/extents.c ex = EXT_FIRST_EXTENT(eh); ex 1603 fs/ext4/extents.c *logical = le32_to_cpu(ex->ee_block); ex 1604 fs/ext4/extents.c *phys = ext4_ext_pblock(ex); ex 1605 fs/ext4/extents.c *ret_ex = ex; ex 1688 fs/ext4/extents.c struct ext4_extent *ex; ex 1693 fs/ext4/extents.c ex = path[depth].p_ext; ex 1695 fs/ext4/extents.c if (unlikely(ex == NULL || eh == NULL)) { ex 1697 fs/ext4/extents.c "ex %p == NULL or eh %p == NULL", ex, eh); ex 1706 fs/ext4/extents.c if (ex != EXT_FIRST_EXTENT(eh)) { ex 1793 fs/ext4/extents.c struct ext4_extent *ex) ex 1803 fs/ext4/extents.c while (ex < EXT_LAST_EXTENT(eh)) { ex 1804 fs/ext4/extents.c if (!ext4_can_extents_be_merged(inode, ex, ex + 1)) ex 1807 fs/ext4/extents.c unwritten = ext4_ext_is_unwritten(ex); ex 1808 fs/ext4/extents.c ex->ee_len = cpu_to_le16(ext4_ext_get_actual_len(ex) ex 1809 fs/ext4/extents.c + ext4_ext_get_actual_len(ex + 1)); ex 1811 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); ex 1813 fs/ext4/extents.c if (ex + 1 < EXT_LAST_EXTENT(eh)) { ex 1814 fs/ext4/extents.c len = (EXT_LAST_EXTENT(eh) - ex - 1) ex 1816 fs/ext4/extents.c memmove(ex + 1, ex + 2, len); ex 1880 fs/ext4/extents.c struct ext4_extent *ex) { ex 1889 fs/ext4/extents.c if (ex > EXT_FIRST_EXTENT(eh)) ex 1890 fs/ext4/extents.c merge_done = ext4_ext_try_to_merge_right(inode, path, ex - 1); ex 1893 fs/ext4/extents.c (void) ext4_ext_try_to_merge_right(inode, path, ex); ex 1961 fs/ext4/extents.c struct ext4_extent *ex, *fex; ex 1975 fs/ext4/extents.c ex = path[depth].p_ext; ex 1983 fs/ext4/extents.c if (ex && !(gb_flags & EXT4_GET_BLOCKS_PRE_IO)) { ex 1992 fs/ext4/extents.c if (ex < EXT_LAST_EXTENT(eh) && ex 1993 fs/ext4/extents.c (le32_to_cpu(ex->ee_block) + ex 1994 fs/ext4/extents.c ext4_ext_get_actual_len(ex) < ex 1996 fs/ext4/extents.c ex += 1; ex 1998 fs/ext4/extents.c } else if ((ex > EXT_FIRST_EXTENT(eh)) && ex 2001 fs/ext4/extents.c le32_to_cpu(ex->ee_block))) ex 2002 fs/ext4/extents.c ex -= 1; ex 2005 fs/ext4/extents.c if (ext4_can_extents_be_merged(inode, ex, newext)) { ex 2010 fs/ext4/extents.c le32_to_cpu(ex->ee_block), ex 2011 fs/ext4/extents.c ext4_ext_is_unwritten(ex), ex 2012 fs/ext4/extents.c ext4_ext_get_actual_len(ex), ex 2013 fs/ext4/extents.c ext4_ext_pblock(ex)); ex 2018 fs/ext4/extents.c unwritten = ext4_ext_is_unwritten(ex); ex 2019 fs/ext4/extents.c ex->ee_len = cpu_to_le16(ext4_ext_get_actual_len(ex) ex 2022 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); ex 2024 fs/ext4/extents.c nearex = ex; ex 2030 fs/ext4/extents.c if (ext4_can_extents_be_merged(inode, newext, ex)) { ex 2036 fs/ext4/extents.c le32_to_cpu(ex->ee_block), ex 2037 fs/ext4/extents.c ext4_ext_is_unwritten(ex), ex 2038 fs/ext4/extents.c ext4_ext_get_actual_len(ex), ex 2039 fs/ext4/extents.c ext4_ext_pblock(ex)); ex 2045 fs/ext4/extents.c unwritten = ext4_ext_is_unwritten(ex); ex 2046 fs/ext4/extents.c ex->ee_block = newext->ee_block; ex 2047 fs/ext4/extents.c ext4_ext_store_pblock(ex, ext4_ext_pblock(newext)); ex 2048 fs/ext4/extents.c ex->ee_len = cpu_to_le16(ext4_ext_get_actual_len(ex) ex 2051 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); ex 2053 fs/ext4/extents.c nearex = ex; ex 2181 fs/ext4/extents.c struct ext4_extent *ex; ex 2209 fs/ext4/extents.c ex = path[depth].p_ext; ex 2214 fs/ext4/extents.c if (!ex) { ex 2219 fs/ext4/extents.c } else if (le32_to_cpu(ex->ee_block) > block) { ex 2222 fs/ext4/extents.c end = le32_to_cpu(ex->ee_block); ex 2225 fs/ext4/extents.c } else if (block >= le32_to_cpu(ex->ee_block) ex 2226 fs/ext4/extents.c + ext4_ext_get_actual_len(ex)) { ex 2232 fs/ext4/extents.c } else if (block >= le32_to_cpu(ex->ee_block)) { ex 2238 fs/ext4/extents.c end = le32_to_cpu(ex->ee_block) ex 2239 fs/ext4/extents.c + ext4_ext_get_actual_len(ex); ex 2253 fs/ext4/extents.c es.es_lblk = le32_to_cpu(ex->ee_block); ex 2254 fs/ext4/extents.c es.es_len = ext4_ext_get_actual_len(ex); ex 2255 fs/ext4/extents.c es.es_pblk = ext4_ext_pblock(ex); ex 2256 fs/ext4/extents.c if (ext4_ext_is_unwritten(ex)) ex 2389 fs/ext4/extents.c struct ext4_extent *ex; ex 2392 fs/ext4/extents.c ex = path[depth].p_ext; ex 2393 fs/ext4/extents.c if (ex == NULL) { ex 2397 fs/ext4/extents.c } else if (*lblk < le32_to_cpu(ex->ee_block)) { ex 2398 fs/ext4/extents.c len = le32_to_cpu(ex->ee_block) - *lblk; ex 2399 fs/ext4/extents.c } else if (*lblk >= le32_to_cpu(ex->ee_block) ex 2400 fs/ext4/extents.c + ext4_ext_get_actual_len(ex)) { ex 2403 fs/ext4/extents.c *lblk = le32_to_cpu(ex->ee_block) + ext4_ext_get_actual_len(ex); ex 2595 fs/ext4/extents.c struct ext4_extent *ex, ex 2600 fs/ext4/extents.c unsigned short ee_len = ext4_ext_get_actual_len(ex); ex 2606 fs/ext4/extents.c if (from < le32_to_cpu(ex->ee_block) || ex 2607 fs/ext4/extents.c to != le32_to_cpu(ex->ee_block) + ee_len - 1) { ex 2610 fs/ext4/extents.c from, to, le32_to_cpu(ex->ee_block), ee_len); ex 2627 fs/ext4/extents.c trace_ext4_remove_blocks(inode, ex, from, to, partial); ex 2633 fs/ext4/extents.c last_pblk = ext4_ext_pblock(ex) + ee_len - 1; ex 2650 fs/ext4/extents.c num = le32_to_cpu(ex->ee_block) + ee_len - from; ex 2651 fs/ext4/extents.c pblk = ext4_ext_pblock(ex) + ee_len - num; ex 2744 fs/ext4/extents.c struct ext4_extent *ex; ex 2757 fs/ext4/extents.c ex = path[depth].p_ext; ex 2758 fs/ext4/extents.c if (!ex) ex 2759 fs/ext4/extents.c ex = EXT_LAST_EXTENT(eh); ex 2761 fs/ext4/extents.c ex_ee_block = le32_to_cpu(ex->ee_block); ex 2762 fs/ext4/extents.c ex_ee_len = ext4_ext_get_actual_len(ex); ex 2764 fs/ext4/extents.c trace_ext4_ext_rm_leaf(inode, start, ex, partial); ex 2766 fs/ext4/extents.c while (ex >= EXT_FIRST_EXTENT(eh) && ex 2769 fs/ext4/extents.c if (ext4_ext_is_unwritten(ex)) ex 2776 fs/ext4/extents.c path[depth].p_ext = ex; ex 2794 fs/ext4/extents.c pblk = ext4_ext_pblock(ex); ex 2798 fs/ext4/extents.c ex--; ex 2799 fs/ext4/extents.c ex_ee_block = le32_to_cpu(ex->ee_block); ex 2800 fs/ext4/extents.c ex_ee_len = ext4_ext_get_actual_len(ex); ex 2824 fs/ext4/extents.c if (ex == EXT_FIRST_EXTENT(eh)) { ex 2838 fs/ext4/extents.c err = ext4_remove_blocks(handle, inode, ex, partial, a, b); ex 2844 fs/ext4/extents.c ext4_ext_store_pblock(ex, 0); ex 2846 fs/ext4/extents.c ex->ee_len = cpu_to_le16(num); ex 2852 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); ex 2864 fs/ext4/extents.c memmove(ex, ex+1, (EXT_LAST_EXTENT(eh) - ex) * ex 2879 fs/ext4/extents.c ext4_ext_pblock(ex)); ex 2880 fs/ext4/extents.c ex--; ex 2881 fs/ext4/extents.c ex_ee_block = le32_to_cpu(ex->ee_block); ex 2882 fs/ext4/extents.c ex_ee_len = ext4_ext_get_actual_len(ex); ex 2895 fs/ext4/extents.c if (partial->state == tofree && ex >= EXT_FIRST_EXTENT(eh)) { ex 2896 fs/ext4/extents.c pblk = ext4_ext_pblock(ex) + ex_ee_len - 1; ex 2973 fs/ext4/extents.c struct ext4_extent *ex; ex 2985 fs/ext4/extents.c ex = path[depth].p_ext; ex 2986 fs/ext4/extents.c if (!ex) { ex 2996 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 2997 fs/ext4/extents.c ex_end = ee_block + ext4_ext_get_actual_len(ex) - 1; ex 3013 fs/ext4/extents.c pblk = ext4_ext_pblock(ex) + end - ee_block + 2; ex 3043 fs/ext4/extents.c &ex); ex 3252 fs/ext4/extents.c static int ext4_zeroout_es(struct inode *inode, struct ext4_extent *ex) ex 3258 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 3259 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3260 fs/ext4/extents.c ee_pblock = ext4_ext_pblock(ex); ex 3270 fs/ext4/extents.c static int ext4_ext_zeroout(struct inode *inode, struct ext4_extent *ex) ex 3275 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3276 fs/ext4/extents.c ee_pblock = ext4_ext_pblock(ex); ex 3277 fs/ext4/extents.c return ext4_issue_zeroout(inode, le32_to_cpu(ex->ee_block), ee_pblock, ex 3312 fs/ext4/extents.c struct ext4_extent *ex, newex, orig_ex, zero_ex; ex 3326 fs/ext4/extents.c ex = path[depth].p_ext; ex 3327 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 3328 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3329 fs/ext4/extents.c newblock = split - ee_block + ext4_ext_pblock(ex); ex 3332 fs/ext4/extents.c BUG_ON(!ext4_ext_is_unwritten(ex) && ex 3348 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); ex 3350 fs/ext4/extents.c ext4_ext_mark_initialized(ex); ex 3353 fs/ext4/extents.c ext4_ext_try_to_merge(handle, inode, path, ex); ex 3360 fs/ext4/extents.c memcpy(&orig_ex, ex, sizeof(orig_ex)); ex 3361 fs/ext4/extents.c ex->ee_len = cpu_to_le16(split - ee_block); ex 3363 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); ex 3391 fs/ext4/extents.c err = ext4_ext_zeroout(inode, ex); ex 3392 fs/ext4/extents.c zero_ex.ee_block = ex->ee_block; ex 3394 fs/ext4/extents.c ext4_ext_get_actual_len(ex)); ex 3396 fs/ext4/extents.c ext4_ext_pblock(ex)); ex 3410 fs/ext4/extents.c ex->ee_len = cpu_to_le16(ee_len); ex 3411 fs/ext4/extents.c ext4_ext_try_to_merge(handle, inode, path, ex); ex 3428 fs/ext4/extents.c ex->ee_len = orig_ex.ee_len; ex 3453 fs/ext4/extents.c struct ext4_extent *ex; ex 3461 fs/ext4/extents.c ex = path[depth].p_ext; ex 3462 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 3463 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3464 fs/ext4/extents.c unwritten = ext4_ext_is_unwritten(ex); ex 3489 fs/ext4/extents.c ex = path[depth].p_ext; ex 3490 fs/ext4/extents.c if (!ex) { ex 3495 fs/ext4/extents.c unwritten = ext4_ext_is_unwritten(ex); ex 3547 fs/ext4/extents.c struct ext4_extent *ex, *abut_ex; ex 3566 fs/ext4/extents.c ex = path[depth].p_ext; ex 3567 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 3568 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3572 fs/ext4/extents.c trace_ext4_ext_convert_to_initialized_enter(inode, map, ex); ex 3575 fs/ext4/extents.c BUG_ON(!ext4_ext_is_unwritten(ex)); ex 3596 fs/ext4/extents.c (ex > EXT_FIRST_EXTENT(eh))) { /*L2*/ ex 3601 fs/ext4/extents.c abut_ex = ex - 1; ex 3605 fs/ext4/extents.c ee_pblk = ext4_ext_pblock(ex); ex 3625 fs/ext4/extents.c map, ex, abut_ex); ex 3628 fs/ext4/extents.c ex->ee_block = cpu_to_le32(ee_block + map_len); ex 3629 fs/ext4/extents.c ext4_ext_store_pblock(ex, ee_pblk + map_len); ex 3630 fs/ext4/extents.c ex->ee_len = cpu_to_le16(ee_len - map_len); ex 3631 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); /* Restore the flag */ ex 3641 fs/ext4/extents.c ex < EXT_LAST_EXTENT(eh)) { /*L2*/ ex 3647 fs/ext4/extents.c abut_ex = ex + 1; ex 3651 fs/ext4/extents.c ee_pblk = ext4_ext_pblock(ex); ex 3671 fs/ext4/extents.c map, ex, abut_ex); ex 3676 fs/ext4/extents.c ex->ee_len = cpu_to_le16(ee_len - map_len); ex 3677 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); /* Restore the flag */ ex 3733 fs/ext4/extents.c ext4_ext_pblock(ex) + split_map.m_lblk + ex 3744 fs/ext4/extents.c zero_ex2.ee_block = ex->ee_block; ex 3748 fs/ext4/extents.c ext4_ext_pblock(ex)); ex 3807 fs/ext4/extents.c struct ext4_extent *ex; ex 3824 fs/ext4/extents.c ex = path[depth].p_ext; ex 3825 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 3826 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3847 fs/ext4/extents.c struct ext4_extent *ex; ex 3854 fs/ext4/extents.c ex = path[depth].p_ext; ex 3855 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 3856 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3883 fs/ext4/extents.c ex = path[depth].p_ext; ex 3890 fs/ext4/extents.c ext4_ext_mark_initialized(ex); ex 3895 fs/ext4/extents.c ext4_ext_try_to_merge(handle, inode, path, ex); ex 3964 fs/ext4/extents.c struct ext4_extent *ex; ex 3978 fs/ext4/extents.c ex = path[depth].p_ext; ex 3979 fs/ext4/extents.c ee_block = le32_to_cpu(ex->ee_block); ex 3980 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 3995 fs/ext4/extents.c ex = path[depth].p_ext; ex 3996 fs/ext4/extents.c if (!ex) { ex 4007 fs/ext4/extents.c ext4_ext_mark_unwritten(ex); ex 4012 fs/ext4/extents.c ext4_ext_try_to_merge(handle, inode, path, ex); ex 4189 fs/ext4/extents.c struct ext4_extent *ex, ex 4196 fs/ext4/extents.c ext4_lblk_t ee_block = le32_to_cpu(ex->ee_block); ex 4197 fs/ext4/extents.c ext4_fsblk_t ee_start = ext4_ext_pblock(ex); ex 4198 fs/ext4/extents.c unsigned short ee_len = ext4_ext_get_actual_len(ex); ex 4271 fs/ext4/extents.c struct ext4_extent newex, *ex, *ex2; ex 4309 fs/ext4/extents.c ex = path[depth].p_ext; ex 4310 fs/ext4/extents.c if (ex) { ex 4311 fs/ext4/extents.c ext4_lblk_t ee_block = le32_to_cpu(ex->ee_block); ex 4312 fs/ext4/extents.c ext4_fsblk_t ee_start = ext4_ext_pblock(ex); ex 4320 fs/ext4/extents.c ee_len = ext4_ext_get_actual_len(ex); ex 4336 fs/ext4/extents.c if ((!ext4_ext_is_unwritten(ex)) && ex 4342 fs/ext4/extents.c } else if (!ext4_ext_is_unwritten(ex)) ex 4390 fs/ext4/extents.c if (cluster_offset && ex && ex 4391 fs/ext4/extents.c get_implied_cluster_alloc(inode->i_sb, map, ex, path)) { ex 582 fs/ext4/extents_status.c struct ext4_extent *ex; ex 593 fs/ext4/extents_status.c ex = path[depth].p_ext; ex 595 fs/ext4/extents_status.c if (ex) { ex 597 fs/ext4/extents_status.c ee_block = le32_to_cpu(ex->ee_block); ex 598 fs/ext4/extents_status.c ee_start = ext4_ext_pblock(ex); ex 599 fs/ext4/extents_status.c ee_len = ext4_ext_get_actual_len(ex); ex 601 fs/ext4/extents_status.c ee_status = ext4_ext_is_unwritten(ex) ? 1 : 0; ex 1494 fs/ext4/mballoc.c int needed, struct ext4_free_extent *ex) ex 1501 fs/ext4/mballoc.c BUG_ON(ex == NULL); ex 1507 fs/ext4/mballoc.c ex->fe_len = 0; ex 1508 fs/ext4/mballoc.c ex->fe_start = 0; ex 1509 fs/ext4/mballoc.c ex->fe_group = 0; ex 1517 fs/ext4/mballoc.c ex->fe_len = 1 << order; ex 1518 fs/ext4/mballoc.c ex->fe_start = block << order; ex 1519 fs/ext4/mballoc.c ex->fe_group = e4b->bd_group; ex 1522 fs/ext4/mballoc.c next = next - ex->fe_start; ex 1523 fs/ext4/mballoc.c ex->fe_len -= next; ex 1524 fs/ext4/mballoc.c ex->fe_start += next; ex 1526 fs/ext4/mballoc.c while (needed > ex->fe_len && ex 1539 fs/ext4/mballoc.c ex->fe_len += 1 << order; ex 1542 fs/ext4/mballoc.c if (ex->fe_start + ex->fe_len > EXT4_CLUSTERS_PER_GROUP(e4b->bd_sb)) { ex 1547 fs/ext4/mballoc.c block, order, needed, ex->fe_group, ex->fe_start, ex 1548 fs/ext4/mballoc.c ex->fe_len, ex->fe_logical); ex 1549 fs/ext4/mballoc.c ex->fe_len = 0; ex 1550 fs/ext4/mballoc.c ex->fe_start = 0; ex 1551 fs/ext4/mballoc.c ex->fe_group = 0; ex 1553 fs/ext4/mballoc.c return ex->fe_len; ex 1556 fs/ext4/mballoc.c static int mb_mark_used(struct ext4_buddy *e4b, struct ext4_free_extent *ex) ex 1562 fs/ext4/mballoc.c int start = ex->fe_start; ex 1563 fs/ext4/mballoc.c int len = ex->fe_len; ex 1569 fs/ext4/mballoc.c BUG_ON(e4b->bd_group != ex->fe_group); ex 1625 fs/ext4/mballoc.c ext4_set_bits(e4b->bd_bitmap, ex->fe_start, len0); ex 1686 fs/ext4/mballoc.c struct ext4_free_extent ex; ex 1711 fs/ext4/mballoc.c max = mb_find_extent(e4b, bex->fe_start, gex->fe_len, &ex); ex 1730 fs/ext4/mballoc.c struct ext4_free_extent *ex, ex 1736 fs/ext4/mballoc.c BUG_ON(ex->fe_len <= 0); ex 1737 fs/ext4/mballoc.c BUG_ON(ex->fe_len > EXT4_CLUSTERS_PER_GROUP(ac->ac_sb)); ex 1738 fs/ext4/mballoc.c BUG_ON(ex->fe_start >= EXT4_CLUSTERS_PER_GROUP(ac->ac_sb)); ex 1747 fs/ext4/mballoc.c *bex = *ex; ex 1755 fs/ext4/mballoc.c if (ex->fe_len == gex->fe_len) { ex 1756 fs/ext4/mballoc.c *bex = *ex; ex 1765 fs/ext4/mballoc.c *bex = *ex; ex 1775 fs/ext4/mballoc.c if (ex->fe_len > bex->fe_len) ex 1776 fs/ext4/mballoc.c *bex = *ex; ex 1777 fs/ext4/mballoc.c } else if (ex->fe_len > gex->fe_len) { ex 1781 fs/ext4/mballoc.c if (ex->fe_len < bex->fe_len) ex 1782 fs/ext4/mballoc.c *bex = *ex; ex 1792 fs/ext4/mballoc.c struct ext4_free_extent ex = ac->ac_b_ex; ex 1793 fs/ext4/mballoc.c ext4_group_t group = ex.fe_group; ex 1797 fs/ext4/mballoc.c BUG_ON(ex.fe_len <= 0); ex 1803 fs/ext4/mballoc.c max = mb_find_extent(e4b, ex.fe_start, ex.fe_len, &ex); ex 1806 fs/ext4/mballoc.c ac->ac_b_ex = ex; ex 1825 fs/ext4/mballoc.c struct ext4_free_extent ex; ex 1843 fs/ext4/mballoc.c ac->ac_g_ex.fe_len, &ex); ex 1844 fs/ext4/mballoc.c ex.fe_logical = 0xDEADFA11; /* debug value */ ex 1850 fs/ext4/mballoc.c ex.fe_start; ex 1854 fs/ext4/mballoc.c ac->ac_b_ex = ex; ex 1858 fs/ext4/mballoc.c BUG_ON(ex.fe_len <= 0); ex 1859 fs/ext4/mballoc.c BUG_ON(ex.fe_group != ac->ac_g_ex.fe_group); ex 1860 fs/ext4/mballoc.c BUG_ON(ex.fe_start != ac->ac_g_ex.fe_start); ex 1862 fs/ext4/mballoc.c ac->ac_b_ex = ex; ex 1867 fs/ext4/mballoc.c BUG_ON(ex.fe_len <= 0); ex 1868 fs/ext4/mballoc.c BUG_ON(ex.fe_group != ac->ac_g_ex.fe_group); ex 1869 fs/ext4/mballoc.c BUG_ON(ex.fe_start != ac->ac_g_ex.fe_start); ex 1871 fs/ext4/mballoc.c ac->ac_b_ex = ex; ex 1934 fs/ext4/mballoc.c struct ext4_free_extent ex; ex 1962 fs/ext4/mballoc.c mb_find_extent(e4b, i, ac->ac_g_ex.fe_len, &ex); ex 1963 fs/ext4/mballoc.c if (WARN_ON(ex.fe_len <= 0)) ex 1965 fs/ext4/mballoc.c if (free < ex.fe_len) { ex 1969 fs/ext4/mballoc.c free, ex.fe_len); ex 1979 fs/ext4/mballoc.c ex.fe_logical = 0xDEADC0DE; /* debug value */ ex 1980 fs/ext4/mballoc.c ext4_mb_measure_extent(ac, &ex, e4b); ex 1982 fs/ext4/mballoc.c i += ex.fe_len; ex 1983 fs/ext4/mballoc.c free -= ex.fe_len; ex 2000 fs/ext4/mballoc.c struct ext4_free_extent ex; ex 2017 fs/ext4/mballoc.c max = mb_find_extent(e4b, i, sbi->s_stripe, &ex); ex 2020 fs/ext4/mballoc.c ex.fe_logical = 0xDEADF00D; /* debug value */ ex 2021 fs/ext4/mballoc.c ac->ac_b_ex = ex; ex 5135 fs/ext4/mballoc.c struct ext4_free_extent ex; ex 5142 fs/ext4/mballoc.c ex.fe_start = start; ex 5143 fs/ext4/mballoc.c ex.fe_group = group; ex 5144 fs/ext4/mballoc.c ex.fe_len = count; ex 5150 fs/ext4/mballoc.c mb_mark_used(e4b, &ex); ex 5154 fs/ext4/mballoc.c mb_free_blocks(NULL, e4b, start, ex.fe_len); ex 618 fs/ext4/migrate.c struct ext4_extent *ex; ex 654 fs/ext4/migrate.c ex = EXT_FIRST_EXTENT(eh); ex 663 fs/ext4/migrate.c len = le16_to_cpu(ex->ee_len); ex 664 fs/ext4/migrate.c blk = ext4_ext_pblock(ex); ex 665 fs/ext4/migrate.c start = le32_to_cpu(ex->ee_block); ex 619 fs/ext4/move_extent.c struct ext4_extent *ex; ex 628 fs/ext4/move_extent.c ex = path[path->p_depth].p_ext; ex 630 fs/ext4/move_extent.c cur_blk = le32_to_cpu(ex->ee_block); ex 631 fs/ext4/move_extent.c cur_len = ext4_ext_get_actual_len(ex); ex 653 fs/ext4/move_extent.c unwritten = ext4_ext_is_unwritten(ex); ex 19 fs/hpfs/ea.c char ex[4 + 255 + 1 + 8]; ex 20 fs/hpfs/ea.c struct extended_attribute *ea = (struct extended_attribute *)ex; ex 26 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos, 4, ex)) return; ex 33 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos + 4, ea->namelen + 9, ex+4)) ex 80 fs/hpfs/ea.c char ex[4 + 255 + 1 + 8]; ex 98 fs/hpfs/ea.c ea = (struct extended_attribute *)ex; ex 104 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos, 4, ex)) return -EIO; ex 105 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos + 4, ea->namelen + 1 + (ea_indirect(ea) ? 8 : 0), ex + 4)) ex 155 fs/hpfs/ea.c char ex[4 + 255 + 1 + 8]; ex 156 fs/hpfs/ea.c ea = (struct extended_attribute *)ex; ex 162 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos, 4, ex)) return NULL; ex 163 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos + 4, ea->namelen + 1 + (ea_indirect(ea) ? 8 : 0), ex + 4)) ex 216 fs/hpfs/ea.c char ex[4 + 255 + 1 + 8]; ex 217 fs/hpfs/ea.c ea = (struct extended_attribute *)ex; ex 223 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos, 4, ex)) return; ex 224 fs/hpfs/ea.c if (hpfs_ea_read(s, a, ano, pos + 4, ea->namelen + 1 + (ea_indirect(ea) ? 8 : 0), ex + 4)) ex 1696 fs/ocfs2/dlmglue.c int ex, ex 1699 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 1968 fs/ocfs2/dlmglue.c int ocfs2_file_lock(struct file *file, int ex, int trylock) ex 1970 fs/ocfs2/dlmglue.c int ret, level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 2060 fs/ocfs2/dlmglue.c lockres->l_name, ex, trylock, ret); ex 2412 fs/ocfs2/dlmglue.c int ex, ex 2424 fs/ocfs2/dlmglue.c ex ? "EXMODE" : "PRMODE"); ex 2431 fs/ocfs2/dlmglue.c if (ex) ex 2444 fs/ocfs2/dlmglue.c level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 2508 fs/ocfs2/dlmglue.c ocfs2_inode_unlock(inode, ex); ex 2533 fs/ocfs2/dlmglue.c int ex, ex 2538 fs/ocfs2/dlmglue.c ret = ocfs2_inode_lock_full(inode, ret_bh, ex, OCFS2_LOCK_NONBLOCK); ex 2548 fs/ocfs2/dlmglue.c if (ocfs2_inode_lock(inode, ret_bh, ex) == 0) ex 2549 fs/ocfs2/dlmglue.c ocfs2_inode_unlock(inode, ex); ex 2602 fs/ocfs2/dlmglue.c int ex) ex 2604 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 2610 fs/ocfs2/dlmglue.c ex ? "EXMODE" : "PRMODE"); ex 2660 fs/ocfs2/dlmglue.c int ex, ex 2677 fs/ocfs2/dlmglue.c status = ocfs2_inode_lock_full(inode, ret_bh, ex, 0); ex 2684 fs/ocfs2/dlmglue.c oh->oh_ex = ex; ex 2689 fs/ocfs2/dlmglue.c if (unlikely(ex && !tmp_oh->oh_ex)) { ex 2704 fs/ocfs2/dlmglue.c status = ocfs2_inode_lock_full(inode, ret_bh, ex, ex 2716 fs/ocfs2/dlmglue.c int ex, ex 2776 fs/ocfs2/dlmglue.c int ex) ex 2779 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 2815 fs/ocfs2/dlmglue.c int ex) ex 2817 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 2850 fs/ocfs2/dlmglue.c int ocfs2_nfs_sync_lock(struct ocfs2_super *osb, int ex) ex 2861 fs/ocfs2/dlmglue.c status = ocfs2_cluster_lock(osb, lockres, ex ? LKM_EXMODE : LKM_PRMODE, ex 2869 fs/ocfs2/dlmglue.c void ocfs2_nfs_sync_unlock(struct ocfs2_super *osb, int ex) ex 2875 fs/ocfs2/dlmglue.c ex ? LKM_EXMODE : LKM_PRMODE); ex 2942 fs/ocfs2/dlmglue.c int ocfs2_dentry_lock(struct dentry *dentry, int ex) ex 2945 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 2952 fs/ocfs2/dlmglue.c if (ex) ex 2967 fs/ocfs2/dlmglue.c void ocfs2_dentry_unlock(struct dentry *dentry, int ex) ex 2969 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 4144 fs/ocfs2/dlmglue.c void ocfs2_qinfo_unlock(struct ocfs2_mem_dqinfo *oinfo, int ex) ex 4148 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 4199 fs/ocfs2/dlmglue.c int ocfs2_qinfo_lock(struct ocfs2_mem_dqinfo *oinfo, int ex) ex 4203 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 4208 fs/ocfs2/dlmglue.c if (ex) ex 4225 fs/ocfs2/dlmglue.c ocfs2_qinfo_unlock(oinfo, ex); ex 4231 fs/ocfs2/dlmglue.c int ocfs2_refcount_lock(struct ocfs2_refcount_tree *ref_tree, int ex) ex 4234 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 4252 fs/ocfs2/dlmglue.c void ocfs2_refcount_unlock(struct ocfs2_refcount_tree *ref_tree, int ex) ex 4254 fs/ocfs2/dlmglue.c int level = ex ? DLM_LOCK_EX : DLM_LOCK_PR; ex 139 fs/ocfs2/dlmglue.h int ex, ex 144 fs/ocfs2/dlmglue.h int ex, ex 158 fs/ocfs2/dlmglue.h int ex); ex 160 fs/ocfs2/dlmglue.h int ex); ex 162 fs/ocfs2/dlmglue.h int ex); ex 168 fs/ocfs2/dlmglue.h int ocfs2_nfs_sync_lock(struct ocfs2_super *osb, int ex); ex 169 fs/ocfs2/dlmglue.h void ocfs2_nfs_sync_unlock(struct ocfs2_super *osb, int ex); ex 176 fs/ocfs2/dlmglue.h int ocfs2_dentry_lock(struct dentry *dentry, int ex); ex 177 fs/ocfs2/dlmglue.h void ocfs2_dentry_unlock(struct dentry *dentry, int ex); ex 178 fs/ocfs2/dlmglue.h int ocfs2_file_lock(struct file *file, int ex, int trylock); ex 180 fs/ocfs2/dlmglue.h int ocfs2_qinfo_lock(struct ocfs2_mem_dqinfo *oinfo, int ex); ex 181 fs/ocfs2/dlmglue.h void ocfs2_qinfo_unlock(struct ocfs2_mem_dqinfo *oinfo, int ex); ex 183 fs/ocfs2/dlmglue.h int ocfs2_refcount_lock(struct ocfs2_refcount_tree *ref_tree, int ex); ex 184 fs/ocfs2/dlmglue.h void ocfs2_refcount_unlock(struct ocfs2_refcount_tree *ref_tree, int ex); ex 204 fs/ocfs2/dlmglue.h int ex, ex 207 fs/ocfs2/dlmglue.h int ex, ex 111 fs/ocfs2/quota.h int ocfs2_lock_global_qf(struct ocfs2_mem_dqinfo *oinfo, int ex); ex 112 fs/ocfs2/quota.h void ocfs2_unlock_global_qf(struct ocfs2_mem_dqinfo *oinfo, int ex); ex 298 fs/ocfs2/quota_global.c int ocfs2_lock_global_qf(struct ocfs2_mem_dqinfo *oinfo, int ex) ex 303 fs/ocfs2/quota_global.c status = ocfs2_inode_lock(oinfo->dqi_gqinode, &bh, ex); ex 312 fs/ocfs2/quota_global.c if (ex) { ex 321 fs/ocfs2/quota_global.c void ocfs2_unlock_global_qf(struct ocfs2_mem_dqinfo *oinfo, int ex) ex 323 fs/ocfs2/quota_global.c if (ex) { ex 329 fs/ocfs2/quota_global.c ocfs2_inode_unlock(oinfo->dqi_gqinode, ex); ex 795 fs/ocfs2/quota_global.c int ex = 0; ex 830 fs/ocfs2/quota_global.c ex = 1; ex 849 fs/ocfs2/quota_global.c status = ocfs2_qinfo_lock(info, ex); ex 853 fs/ocfs2/quota_global.c if (ex && info_dirty(sb_dqinfo(sb, type))) { ex 858 fs/ocfs2/quota_global.c ocfs2_qinfo_unlock(info, ex); ex 374 fs/select.c unsigned long *in, *out, *ex; ex 415 fs/select.c #define FDS_EX(fds, n) (fds->ex + n) ex 509 fs/select.c inp = fds->in; outp = fds->out; exp = fds->ex; ex 513 fs/select.c unsigned long in, out, ex, all_bits, bit = 1, j; ex 517 fs/select.c in = *inp++; out = *outp++; ex = *exp++; ex 518 fs/select.c all_bits = in | out | ex; ex 547 fs/select.c if ((mask & POLLEX_SET) && (ex & bit)) { ex 664 fs/select.c fds.ex = bits + 2*size; ex 671 fs/select.c (ret = get_fd_set(n, exp, fds.ex))) ex 1210 fs/select.c fds.ex = (unsigned long *) (bits + 2*size); ex 1217 fs/select.c (ret = compat_get_fd_set(n, exp, fds.ex))) ex 21 include/linux/ceph/striper.h static inline void ceph_object_extent_init(struct ceph_object_extent *ex) ex 23 include/linux/ceph/striper.h INIT_LIST_HEAD(&ex->oe_item); ex 33 include/linux/ceph/striper.h typedef void (*ceph_object_extent_fn_t)(struct ceph_object_extent *ex, ex 12 include/linux/elf.h # define elf_read_implies_exec(ex, have_pt_gnu_stack) 0 ex 15 include/linux/elf.h #define SET_PERSONALITY(ex) \ ex 20 include/linux/elf.h #define SET_PERSONALITY2(ex, state) \ ex 21 include/linux/elf.h SET_PERSONALITY(ex) ex 123 include/math-emu/soft-fp.h #define FP_SET_EXCEPTION(ex) \ ex 124 include/math-emu/soft-fp.h _fex |= (ex) ex 126 include/math-emu/soft-fp.h #define FP_UNSET_EXCEPTION(ex) \ ex 127 include/math-emu/soft-fp.h _fex &= ~(ex) ex 981 include/rdma/ib_verbs.h } ex; ex 1337 include/rdma/ib_verbs.h } ex; ex 2059 include/trace/events/ext4.h TP_PROTO(struct inode *inode, struct ext4_extent *ex, ex 2063 include/trace/events/ext4.h TP_ARGS(inode, ex, from, to, pc), ex 2083 include/trace/events/ext4.h __entry->ee_pblk = ext4_ext_pblock(ex); ex 2084 include/trace/events/ext4.h __entry->ee_lblk = le32_to_cpu(ex->ee_block); ex 2085 include/trace/events/ext4.h __entry->ee_len = ext4_ext_get_actual_len(ex); ex 2107 include/trace/events/ext4.h struct ext4_extent *ex, ex 2110 include/trace/events/ext4.h TP_ARGS(inode, start, ex, pc), ex 2128 include/trace/events/ext4.h __entry->ee_lblk = le32_to_cpu(ex->ee_block); ex 2129 include/trace/events/ext4.h __entry->ee_pblk = ext4_ext_pblock(ex); ex 2130 include/trace/events/ext4.h __entry->ee_len = ext4_ext_get_actual_len(ex); ex 1656 include/trace/events/rpcrdma.h wr->ex.invalidate_rkey : 0; ex 32 include/uapi/linux/dlm_plock.h __u8 ex; ex 284 include/uapi/linux/kvm.h } ex; ex 469 include/uapi/rdma/ib_user_verbs.h } ex; ex 801 include/uapi/rdma/ib_user_verbs.h } ex; ex 77 include/uapi/rdma/rdma_user_rxe.h } ex; ex 234 include/uapi/rdma/vmw_pvrdma-abi.h } ex; ex 59 net/ceph/striper.c struct ceph_object_extent *ex = ex 60 net/ceph/striper.c list_entry(pos, typeof(*ex), oe_item); ex 62 net/ceph/striper.c if (ex->oe_objno == objno) ex 63 net/ceph/striper.c return ex; ex 65 net/ceph/striper.c if (ex->oe_objno < objno) ex 77 net/ceph/striper.c struct ceph_object_extent *ex; ex 79 net/ceph/striper.c list_for_each_entry(ex, object_extents, oe_item) { ex 80 net/ceph/striper.c if (ex->oe_objno == objno && ex 81 net/ceph/striper.c ex->oe_off <= objoff && ex 82 net/ceph/striper.c ex->oe_off + ex->oe_len >= objoff + xlen) /* paranoia */ ex 83 net/ceph/striper.c return ex; ex 85 net/ceph/striper.c if (ex->oe_objno > objno) ex 117 net/ceph/striper.c struct ceph_object_extent *last_ex, *ex; ex 129 net/ceph/striper.c ex = alloc_fn(alloc_arg); ex 130 net/ceph/striper.c if (!ex) ex 133 net/ceph/striper.c ex->oe_objno = objno; ex 134 net/ceph/striper.c ex->oe_off = objoff; ex 135 net/ceph/striper.c ex->oe_len = xlen; ex 137 net/ceph/striper.c action_fn(ex, xlen, action_arg); ex 140 net/ceph/striper.c list_add(&ex->oe_item, add_pos); ex 142 net/ceph/striper.c list_add(&ex->oe_item, &last_ex->oe_item); ex 153 net/ceph/striper.c for (last_ex = list_first_entry(object_extents, typeof(*ex), oe_item), ex 154 net/ceph/striper.c ex = list_next_entry(last_ex, oe_item); ex 155 net/ceph/striper.c &ex->oe_item != object_extents; ex 156 net/ceph/striper.c last_ex = ex, ex = list_next_entry(ex, oe_item)) { ex 157 net/ceph/striper.c if (last_ex->oe_objno > ex->oe_objno || ex 158 net/ceph/striper.c (last_ex->oe_objno == ex->oe_objno && ex 159 net/ceph/striper.c last_ex->oe_off + last_ex->oe_len >= ex->oe_off)) { ex 180 net/ceph/striper.c struct ceph_object_extent *ex; ex 187 net/ceph/striper.c ex = lookup_containing(object_extents, objno, objoff, xlen); ex 188 net/ceph/striper.c if (!ex) { ex 194 net/ceph/striper.c action_fn(ex, xlen, action_arg); ex 303 net/rds/ib_cm.c wc->byte_len, be32_to_cpu(wc->ex.imm_data)); ex 348 net/rds/ib_cm.c wc->byte_len, be32_to_cpu(wc->ex.imm_data)); ex 290 net/rds/ib_frmr.c s_wr->ex.invalidate_rkey = frmr->mr->rkey; ex 980 net/rds/ib_recv.c be32_to_cpu(wc->ex.imm_data)); ex 201 net/rds/ib_send.c send->s_wr.ex.imm_data = 0; ex 255 net/rds/ib_send.c be32_to_cpu(wc->ex.imm_data)); ex 555 net/sunrpc/xprtrdma/frwr_ops.c last->ex.invalidate_rkey = mr->mr_handle; ex 658 net/sunrpc/xprtrdma/frwr_ops.c last->ex.invalidate_rkey = mr->mr_handle; ex 766 net/sunrpc/xprtrdma/svc_rdma_sendto.c sctxt->sc_send_wr.ex.invalidate_rkey = rctxt->rc_inv_rkey; ex 166 net/sunrpc/xprtrdma/verbs.c rep->rr_inv_rkey = wc->ex.invalidate_rkey; ex 285 scripts/mod/modpost.c static const char *export_str(enum export ex) ex 287 scripts/mod/modpost.c return export_list[ex].str; ex 61 security/device_cgroup.c struct dev_exception_item *ex, *tmp, *new; ex 65 security/device_cgroup.c list_for_each_entry(ex, orig, list) { ex 66 security/device_cgroup.c new = kmemdup(ex, sizeof(*ex), GFP_KERNEL); ex 75 security/device_cgroup.c list_for_each_entry_safe(ex, tmp, dest, list) { ex 76 security/device_cgroup.c list_del(&ex->list); ex 77 security/device_cgroup.c kfree(ex); ex 86 security/device_cgroup.c struct dev_exception_item *ex) ex 92 security/device_cgroup.c excopy = kmemdup(ex, sizeof(*ex), GFP_KERNEL); ex 97 security/device_cgroup.c if (walk->type != ex->type) ex 99 security/device_cgroup.c if (walk->major != ex->major) ex 101 security/device_cgroup.c if (walk->minor != ex->minor) ex 104 security/device_cgroup.c walk->access |= ex->access; ex 118 security/device_cgroup.c struct dev_exception_item *ex) ex 125 security/device_cgroup.c if (walk->type != ex->type) ex 127 security/device_cgroup.c if (walk->major != ex->major) ex 129 security/device_cgroup.c if (walk->minor != ex->minor) ex 132 security/device_cgroup.c walk->access &= ~ex->access; ex 142 security/device_cgroup.c struct dev_exception_item *ex, *tmp; ex 144 security/device_cgroup.c list_for_each_entry_safe(ex, tmp, &dev_cgroup->exceptions, list) { ex 145 security/device_cgroup.c list_del_rcu(&ex->list); ex 146 security/device_cgroup.c kfree_rcu(ex, rcu); ex 270 security/device_cgroup.c struct dev_exception_item *ex; ex 287 security/device_cgroup.c list_for_each_entry_rcu(ex, &devcgroup->exceptions, list) { ex 288 security/device_cgroup.c set_access(acc, ex->access); ex 289 security/device_cgroup.c set_majmin(maj, ex->major); ex 290 security/device_cgroup.c set_majmin(min, ex->minor); ex 291 security/device_cgroup.c seq_printf(m, "%c %s:%s %s\n", type_to_char(ex->type), ex 316 security/device_cgroup.c struct dev_exception_item *ex; ex 318 security/device_cgroup.c list_for_each_entry_rcu(ex, exceptions, list) { ex 319 security/device_cgroup.c if ((type & DEVCG_DEV_BLOCK) && !(ex->type & DEVCG_DEV_BLOCK)) ex 321 security/device_cgroup.c if ((type & DEVCG_DEV_CHAR) && !(ex->type & DEVCG_DEV_CHAR)) ex 323 security/device_cgroup.c if (ex->major != ~0 && ex->major != major) ex 325 security/device_cgroup.c if (ex->minor != ~0 && ex->minor != minor) ex 328 security/device_cgroup.c if (access & (~ex->access)) ex 353 security/device_cgroup.c struct dev_exception_item *ex; ex 355 security/device_cgroup.c list_for_each_entry_rcu(ex, exceptions, list) { ex 356 security/device_cgroup.c if ((type & DEVCG_DEV_BLOCK) && !(ex->type & DEVCG_DEV_BLOCK)) ex 358 security/device_cgroup.c if ((type & DEVCG_DEV_CHAR) && !(ex->type & DEVCG_DEV_CHAR)) ex 364 security/device_cgroup.c if (ex->major != ~0 && major != ~0 && ex->major != major) ex 366 security/device_cgroup.c if (ex->minor != ~0 && minor != ~0 && ex->minor != minor) ex 373 security/device_cgroup.c if (!(access & ex->access)) ex 448 security/device_cgroup.c struct dev_exception_item *ex) ex 454 security/device_cgroup.c return verify_new_ex(parent, ex, childcg->behavior); ex 469 security/device_cgroup.c struct dev_exception_item *ex) ex 484 security/device_cgroup.c return !match_exception_partial(&parent->exceptions, ex->type, ex 485 security/device_cgroup.c ex->major, ex->minor, ex->access); ex 516 security/device_cgroup.c struct dev_exception_item *ex; ex 520 security/device_cgroup.c ex = container_of(this, struct dev_exception_item, list); ex 521 security/device_cgroup.c if (!parent_has_perm(devcg, ex)) ex 522 security/device_cgroup.c dev_exception_rm(devcg, ex); ex 534 security/device_cgroup.c struct dev_exception_item *ex) ex 561 security/device_cgroup.c rc = dev_exception_add(devcg, ex); ex 571 security/device_cgroup.c dev_exception_rm(devcg, ex); ex 601 security/device_cgroup.c struct dev_exception_item ex; ex 607 security/device_cgroup.c memset(&ex, 0, sizeof(ex)); ex 641 security/device_cgroup.c ex.type = DEVCG_DEV_BLOCK; ex 644 security/device_cgroup.c ex.type = DEVCG_DEV_CHAR; ex 654 security/device_cgroup.c ex.major = ~0; ex 664 security/device_cgroup.c rc = kstrtou32(temp, 10, &ex.major); ex 676 security/device_cgroup.c ex.minor = ~0; ex 686 security/device_cgroup.c rc = kstrtou32(temp, 10, &ex.minor); ex 697 security/device_cgroup.c ex.access |= DEVCG_ACC_READ; ex 700 security/device_cgroup.c ex.access |= DEVCG_ACC_WRITE; ex 703 security/device_cgroup.c ex.access |= DEVCG_ACC_MKNOD; ex 723 security/device_cgroup.c if (!parent_allows_removal(devcgroup, &ex)) ex 725 security/device_cgroup.c dev_exception_rm(devcgroup, &ex); ex 729 security/device_cgroup.c if (!parent_has_perm(devcgroup, &ex)) ex 731 security/device_cgroup.c rc = dev_exception_add(devcgroup, &ex); ex 740 security/device_cgroup.c dev_exception_rm(devcgroup, &ex); ex 742 security/device_cgroup.c rc = dev_exception_add(devcgroup, &ex); ex 747 security/device_cgroup.c rc = propagate_exception(devcgroup, &ex); ex 284 tools/include/uapi/linux/kvm.h } ex; ex 23 tools/testing/selftests/x86/test_FISTTP.c int ex; ex 39 tools/testing/selftests/x86/test_FISTTP.c ex = fetestexcept(FE_DIVBYZERO|FE_INEXACT|FE_INVALID|FE_OVERFLOW|FE_UNDERFLOW); ex 40 tools/testing/selftests/x86/test_FISTTP.c if (ex != 0) { ex 59 tools/testing/selftests/x86/test_FISTTP.c ex = fetestexcept(FE_DIVBYZERO|FE_INEXACT|FE_INVALID|FE_OVERFLOW|FE_UNDERFLOW); ex 60 tools/testing/selftests/x86/test_FISTTP.c if (ex != FE_INEXACT) { ex 82 tools/testing/selftests/x86/test_FISTTP.c ex = fetestexcept(FE_DIVBYZERO|FE_INEXACT|FE_INVALID|FE_OVERFLOW|FE_UNDERFLOW); ex 83 tools/testing/selftests/x86/test_FISTTP.c if (ex != FE_INEXACT) { ex 103 tools/testing/selftests/x86/test_FISTTP.c ex = fetestexcept(FE_DIVBYZERO|FE_INEXACT|FE_INVALID|FE_OVERFLOW|FE_UNDERFLOW); ex 104 tools/testing/selftests/x86/test_FISTTP.c if (ex != FE_INEXACT) {