ge 192 arch/arm/include/asm/assembler.h .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
ge 449 arch/arm/include/asm/assembler.h .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
ge 90 arch/arm/mach-dove/common.c struct clk *xor0, *xor1, *ge, *gephy;
ge 111 arch/arm/mach-dove/common.c ge = dove_register_gate("ge", "gephy", CLOCK_GATING_BIT_GBE);
ge 120 arch/arm/mach-dove/common.c orion_clkdev_add(NULL, "mv643xx_eth_port.0", ge);
ge 221 arch/ia64/kernel/minstate.h cmp.ge p6,p7 = 33,r17; \
ge 63 arch/mips/include/asm/cpu-features.h #define __isa_range(ge, lt) \
ge 64 arch/mips/include/asm/cpu-features.h ((MIPS_ISA_REV >= (ge)) && (MIPS_ISA_REV < (lt)))
ge 65 arch/mips/include/asm/cpu-features.h #define __isa_range_or_flag(ge, lt, flag) \
ge 66 arch/mips/include/asm/cpu-features.h (__isa_range(ge, lt) || ((MIPS_ISA_REV < (lt)) && __isa(flag)))
ge 280 arch/mips/include/asm/octeon/cvmx-pciercx-defs.h __BITFIELD_FIELD(uint32_t ge:1,
ge 470 arch/mips/pci/pcie-octeon.c pciercx_cfg070.s.ge = 1; /* ECRC generation enable. */
ge 836 drivers/gpio/gpiolib.c struct gpioevent_data ge;
ge 840 drivers/gpio/gpiolib.c memset(&ge, 0, sizeof(ge));
ge 847 drivers/gpio/gpiolib.c ge.timestamp = ktime_get_real_ns();
ge 849 drivers/gpio/gpiolib.c ge.timestamp = le->timestamp;
ge 856 drivers/gpio/gpiolib.c ge.id = GPIOEVENT_EVENT_RISING_EDGE;
ge 859 drivers/gpio/gpiolib.c ge.id = GPIOEVENT_EVENT_FALLING_EDGE;
ge 862 drivers/gpio/gpiolib.c ge.id = GPIOEVENT_EVENT_RISING_EDGE;
ge 865 drivers/gpio/gpiolib.c ge.id = GPIOEVENT_EVENT_FALLING_EDGE;
ge 870 drivers/gpio/gpiolib.c ret = kfifo_put(&le->events, ge);
ge 1128 drivers/gpu/drm/i915/gvt/gtt.c struct intel_vgpu_ppgtt_spt *s, struct intel_gvt_gtt_entry *ge)
ge 1132 drivers/gpu/drm/i915/gvt/gtt.c se->type = ge->type;
ge 1133 drivers/gpu/drm/i915/gvt/gtt.c se->val64 = ge->val64;
ge 1246 drivers/gpu/drm/i915/gvt/gtt.c struct intel_gvt_gtt_entry *ge)
ge 1249 drivers/gpu/drm/i915/gvt/gtt.c struct intel_gvt_gtt_entry se = *ge;
ge 1254 drivers/gpu/drm/i915/gvt/gtt.c if (!pte_ops->test_present(ge))
ge 1257 drivers/gpu/drm/i915/gvt/gtt.c gfn = pte_ops->get_pfn(ge);
ge 1259 drivers/gpu/drm/i915/gvt/gtt.c switch (ge->type) {
ge 1273 drivers/gpu/drm/i915/gvt/gtt.c ret = is_2MB_gtt_possible(vgpu, ge);
ge 1304 drivers/gpu/drm/i915/gvt/gtt.c struct intel_gvt_gtt_entry se, ge;
ge 1311 drivers/gpu/drm/i915/gvt/gtt.c for_each_present_guest_entry(spt, &ge, i) {
ge 1312 drivers/gpu/drm/i915/gvt/gtt.c if (gtt_type_is_pt(get_next_pt_type(ge.type))) {
ge 1313 drivers/gpu/drm/i915/gvt/gtt.c s = ppgtt_populate_spt_by_guest_entry(vgpu, &ge);
ge 1319 drivers/gpu/drm/i915/gvt/gtt.c ppgtt_generate_shadow_entry(&se, s, &ge);
ge 1322 drivers/gpu/drm/i915/gvt/gtt.c gfn = ops->get_pfn(&ge);
ge 1329 drivers/gpu/drm/i915/gvt/gtt.c ret = ppgtt_populate_shadow_entry(vgpu, spt, i, &ge);
ge 1337 drivers/gpu/drm/i915/gvt/gtt.c spt, ge.val64, ge.type);
ge 1686 drivers/gpu/drm/i915/gvt/gtt.c struct intel_gvt_gtt_entry ge;
ge 1696 drivers/gpu/drm/i915/gvt/gtt.c ppgtt_get_guest_entry(spt, &ge, index);
ge 1699 drivers/gpu/drm/i915/gvt/gtt.c &ge, index);
ge 1811 drivers/gpu/drm/i915/gvt/gtt.c struct intel_gvt_gtt_entry ge, se;
ge 1820 drivers/gpu/drm/i915/gvt/gtt.c ppgtt_get_guest_root_entry(mm, &ge, index);
ge 1822 drivers/gpu/drm/i915/gvt/gtt.c if (!ops->test_present(&ge))
ge 1826 drivers/gpu/drm/i915/gvt/gtt.c ge.type, ge.val64, index);
ge 1828 drivers/gpu/drm/i915/gvt/gtt.c spt = ppgtt_populate_spt_by_guest_entry(vgpu, &ge);
ge 1834 drivers/gpu/drm/i915/gvt/gtt.c ppgtt_generate_shadow_entry(&se, spt, &ge);
ge 1272 drivers/infiniband/hw/mlx4/main.c struct mlx4_ib_gid_entry *ge;
ge 1274 drivers/infiniband/hw/mlx4/main.c ge = kzalloc(sizeof *ge, GFP_KERNEL);
ge 1275 drivers/infiniband/hw/mlx4/main.c if (!ge)
ge 1278 drivers/infiniband/hw/mlx4/main.c ge->gid = *gid;
ge 1280 drivers/infiniband/hw/mlx4/main.c ge->port = mqp->port;
ge 1281 drivers/infiniband/hw/mlx4/main.c ge->added = 1;
ge 1285 drivers/infiniband/hw/mlx4/main.c list_add_tail(&ge->list, &mqp->gid_list);
ge 1920 drivers/infiniband/hw/mlx4/main.c struct mlx4_ib_gid_entry *ge;
ge 1924 drivers/infiniband/hw/mlx4/main.c list_for_each_entry_safe(ge, tmp, &qp->gid_list, list) {
ge 1925 drivers/infiniband/hw/mlx4/main.c if (!memcmp(raw, ge->gid.raw, 16)) {
ge 1926 drivers/infiniband/hw/mlx4/main.c ret = ge;
ge 1941 drivers/infiniband/hw/mlx4/main.c struct mlx4_ib_gid_entry *ge;
ge 1978 drivers/infiniband/hw/mlx4/main.c ge = find_gid_entry(mqp, gid->raw);
ge 1979 drivers/infiniband/hw/mlx4/main.c if (ge) {
ge 1981 drivers/infiniband/hw/mlx4/main.c ndev = ge->added ? mdev->iboe.netdevs[ge->port - 1] : NULL;
ge 1987 drivers/infiniband/hw/mlx4/main.c list_del(&ge->list);
ge 1988 drivers/infiniband/hw/mlx4/main.c kfree(ge);
ge 1349 drivers/infiniband/hw/mlx4/qp.c struct mlx4_ib_gid_entry *ge, *tmp;
ge 1351 drivers/infiniband/hw/mlx4/qp.c list_for_each_entry_safe(ge, tmp, &qp->gid_list, list) {
ge 1352 drivers/infiniband/hw/mlx4/qp.c list_del(&ge->list);
ge 1353 drivers/infiniband/hw/mlx4/qp.c kfree(ge);
ge 1939 drivers/infiniband/hw/mlx4/qp.c struct mlx4_ib_gid_entry *ge, *tmp;
ge 1941 drivers/infiniband/hw/mlx4/qp.c list_for_each_entry_safe(ge, tmp, &qp->gid_list, list) {
ge 1942 drivers/infiniband/hw/mlx4/qp.c if (!ge->added && mlx4_ib_add_mc(dev, qp, &ge->gid)) {
ge 1943 drivers/infiniband/hw/mlx4/qp.c ge->added = 1;
ge 1944 drivers/infiniband/hw/mlx4/qp.c ge->port = qp->port;
ge 892 net/sched/sch_netem.c const struct tc_netem_gemodel *ge = nla_data(la);
ge 901 net/sched/sch_netem.c q->clg.a1 = ge->p;
ge 902 net/sched/sch_netem.c q->clg.a2 = ge->r;
ge 903 net/sched/sch_netem.c q->clg.a3 = ge->h;
ge 904 net/sched/sch_netem.c q->clg.a4 = ge->k1;
ge 1110 net/sched/sch_netem.c struct tc_netem_gemodel ge = {
ge 1117 net/sched/sch_netem.c if (nla_put(skb, NETEM_LOSS_GE, sizeof(ge), &ge))