prob 231 arch/powerpc/include/asm/spu_csa.h struct spu_problem_collapsed prob;
prob 55 arch/powerpc/platforms/cell/spufs/backing_ops.c mbox_stat = ctx->csa.prob.mb_stat_R;
prob 61 arch/powerpc/platforms/cell/spufs/backing_ops.c *data = ctx->csa.prob.pu_mb_R;
prob 62 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.mb_stat_R &= ~(0x0000ff);
prob 73 arch/powerpc/platforms/cell/spufs/backing_ops.c return ctx->csa.prob.mb_stat_R;
prob 84 arch/powerpc/platforms/cell/spufs/backing_ops.c stat = ctx->csa.prob.mb_stat_R;
prob 120 arch/powerpc/platforms/cell/spufs/backing_ops.c if (ctx->csa.prob.mb_stat_R & 0xff0000) {
prob 126 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.mb_stat_R &= ~(0xff0000);
prob 144 arch/powerpc/platforms/cell/spufs/backing_ops.c if ((ctx->csa.prob.mb_stat_R) & 0x00ff00) {
prob 146 arch/powerpc/platforms/cell/spufs/backing_ops.c int avail = (ctx->csa.prob.mb_stat_R & 0x00ff00) >> 8;
prob 155 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.mb_stat_R &= ~(0x00ff00);
prob 156 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.mb_stat_R |= (((4 - slot) & 0xff) << 8);
prob 244 arch/powerpc/platforms/cell/spufs/backing_ops.c return ctx->csa.prob.spu_npc_RW;
prob 249 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.spu_npc_RW = val;
prob 254 arch/powerpc/platforms/cell/spufs/backing_ops.c return ctx->csa.prob.spu_status_R;
prob 269 arch/powerpc/platforms/cell/spufs/backing_ops.c return ctx->csa.prob.spu_runcntl_RW;
prob 275 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.spu_runcntl_RW = val;
prob 277 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.spu_status_R &=
prob 283 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.spu_status_R |= SPU_STATUS_RUNNING;
prob 285 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.spu_status_R &= ~SPU_STATUS_RUNNING;
prob 320 arch/powerpc/platforms/cell/spufs/backing_ops.c struct spu_problem_collapsed *prob = &ctx->csa.prob;
prob 325 arch/powerpc/platforms/cell/spufs/backing_ops.c if (prob->dma_querytype_RW)
prob 329 arch/powerpc/platforms/cell/spufs/backing_ops.c prob->dma_querymask_RW = mask;
prob 330 arch/powerpc/platforms/cell/spufs/backing_ops.c prob->dma_querytype_RW = mode;
prob 336 arch/powerpc/platforms/cell/spufs/backing_ops.c ctx->csa.prob.dma_tagstatus_R &= mask;
prob 345 arch/powerpc/platforms/cell/spufs/backing_ops.c return ctx->csa.prob.dma_tagstatus_R;
prob 350 arch/powerpc/platforms/cell/spufs/backing_ops.c return ctx->csa.prob.dma_qstatus_R;
prob 1970 arch/powerpc/platforms/cell/spufs/file.c if (!(ctx->csa.prob.mb_stat_R & 0x0000ff))
prob 1973 arch/powerpc/platforms/cell/spufs/file.c data = ctx->csa.prob.pu_mb_R;
prob 2010 arch/powerpc/platforms/cell/spufs/file.c if (!(ctx->csa.prob.mb_stat_R & 0xff0000))
prob 2051 arch/powerpc/platforms/cell/spufs/file.c wbox_stat = ctx->csa.prob.mb_stat_R;
prob 2153 arch/powerpc/platforms/cell/spufs/file.c info.proxydma_info_type = ctx->csa.prob.dma_querytype_RW;
prob 2154 arch/powerpc/platforms/cell/spufs/file.c info.proxydma_info_mask = ctx->csa.prob.dma_querymask_RW;
prob 2155 arch/powerpc/platforms/cell/spufs/file.c info.proxydma_info_status = ctx->csa.prob.dma_tagstatus_R;
prob 27 arch/powerpc/platforms/cell/spufs/hw_ops.c struct spu_problem __iomem *prob = spu->problem;
prob 32 arch/powerpc/platforms/cell/spufs/hw_ops.c mbox_stat = in_be32(&prob->mb_stat_R);
prob 34 arch/powerpc/platforms/cell/spufs/hw_ops.c *data = in_be32(&prob->pu_mb_R);
prob 85 arch/powerpc/platforms/cell/spufs/hw_ops.c struct spu_problem __iomem *prob = spu->problem;
prob 90 arch/powerpc/platforms/cell/spufs/hw_ops.c if (in_be32(&prob->mb_stat_R) & 0xff0000) {
prob 106 arch/powerpc/platforms/cell/spufs/hw_ops.c struct spu_problem __iomem *prob = spu->problem;
prob 110 arch/powerpc/platforms/cell/spufs/hw_ops.c if (in_be32(&prob->mb_stat_R) & 0x00ff00) {
prob 112 arch/powerpc/platforms/cell/spufs/hw_ops.c out_be32(&prob->spu_mb_W, data);
prob 249 arch/powerpc/platforms/cell/spufs/hw_ops.c struct spu_problem __iomem *prob = ctx->spu->problem;
prob 254 arch/powerpc/platforms/cell/spufs/hw_ops.c if (in_be32(&prob->dma_querytype_RW))
prob 257 arch/powerpc/platforms/cell/spufs/hw_ops.c out_be32(&prob->dma_querymask_RW, mask);
prob 258 arch/powerpc/platforms/cell/spufs/hw_ops.c out_be32(&prob->dma_querytype_RW, mode);
prob 278 arch/powerpc/platforms/cell/spufs/hw_ops.c struct spu_problem __iomem *prob = ctx->spu->problem;
prob 281 arch/powerpc/platforms/cell/spufs/hw_ops.c out_be32(&prob->mfc_lsa_W, cmd->lsa);
prob 282 arch/powerpc/platforms/cell/spufs/hw_ops.c out_be64(&prob->mfc_ea_W, cmd->ea);
prob 283 arch/powerpc/platforms/cell/spufs/hw_ops.c out_be32(&prob->mfc_union_W.by32.mfc_size_tag32,
prob 285 arch/powerpc/platforms/cell/spufs/hw_ops.c out_be32(&prob->mfc_union_W.by32.mfc_class_cmd32,
prob 287 arch/powerpc/platforms/cell/spufs/hw_ops.c status = in_be32(&prob->mfc_union_W.by32.mfc_class_cmd32);
prob 83 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 94 arch/powerpc/platforms/cell/spufs/switch.c return (in_be32(&prob->spu_status_R) & isolate_state) ? 1 : 0;
prob 203 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 209 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.spu_runcntl_RW = in_be32(&prob->spu_runcntl_RW);
prob 222 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 227 arch/powerpc/platforms/cell/spufs/switch.c if ((in_be32(&prob->spu_status_R) & SPU_STATUS_RUNNING) == 0) {
prob 228 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.spu_status_R = in_be32(&prob->spu_status_R);
prob 232 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_STOP);
prob 234 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 239 arch/powerpc/platforms/cell/spufs/switch.c if ((in_be32(&prob->spu_status_R) & stopped) == 0)
prob 240 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.spu_status_R = SPU_STATUS_RUNNING;
prob 242 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.spu_status_R = in_be32(&prob->spu_status_R);
prob 296 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 303 arch/powerpc/platforms/cell/spufs/switch.c out_be64(&prob->spc_mssync_RW, 1UL);
prob 304 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be64(&prob->spc_mssync_RW) & MS_SYNC_PENDING);
prob 368 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 374 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.dma_querymask_RW = in_be32(&prob->dma_querymask_RW);
prob 379 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 385 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.dma_querytype_RW = in_be32(&prob->dma_querytype_RW);
prob 390 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 398 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.dma_tagstatus_R = in_be32(&prob->dma_tagstatus_R);
prob 503 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 508 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.spu_npc_RW = in_be32(&prob->spu_npc_RW);
prob 587 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 592 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.mb_stat_R = in_be32(&prob->mb_stat_R);
prob 597 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 602 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.pu_mb_R = in_be32(&prob->pu_mb_R);
prob 769 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 782 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->mfc_lsa_W, ls_offset);
prob 783 arch/powerpc/platforms/cell/spufs/switch.c out_be64(&prob->mfc_ea_W, ea);
prob 784 arch/powerpc/platforms/cell/spufs/switch.c out_be64(&prob->mfc_union_W.all64, command.all64);
prob 786 arch/powerpc/platforms/cell/spufs/switch.c in_be32(&prob->mfc_union_W.by32.mfc_class_cmd32);
prob 816 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 827 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_npc_RW, 0);
prob 833 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 845 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->signal_notify1, addr64.ui[0]);
prob 851 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 863 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->signal_notify2, addr64.ui[1]);
prob 885 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 892 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->dma_querymask_RW, MFC_TAGID_TO_TAGMASK(0));
prob 898 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 911 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_FALSE(in_be32(&prob->dma_tagstatus_R) & mask);
prob 921 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 930 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) & SPU_STATUS_RUNNING);
prob 940 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 950 arch/powerpc/platforms/cell/spufs/switch.c return (in_be32(&prob->spu_status_R) != complete) ? 1 : 0;
prob 991 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1000 arch/powerpc/platforms/cell/spufs/switch.c if (in_be32(&prob->spu_status_R) & SPU_STATUS_RUNNING) {
prob 1001 arch/powerpc/platforms/cell/spufs/switch.c if (in_be32(&prob->spu_status_R) &
prob 1003 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1006 arch/powerpc/platforms/cell/spufs/switch.c if ((in_be32(&prob->spu_status_R) &
prob 1008 arch/powerpc/platforms/cell/spufs/switch.c || (in_be32(&prob->spu_status_R) &
prob 1010 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_STOP);
prob 1012 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1014 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, 0x2);
prob 1016 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1019 arch/powerpc/platforms/cell/spufs/switch.c if (in_be32(&prob->spu_status_R) &
prob 1021 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_STOP);
prob 1023 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1033 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1039 arch/powerpc/platforms/cell/spufs/switch.c if (!(in_be32(&prob->spu_status_R) & SPU_STATUS_RUNNING)) {
prob 1040 arch/powerpc/platforms/cell/spufs/switch.c if (in_be32(&prob->spu_status_R) &
prob 1045 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_RUNNABLE);
prob 1047 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1050 arch/powerpc/platforms/cell/spufs/switch.c if ((in_be32(&prob->spu_status_R) &
prob 1052 arch/powerpc/platforms/cell/spufs/switch.c || (in_be32(&prob->spu_status_R) &
prob 1057 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, 0x2);
prob 1059 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1136 arch/powerpc/platforms/cell/spufs/switch.c (csa->prob.spu_status_R >> SPU_STOP_STATUS_SHIFT) & 0xFFFF;
prob 1137 arch/powerpc/platforms/cell/spufs/switch.c if ((csa->prob.spu_status_R & status_P_I) == status_P_I) {
prob 1146 arch/powerpc/platforms/cell/spufs/switch.c } else if ((csa->prob.spu_status_R & status_P_H) == status_P_H) {
prob 1155 arch/powerpc/platforms/cell/spufs/switch.c } else if ((csa->prob.spu_status_R & status_S_P) == status_S_P) {
prob 1163 arch/powerpc/platforms/cell/spufs/switch.c } else if ((csa->prob.spu_status_R & status_S_I) == status_S_I) {
prob 1171 arch/powerpc/platforms/cell/spufs/switch.c } else if ((csa->prob.spu_status_R & status_P) == status_P) {
prob 1179 arch/powerpc/platforms/cell/spufs/switch.c } else if ((csa->prob.spu_status_R & status_H) == status_H) {
prob 1186 arch/powerpc/platforms/cell/spufs/switch.c } else if ((csa->prob.spu_status_R & status_S) == status_S) {
prob 1192 arch/powerpc/platforms/cell/spufs/switch.c } else if ((csa->prob.spu_status_R & status_I) == status_I) {
prob 1221 arch/powerpc/platforms/cell/spufs/switch.c if (!(csa->prob.spu_status_R & mask)) {
prob 1284 arch/powerpc/platforms/cell/spufs/switch.c csa->lscsa->ppu_mb.slot[0] = csa->prob.pu_mb_R;
prob 1297 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1307 arch/powerpc/platforms/cell/spufs/switch.c return (in_be32(&prob->spu_status_R) != complete) ? 1 : 0;
prob 1323 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1333 arch/powerpc/platforms/cell/spufs/switch.c if (csa->prob.spu_status_R & mask) {
prob 1334 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_RUNNABLE);
prob 1336 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1343 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1356 arch/powerpc/platforms/cell/spufs/switch.c if (!(csa->prob.spu_status_R & mask)) {
prob 1357 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_RUNNABLE);
prob 1359 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_FALSE(in_be32(&prob->spu_status_R) &
prob 1361 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_STOP);
prob 1363 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) &
prob 1452 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1457 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->dma_querymask_RW, csa->prob.dma_querymask_RW);
prob 1463 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1468 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->dma_querytype_RW, csa->prob.dma_querytype_RW);
prob 1631 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1636 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_npc_RW, csa->prob.spu_npc_RW);
prob 1659 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1666 arch/powerpc/platforms/cell/spufs/switch.c if ((csa->prob.mb_stat_R & 0xFF) == 0) {
prob 1667 arch/powerpc/platforms/cell/spufs/switch.c dummy = in_be32(&prob->pu_mb_R);
prob 1681 arch/powerpc/platforms/cell/spufs/switch.c if ((csa->prob.mb_stat_R & 0xFF0000) == 0) {
prob 1715 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1721 arch/powerpc/platforms/cell/spufs/switch.c if (csa->prob.spu_status_R & SPU_STATUS_RUNNING) {
prob 1722 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_RUNNABLE);
prob 1875 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1879 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, SPU_RUNCNTL_STOP);
prob 1881 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_TRUE(in_be32(&prob->spu_status_R) & SPU_STATUS_RUNNING);
prob 1890 arch/powerpc/platforms/cell/spufs/switch.c out_be32(&prob->spu_runcntl_RW, 2);
prob 1892 arch/powerpc/platforms/cell/spufs/switch.c POLL_WHILE_FALSE((in_be32(&prob->spu_status_R)
prob 1907 arch/powerpc/platforms/cell/spufs/switch.c struct spu_problem __iomem *prob = spu->problem;
prob 1909 arch/powerpc/platforms/cell/spufs/switch.c if (in_be32(&prob->spu_status_R) & SPU_STATUS_ISOLATED_STATE) {
prob 2142 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.spu_runcntl_RW = SPU_RUNCNTL_STOP;
prob 2143 arch/powerpc/platforms/cell/spufs/switch.c csa->prob.mb_stat_R = 0x000400;
prob 344 drivers/net/ethernet/mellanox/mlxsw/spectrum_qdisc.c u64 prob;
prob 347 drivers/net/ethernet/mellanox/mlxsw/spectrum_qdisc.c prob = p->probability;
prob 348 drivers/net/ethernet/mellanox/mlxsw/spectrum_qdisc.c prob *= 100;
prob 349 drivers/net/ethernet/mellanox/mlxsw/spectrum_qdisc.c prob = DIV_ROUND_UP(prob, 1 << 16);
prob 350 drivers/net/ethernet/mellanox/mlxsw/spectrum_qdisc.c prob = DIV_ROUND_UP(prob, 1 << 16);
prob 354 drivers/net/ethernet/mellanox/mlxsw/spectrum_qdisc.c max, prob, p->is_ecn);
prob 958 include/uapi/linux/pkt_sched.h __u64 prob; /* current probability */
prob 349 lib/decompress_unlzma.c int pos_state, uint16_t *prob,
prob 352 lib/decompress_unlzma.c rc_update_bit_0(rc, prob);
prob 353 lib/decompress_unlzma.c prob = (p + LZMA_LITERAL +
prob 367 lib/decompress_unlzma.c prob_lit = prob + 0x100 + bit + mi;
prob 378 lib/decompress_unlzma.c uint16_t *prob_lit = prob + mi;
prob 393 lib/decompress_unlzma.c int pos_state, uint16_t *prob) {
prob 399 lib/decompress_unlzma.c rc_update_bit_1(rc, prob);
prob 400 lib/decompress_unlzma.c prob = p + LZMA_IS_REP + cst->state;
prob 401 lib/decompress_unlzma.c if (rc_is_bit_0(rc, prob)) {
prob 402 lib/decompress_unlzma.c rc_update_bit_0(rc, prob);
prob 407 lib/decompress_unlzma.c prob = p + LZMA_LEN_CODER;
prob 409 lib/decompress_unlzma.c rc_update_bit_1(rc, prob);
prob 410 lib/decompress_unlzma.c prob = p + LZMA_IS_REP_G0 + cst->state;
prob 411 lib/decompress_unlzma.c if (rc_is_bit_0(rc, prob)) {
prob 412 lib/decompress_unlzma.c rc_update_bit_0(rc, prob);
prob 413 lib/decompress_unlzma.c prob = (p + LZMA_IS_REP_0_LONG
prob 417 lib/decompress_unlzma.c if (rc_is_bit_0(rc, prob)) {
prob 418 lib/decompress_unlzma.c rc_update_bit_0(rc, prob);
prob 424 lib/decompress_unlzma.c rc_update_bit_1(rc, prob);
prob 429 lib/decompress_unlzma.c rc_update_bit_1(rc, prob);
prob 430 lib/decompress_unlzma.c prob = p + LZMA_IS_REP_G1 + cst->state;
prob 431 lib/decompress_unlzma.c if (rc_is_bit_0(rc, prob)) {
prob 432 lib/decompress_unlzma.c rc_update_bit_0(rc, prob);
prob 435 lib/decompress_unlzma.c rc_update_bit_1(rc, prob);
prob 436 lib/decompress_unlzma.c prob = p + LZMA_IS_REP_G2 + cst->state;
prob 437 lib/decompress_unlzma.c if (rc_is_bit_0(rc, prob)) {
prob 438 lib/decompress_unlzma.c rc_update_bit_0(rc, prob);
prob 441 lib/decompress_unlzma.c rc_update_bit_1(rc, prob);
prob 451 lib/decompress_unlzma.c prob = p + LZMA_REP_LEN_CODER;
prob 454 lib/decompress_unlzma.c prob_len = prob + LZMA_LEN_CHOICE;
prob 457 lib/decompress_unlzma.c prob_len = (prob + LZMA_LEN_LOW
prob 464 lib/decompress_unlzma.c prob_len = prob + LZMA_LEN_CHOICE_2;
prob 467 lib/decompress_unlzma.c prob_len = (prob + LZMA_LEN_MID
prob 474 lib/decompress_unlzma.c prob_len = prob + LZMA_LEN_HIGH;
prob 488 lib/decompress_unlzma.c prob =
prob 494 lib/decompress_unlzma.c rc_bit_tree_decode(rc, prob,
prob 503 lib/decompress_unlzma.c prob = p + LZMA_SPEC_POS +
prob 510 lib/decompress_unlzma.c prob = p + LZMA_ALIGN;
prob 517 lib/decompress_unlzma.c if (rc_get_bit(rc, prob + mi, &mi))
prob 633 lib/decompress_unlzma.c uint16_t *prob = p + LZMA_IS_MATCH +
prob 635 lib/decompress_unlzma.c if (rc_is_bit_0(&rc, prob)) {
prob 636 lib/decompress_unlzma.c if (process_bit0(&wr, &rc, &cst, p, pos_state, prob,
prob 642 lib/decompress_unlzma.c if (process_bit1(&wr, &rc, &cst, p, pos_state, prob)) {
prob 497 lib/xz/xz_dec_lzma2.c static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
prob 503 lib/xz/xz_dec_lzma2.c bound = (rc->range >> RC_BIT_MODEL_TOTAL_BITS) * *prob;
prob 506 lib/xz/xz_dec_lzma2.c *prob += (RC_BIT_MODEL_TOTAL - *prob) >> RC_MOVE_BITS;
prob 511 lib/xz/xz_dec_lzma2.c *prob -= *prob >> RC_MOVE_BITS;
prob 1700 net/mac80211/rc80211_minstrel_ht.c int i, j, prob, tp_avg;
prob 1707 net/mac80211/rc80211_minstrel_ht.c prob = mi->groups[i].rates[j].prob_ewma;
prob 1710 net/mac80211/rc80211_minstrel_ht.c tp_avg = minstrel_ht_get_tp_avg(mi, i, j, prob) * 10;
prob 41 net/sched/sch_pie.c u64 prob; /* probability but scaled by u64 limit. */
prob 96 net/sched/sch_pie.c u64 local_prob = q->vars.prob;
prob 107 net/sched/sch_pie.c (q->vars.prob < MAX_PROB / 5))
prob 122 net/sched/sch_pie.c local_prob = q->vars.prob;
prob 164 net/sched/sch_pie.c } else if (q->params.ecn && (q->vars.prob <= MAX_PROB / 10) &&
prob 368 net/sched/sch_pie.c if (q->vars.prob < MAX_PROB / 10) {
prob 373 net/sched/sch_pie.c while (q->vars.prob < div_u64(MAX_PROB, power) &&
prob 385 net/sched/sch_pie.c oldprob = q->vars.prob;
prob 389 net/sched/sch_pie.c q->vars.prob >= MAX_PROB / 10)
prob 400 net/sched/sch_pie.c q->vars.prob += delta;
prob 404 net/sched/sch_pie.c if (q->vars.prob < oldprob) {
prob 405 net/sched/sch_pie.c q->vars.prob = MAX_PROB;
prob 415 net/sched/sch_pie.c if (q->vars.prob > oldprob)
prob 416 net/sched/sch_pie.c q->vars.prob = 0;
prob 425 net/sched/sch_pie.c q->vars.prob -= q->vars.prob / 64u;
prob 438 net/sched/sch_pie.c q->vars.prob == 0 &&
prob 514 net/sched/sch_pie.c .prob = q->vars.prob,
prob 201 net/sched/sch_sfb.c u32 qlen = 0, prob = 0, totalpm = 0;
prob 208 net/sched/sch_sfb.c if (prob < b->p_mark)
prob 209 net/sched/sch_sfb.c prob = b->p_mark;
prob 212 net/sched/sch_sfb.c *prob_r = prob;
prob 957 tools/include/uapi/linux/pkt_sched.h __u32 prob; /* current probability */