Occurrences of the identifier 'partial' in the kernel tree, grouped by file (line number: source line):

arch/arm/crypto/ghash-ce-glue.c
    102: unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
    106: if ((partial + len) >= GHASH_BLOCK_SIZE) {
    110: if (partial) {
    111: int p = GHASH_BLOCK_SIZE - partial;
    113: memcpy(ctx->buf + partial, src, p);
    122: partial ? ctx->buf : NULL);
    124: partial = 0;
    127: memcpy(ctx->buf + partial, src, len);
    134: unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
    136: if (partial) {
    139: memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
arch/arm64/crypto/ghash-ce-glue.c
    126: unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
    130: if ((partial + len) >= GHASH_BLOCK_SIZE) {
    134: if (partial) {
    135: int p = GHASH_BLOCK_SIZE - partial;
    137: memcpy(ctx->buf + partial, src, p);
    149: partial ? ctx->buf : NULL,
    154: partial = 0;
    158: memcpy(ctx->buf + partial, src, len);
    177: unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
    179: if (partial) {
    182: memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
    197: unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
    199: if (partial) {
    202: memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
arch/arm64/crypto/sha3-ce-glue.c
    39: if ((sctx->partial + len) >= sctx->rsiz) {
    42: if (sctx->partial) {
    43: int p = sctx->rsiz - sctx->partial;
    45: memcpy(sctx->buf + sctx->partial, data, p);
    52: sctx->partial = 0;
    67: memcpy(sctx->buf + sctx->partial, data, len);
    68: sctx->partial += len;
    83: sctx->buf[sctx->partial++] = 0x06;
    84: memset(sctx->buf + sctx->partial, 0, sctx->rsiz - sctx->partial);
arch/mips/cavium-octeon/crypto/octeon-sha1.c
    91: unsigned int partial;
    95: partial = sctx->count % SHA1_BLOCK_SIZE;
    100: if ((partial + len) >= SHA1_BLOCK_SIZE) {
    101: if (partial) {
    102: done = -partial;
    103: memcpy(sctx->buffer + partial, data,
    114: partial = 0;
    116: memcpy(sctx->buffer + partial, src, len - done);
arch/mips/cavium-octeon/crypto/octeon-sha256.c
    103: unsigned int partial;
    107: partial = sctx->count % SHA256_BLOCK_SIZE;
    112: if ((partial + len) >= SHA256_BLOCK_SIZE) {
    113: if (partial) {
    114: done = -partial;
    115: memcpy(sctx->buf + partial, data,
    126: partial = 0;
    128: memcpy(sctx->buf + partial, src, len - done);
arch/powerpc/crypto/sha1.c
    41: unsigned int partial, done;
    44: partial = sctx->count & 0x3f;
    49: if ((partial + len) > 63) {
    52: if (partial) {
    53: done = -partial;
    54: memcpy(sctx->buffer + partial, data, done + 64);
    65: partial = 0;
    67: memcpy(sctx->buffer + partial, src, len - done);
arch/sparc/crypto/md5_glue.c
    47: unsigned int len, unsigned int partial)
    52: if (partial) {
    53: done = MD5_HMAC_BLOCK_SIZE - partial;
    54: memcpy((u8 *)sctx->block + partial, data, done);
    71: unsigned int partial = sctx->byte_count % MD5_HMAC_BLOCK_SIZE;
    74: if (partial + len < MD5_HMAC_BLOCK_SIZE) {
    76: memcpy((u8 *)sctx->block + partial, data, len);
    78: __md5_sparc64_update(sctx, data, len, partial);
arch/sparc/crypto/sha1_glue.c
    42: unsigned int len, unsigned int partial)
    47: if (partial) {
    48: done = SHA1_BLOCK_SIZE - partial;
    49: memcpy(sctx->buffer + partial, data, done);
    66: unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
    69: if (partial + len < SHA1_BLOCK_SIZE) {
    71: memcpy(sctx->buffer + partial, data, len);
    73: __sha1_sparc64_update(sctx, data, len, partial);
arch/sparc/crypto/sha256_glue.c
    63: unsigned int len, unsigned int partial)
    68: if (partial) {
    69: done = SHA256_BLOCK_SIZE - partial;
    70: memcpy(sctx->buf + partial, data, done);
    87: unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
    90: if (partial + len < SHA256_BLOCK_SIZE) {
    92: memcpy(sctx->buf + partial, data, len);
    94: __sha256_sparc64_update(sctx, data, len, partial);
arch/sparc/crypto/sha512_glue.c
    62: unsigned int len, unsigned int partial)
    68: if (partial) {
    69: done = SHA512_BLOCK_SIZE - partial;
    70: memcpy(sctx->buf + partial, data, done);
    87: unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;
    90: if (partial + len < SHA512_BLOCK_SIZE) {
    93: memcpy(sctx->buf + partial, data, len);
    95: __sha512_sparc64_update(sctx, data, len, partial);
arch/x86/include/asm/unwind.h
    68: bool *partial)
    73: if (partial) {
    75: *partial = !state->full_regs;
    77: *partial = false;
    85: bool *partial)
arch/x86/kernel/dumpstack.c
    137: bool partial)
    148: if (!partial && on_stack(info, regs, sizeof(*regs))) {
    151: } else if (partial && on_stack(info, (void *)regs + IRET_FRAME_OFFSET,
    169: bool partial = false;
    175: regs = unwind_get_entry_regs(&state, &partial);
    213: show_regs_if_on_stack(&stack_info, regs, partial);
    272: regs = unwind_get_entry_regs(&state, &partial);
    274: show_regs_if_on_stack(&stack_info, regs, partial);
crypto/asymmetric_keys/asymmetric_type.c
    49: bool partial)
    72: if (partial) {
crypto/sha3_generic.c
    168: sctx->partial = 0;
    185: if ((sctx->partial + len) > (sctx->rsiz - 1)) {
    186: if (sctx->partial) {
    187: done = -sctx->partial;
    188: memcpy(sctx->buf + sctx->partial, data,
    204: sctx->partial = 0;
    206: memcpy(sctx->buf + sctx->partial, src, len - done);
    207: sctx->partial += (len - done);
    216: unsigned int i, inlen = sctx->partial;
crypto/vmac.c
    61: u8 partial[VMAC_NHBYTES]; /* partial block */
    510: memcpy(&dctx->partial[dctx->partial_size], p, n);
    529: memcpy(dctx->partial, p, len);
    539: unsigned int partial = dctx->partial_size;
    544: if (partial) {
    546: unsigned int n = round_up(partial, 16);
    549: memset(&dctx->partial[partial], 0, n - partial);
    560: return l3hash(ch, cl, tctx->l3key[0], tctx->l3key[1], partial * 8);
drivers/crypto/padlock-sha.c
    289: unsigned int partial, done;
    296: partial = sctx->count & 0x3f;
    302: if ((partial + len) >= SHA1_BLOCK_SIZE) {
    305: if (partial) {
    306: done = -partial;
    307: memcpy(sctx->buffer + partial, data,
    326: partial = 0;
    329: memcpy(sctx->buffer + partial, src, len - done);
    337: unsigned int partial, padlen;
    344: partial = state->count & 0x3f;
    345: padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);
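Editor's note: the arch/*/crypto and drivers/crypto hash glue entries above all implement one partial-block buffering idiom: bytes that do not fill a whole block are staged in a context buffer, and full blocks are fed to the compression function directly. Below is a minimal standalone sketch of that idiom; it is not kernel code, and BLOCK_SIZE, hash_ctx and hash_block are illustrative stand-ins.

#include <string.h>

#define BLOCK_SIZE 64

struct hash_ctx {
        unsigned long long count;        /* total bytes fed in so far */
        unsigned char buf[BLOCK_SIZE];   /* staging area for a partial block */
        unsigned char state[BLOCK_SIZE]; /* stand-in for the real digest state */
};

/* stand-in for the arch-specific compression function */
static void hash_block(struct hash_ctx *ctx, const unsigned char *data,
                       size_t blocks)
{
        size_t i, j;

        for (i = 0; i < blocks; i++)
                for (j = 0; j < BLOCK_SIZE; j++)
                        ctx->state[j] ^= data[i * BLOCK_SIZE + j];
}

static void hash_update(struct hash_ctx *ctx, const unsigned char *data,
                        size_t len)
{
        unsigned int partial = ctx->count % BLOCK_SIZE;

        ctx->count += len;

        if (partial + len >= BLOCK_SIZE) {
                if (partial) {
                        size_t p = BLOCK_SIZE - partial;

                        /* top up the staged block and flush it */
                        memcpy(ctx->buf + partial, data, p);
                        hash_block(ctx, ctx->buf, 1);
                        data += p;
                        len -= p;
                        partial = 0;
                }

                /* hash all remaining whole blocks straight from the input */
                hash_block(ctx, data, len / BLOCK_SIZE);
                data += len - len % BLOCK_SIZE;
                len %= BLOCK_SIZE;
        }

        /* stash the tail; it will head the next block */
        memcpy(ctx->buf + partial, data, len);
}

This mirrors the shape of the include/crypto/sha*_base.h and sm3_base.h helpers listed further down: derive partial from the running byte count, top up and flush the staged block, bulk-process full blocks, then stash the tail.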
drivers/crypto/padlock-sha.c (continued)
    373: unsigned int partial, done;
    380: partial = sctx->count & 0x3f;
    386: if ((partial + len) >= SHA256_BLOCK_SIZE) {
    389: if (partial) {
    390: done = -partial;
    391: memcpy(sctx->buf + partial, data,
    410: partial = 0;
    413: memcpy(sctx->buf + partial, src, len - done);
    422: unsigned int partial, padlen;
    429: partial = state->count & 0x3f;
    430: padlen = (partial < 56) ? (56 - partial) : ((64+56) - partial);
drivers/crypto/qat/qat_common/icp_qat_fw_la.h
    125: ciph_iv, ciphcfg, partial) \
    144: ((partial & QAT_LA_PARTIAL_MASK) << \
drivers/crypto/stm32/stm32-crc32.c
    61: u32 partial; /* crc32c: partial in first 4 bytes of that struct */
    118: ctx->partial = readl_relaxed(ctx->crc->regs + CRC_DR);
    158: ctx->partial = readl_relaxed(crc->regs + CRC_DR);
    189: ~ctx->partial : ctx->partial, out);
drivers/dma/qcom/bam_dma.c
    1045: u32 partial = MAX_DESCRIPTORS - bchan->tail;
    1048: partial * sizeof(struct bam_desc_hw));
    1049: memcpy(fifo, &desc[partial],
    1050: (async_desc->xfer_len - partial) *
drivers/dma/sh/shdma-base.c
    539: new->partial = 0;
    767: desc->partial = ops->get_partial(schan, desc);
drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.c
    1066: *voltage = (uint16_t)fV_NL.partial.real;
drivers/gpu/drm/amd/powerplay/hwmgr/ppevvmath.h
    46: } partial;
    338: X_LessThanOne = (X.partial.real == 0 && X.partial.decimal != 0 && X.full >= 0);
    339: Y_LessThanOne = (Y.partial.real == 0 && Y.partial.decimal != 0 && Y.full >= 0);
    413: if (num.partial.real > 3000)
    415: else if (num.partial.real > 1000)
    417: else if (num.partial.real > 100)
    512: int i, scaledDecimal = 0, tmp = A.partial.decimal;
    545: solution.partial.decimal = 0; /*All fractional digits changes to 0 */
    548: solution.partial.real += 1; /*Error term of 1 added */
drivers/gpu/drm/i915/gem/i915_gem_mman.c
    187: view.partial.offset = rounddown(page_offset, chunk);
    188: view.partial.size =
    190: (obj->base.size >> PAGE_SHIFT) - view.partial.offset);
    300: area->vm_start + (vma->ggtt_view.partial.offset << PAGE_SHIFT),
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c
    119: GEM_BUG_ON(view.partial.size > nreal);
    129: n = page - view.partial.offset;
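Editor's note: the drivers/gpu/drm/i915 entries above and below use "partial" as an (offset, size) window, in pages, over a larger GEM object; the fault handler picks a chunk-aligned window around the faulting page. A hedged sketch of that computation follows; the names are illustrative, not the i915 API.

/* both fields are in pages, like the intel_partial_info in the listing */
struct partial_view {
        unsigned long offset;   /* first page covered by the view */
        unsigned long size;     /* number of pages in the view */
};

static unsigned long min_ul(unsigned long a, unsigned long b)
{
        return a < b ? a : b;
}

/* pick a chunk-aligned window of at most 'chunk' pages around the fault */
static struct partial_view make_partial_view(unsigned long page_offset,
                                             unsigned long obj_pages,
                                             unsigned long chunk)
{
        struct partial_view view;

        view.offset = page_offset - (page_offset % chunk);  /* rounddown() */
        view.size = min_ul(chunk, obj_pages - view.offset);
        return view;
}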
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c (continued)
    130: GEM_BUG_ON(n >= view.partial.size);
    155: view.partial.offset,
    156: view.partial.size,
drivers/gpu/drm/i915/gt/intel_reset.c
    641: vma_offset = vma->ggtt_view.partial.offset << PAGE_SHIFT;
drivers/gpu/drm/i915/i915_debugfs.c
    180: vma->ggtt_view.partial.offset << PAGE_SHIFT,
    181: vma->ggtt_view.partial.size << PAGE_SHIFT);
drivers/gpu/drm/i915/i915_gem_gtt.c
    3514: unsigned int count = view->partial.size;
    3526: iter = i915_gem_object_get_sg(obj, view->partial.offset, &offset);
drivers/gpu/drm/i915/i915_gem_gtt.h
    209: struct intel_partial_info partial;
drivers/gpu/drm/i915/i915_vma.c
    136: view->partial.offset,
    137: view->partial.size,
    139: vma->size = view->partial.size;
    875: vma_offset = vma->ggtt_view.partial.offset << PAGE_SHIFT;
drivers/gpu/drm/i915/i915_vma.h
    290: offsetof(typeof(*view), partial));
    293: return memcmp(&vma->ggtt_view.partial, &view->partial, view->type);
drivers/gpu/drm/i915/selftests/i915_vma.c
    729: view.partial.offset = offset;
    730: view.partial.size = sz;
drivers/gpu/drm/tiny/repaper.c
    92: bool partial;
    581: if (epd->partial) {
    592: epd->partial = true;
    607: epd->partial = true;
    789: epd->partial = false;
drivers/infiniband/hw/i40iw/i40iw_puda.c
    1250: bool partial = false;
    1268: partial = true;
    1316: if (partial)
drivers/net/ethernet/marvell/octeontx2/af/mbox.h
    277: u8 partial:1;
drivers/net/ethernet/marvell/octeontx2/af/rvu.c
    995: if (detach && detach->partial)
drivers/net/usb/rndis_host.c
    97: int partial;
    133: &notification, sizeof(notification), &partial,
drivers/perf/xgene_pmu.c
    315: XGENE_PMU_EVENT_ATTR(axi0-read-partial, 0x03),
    317: XGENE_PMU_EVENT_ATTR(axi1-read-partial, 0x05),
    319: XGENE_PMU_EVENT_ATTR(csw-read-partial, 0x07),
    321: XGENE_PMU_EVENT_ATTR(axi0-write-partial, 0x11),
    323: XGENE_PMU_EVENT_ATTR(axi1-write-partial, 0x14),
    554: XGENE_PMU_EVENT_ATTR(partial-wr-req-vld, 0x17),
drivers/scsi/qla2xxx/qla_iocb.c
    846: uint32_t *partial)
    867: *partial = 0;
    871: *partial = 1;
    898: uint32_t partial;
    924: while (qla24xx_get_one_block_sg(prot_int, &sgx, &partial)) {
    978: if (partial == 0) {
    989: partial = 1; /* So as to not re-enter this block */
    1784: uint32_t partial;
    1793: cmd->device->sector_size, &sgx, &partial))
    2119: uint32_t partial;
    2128: cmd->device->sector_size, &sgx, &partial))
drivers/scsi/sd_zbc.c
    68: bool partial)
    82: if (partial)
drivers/scsi/smartpqi/smartpqi.h
    268: u8 partial : 1;
    291: u8 partial : 1;
drivers/scsi/smartpqi/smartpqi_init.c
    4842: request->partial = chained;
    4906: request->partial = chained;
drivers/soc/fsl/qbman/bman.c
    297: struct bm_rcr_entry *partial = rcr->cursor + 1;
    299: rcr->cursor = rcr_carryclear(partial);
    300: if (partial != rcr->cursor)
drivers/soc/fsl/qbman/qman.c
    414: struct qm_eqcr_entry *partial = eqcr->cursor + 1;
    416: eqcr->cursor = eqcr_carryclear(partial);
    417: if (partial != eqcr->cursor)
drivers/usb/storage/ene_ub6250.c
    500: unsigned int cswlen = 0, partial = 0;
    525: transfer_length, 0, &partial);
drivers/usb/storage/freecom.c
    227: unsigned int partial;
    266: FCM_STATUS_PACKET_LENGTH, &partial);
    267: usb_stor_dbg(us, "foo Status result %d %u\n", result, partial);
    271: US_DEBUG(pdump(us, (void *)fst, partial));
    308: FCM_STATUS_PACKET_LENGTH, &partial);
    310: usb_stor_dbg(us, "bar Status result %d %u\n", result, partial);
    314: US_DEBUG(pdump(us, (void *)fst, partial));
    317: if (partial != 4)
    376: FCM_PACKET_LENGTH, &partial);
    377: US_DEBUG(pdump(us, (void *)fst, partial));
    379: if (partial != 4 || result > USB_STOR_XFER_SHORT)
    407: FCM_PACKET_LENGTH, &partial);
    409: if (partial != 4 || result > USB_STOR_XFER_SHORT)
drivers/usb/storage/initializers.c
    56: unsigned int partial;
    70: US_BULK_CB_WRAP_LEN, &partial);
    76: US_BULK_CS_WRAP_LEN, &partial);
drivers/usb/storage/transport.c
    257: unsigned int length, int result, unsigned int partial)
    260: result, partial, length);
    265: if (partial != length) {
    464: unsigned int partial;
    467: &partial);
    469: scsi_set_resid(srb, scsi_bufflen(srb) - partial);
    487: unsigned int partial;
    494: length_left, &partial);
    495: length_left -= partial;
    499: length_left, &partial);
    500: length_left -= partial;
fs/adfs/dir_fplus.c
    135: unsigned int buffer, partial, remainder;
    140: partial = sb->s_blocksize - offset;
    142: if (partial >= len)
    147: remainder = len - partial;
    151: partial);
    153: memcpy(c + partial,
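Editor's note: in the fs/adfs/dir_fplus.c entries just above, "partial" is the number of bytes of a directory record still available in the current block; the remainder is copied from the start of the next block. A minimal standalone sketch of that split copy follows; the function and parameter names are illustrative, not the adfs API.

#include <string.h>

/*
 * copy 'len' bytes that may straddle a block boundary: 'partial' bytes
 * are still available in the current block, the rest begin the next one
 */
static void read_across_blocks(unsigned char *dst,
                               const unsigned char *blk0, size_t offset,
                               const unsigned char *blk1, size_t blocksize,
                               size_t len)
{
        size_t partial = blocksize - offset;

        if (partial >= len) {
                memcpy(dst, blk0 + offset, len);      /* fits in one block */
                return;
        }

        memcpy(dst, blk0 + offset, partial);          /* tail of block 0 */
        memcpy(dst + partial, blk1, len - partial);   /* head of block 1 */
}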
fs/afs/flock.c
    470: bool partial, no_server_lock = false;
    484: partial = (fl->fl_start != 0 || fl->fl_end != OFFSET_MAX);
    486: if (mode == afs_flock_mode_write && partial)
    504: (partial && mode == afs_flock_mode_openafs)) {
fs/buffer.c
    2037: int partial = 0;
    2049: partial = 1;
    2066: if (!partial)
fs/cramfs/inode.c
    341: u32 partial, last_page, blockaddr, *blockptrs;
    344: partial = offset_in_page(inode->i_size);
    345: if (!partial)
    351: tail_data = sbi->linear_virt_addr + blockaddr + partial;
    352: return memchr_inv(tail_data, 0, PAGE_SIZE - partial) ? true : false;
fs/erofs/zdata.c
    767: bool overlapped, partial;
    889: partial = !(pcl->length & Z_EROFS_PCLUSTER_FULL_LENGTH);
    892: partial = true;
    904: .partial_decoding = partial
fs/ext2/inode.c
    331: Indirect *partial)
    346: return ext2_find_near(inode, partial);
    631: Indirect *partial;
    647: partial = ext2_get_branch(inode, depth, offsets, chain, &err);
    649: if (!partial) {
    665: partial = chain + depth - 1;
    695: if (err == -EAGAIN || !verify_chain(chain, partial)) {
    696: while (partial > chain) {
    697: brelse(partial->bh);
    698: partial--;
    700: partial = ext2_get_branch(inode, depth, offsets, chain, &err);
    701: if (!partial) {
    720: goal = ext2_find_goal(inode, iblock, partial);
    723: indirect_blks = (chain + depth) - partial - 1;
    728: count = ext2_blks_to_allocate(partial, indirect_blks,
    734: offsets + (partial - chain), partial);
    764: ext2_splice_branch(inode, iblock, partial, indirect_blks, count);
    771: partial = chain + depth - 1; /* the whole chain */
    773: while (partial > chain) {
    774: brelse(partial->bh);
    775: partial--;
    1054: Indirect *partial, *p;
    1060: partial = ext2_get_branch(inode, k, offsets, chain, &err);
    1061: if (!partial)
    1062: partial = chain + k-1;
    1068: if (!partial->key && *partial->p) {
    1072: for (p=partial; p>chain && all_zeroes((__le32*)p->bh->b_data,p->p); p--)
    1088: while(partial > p)
    1090: brelse(partial->bh);
    1091: partial--;
    1094: return partial;
    1190: Indirect *partial;
    1218: partial = ext2_find_shared(inode, n, offsets, chain, &nr);
    1221: if (partial == chain)
    1224: mark_buffer_dirty_inode(partial->bh, inode);
    1225: ext2_free_branches(inode, &nr, &nr+1, (chain+n-1) - partial);
    1228: while (partial > chain) {
    1230: partial->p + 1,
    1231: (__le32*)partial->bh->b_data+addr_per_block,
    1232: (chain+n-1) - partial);
    1233: mark_buffer_dirty_inode(partial->bh, inode);
    1234: brelse (partial->bh);
    1235: partial--;
fs/ext4/ext4.h
    2594: int *partial,
fs/ext4/extents.c
    2596: struct partial_cluster *partial,
    2627: trace_ext4_remove_blocks(inode, ex, from, to, partial);
    2635: if (partial->state != initial &&
    2636: partial->pclu != EXT4_B2C(sbi, last_pblk)) {
    2637: if (partial->state == tofree) {
    2639: if (ext4_is_pending(inode, partial->lblk))
    2642: EXT4_C2B(sbi, partial->pclu),
    2645: ext4_rereserve_cluster(inode, partial->lblk);
    2647: partial->state = initial;
    2664: (partial->state != nofree)) {
    2672: partial->state = initial;
    2688: if (partial->state != initial && partial->pclu != EXT4_B2C(sbi, pblk))
    2689: partial->state = initial;
    2702: if (partial->state == initial) {
    2703: partial->pclu = EXT4_B2C(sbi, pblk);
    2704: partial->lblk = from;
    2705: partial->state = tofree;
    2708: partial->state = initial;
    2732: struct partial_cluster *partial,
    2764: trace_ext4_ext_rm_leaf(inode, start, ex, partial);
    2795: partial->pclu = EXT4_B2C(sbi, pblk);
    2796: partial->state = nofree;
    2838: err = ext4_remove_blocks(handle, inode, ex, partial, a, b);
    2895: if (partial->state == tofree && ex >= EXT_FIRST_EXTENT(eh)) {
    2897: if (partial->pclu != EXT4_B2C(sbi, pblk)) {
    2900: if (ext4_is_pending(inode, partial->lblk))
    2903: EXT4_C2B(sbi, partial->pclu),
    2906: ext4_rereserve_cluster(inode, partial->lblk);
    2908: partial->state = initial;
    2947: struct partial_cluster partial;
    2951: partial.pclu = 0;
    2952: partial.lblk = 0;
    2953: partial.state = initial;
    3014: partial.pclu = EXT4_B2C(sbi, pblk);
    3015: partial.state = nofree;
    3030: partial.state == initial) {
    3047: partial.pclu = EXT4_B2C(sbi, pblk);
    3048: partial.state = nofree;
    3084: &partial, start, end);
    3156: trace_ext4_ext_remove_space_done(inode, start, end, depth, &partial,
    3163: if (partial.state == tofree && err == 0) {
    3166: if (ext4_is_pending(inode, partial.lblk))
    3169: EXT4_C2B(sbi, partial.pclu),
    3172: ext4_rereserve_cluster(inode, partial.lblk);
    3173: partial.state = initial;
fs/ext4/extents_status.c
    977: bool partial;
    1016: rc->partial = false;
    1069: if (rc->partial && (rc->lclu != EXT4_B2C(sbi, i))) {
    1071: rc->partial = false;
    1081: rc->partial = false;
    1100: if (!rc->partial && i <= end) {
    1101: rc->partial = true;
    1171: if (rc->partial)
fs/ext4/indirect.c
    245: Indirect *partial)
    253: goal = ext4_find_near(inode, partial);
    517: Indirect *partial;
    533: partial = ext4_get_branch(inode, depth, offsets, chain, &err);
    536: if (!partial) {
    565: for (i = partial - chain + 1; i < depth; i++)
    598: ar.goal = ext4_find_goal(inode, map->m_lblk, partial);
    601: indirect_blks = (chain + depth) - partial - 1;
    607: ar.len = ext4_blks_to_allocate(partial, indirect_blks,
    614: offsets + (partial - chain), partial);
    624: err = ext4_splice_branch(handle, &ar, partial, indirect_blks);
    640: partial = chain + depth - 1; /* the whole chain */
    642: while (partial > chain) {
    643: BUFFER_TRACE(partial->bh, "call brelse");
    644: brelse(partial->bh);
    645: partial--;
    767: Indirect *partial, *p;
    774: partial = ext4_get_branch(inode, k, offsets, chain, &err);
    776: if (!partial)
    777: partial = chain + k-1;
    782: if (!partial->key && *partial->p)
    785: for (p = partial; (p > chain) && all_zeroes((__le32 *) p->bh->b_data, p->p); p--)
    804: while (partial > p) {
    805: brelse(partial->bh);
    806: partial--;
    809: return partial;
    1102: Indirect *partial;
    1142: partial = ext4_find_shared(inode, n, offsets, chain, &nr);
    1145: if (partial == chain) {
    1148: &nr, &nr+1, (chain+n-1) - partial);
    1149: *partial->p = 0;
    1156: BUFFER_TRACE(partial->bh, "get_write_access");
    1157: ext4_free_branches(handle, inode, partial->bh,
    1158: partial->p,
    1159: partial->p+1, (chain+n-1) - partial);
    1163: while (partial > chain) {
    1164: ext4_free_branches(handle, inode, partial->bh, partial->p + 1,
    1165: (__le32*)partial->bh->b_data+addr_per_block,
    1166: (chain+n-1) - partial);
    1167: BUFFER_TRACE(partial->bh, "call brelse");
    1168: brelse(partial->bh);
    1169: partial--;
    1218: Indirect *partial, *partial2;
    1261: partial = p = ext4_find_shared(inode, n, offsets, chain, &nr);
    1263: if (partial == chain) {
    1266: &nr, &nr+1, (chain+n-1) - partial);
    1267: *partial->p = 0;
    1270: BUFFER_TRACE(partial->bh, "get_write_access");
    1271: ext4_free_branches(handle, inode, partial->bh,
    1272: partial->p,
    1273: partial->p+1, (chain+n-1) - partial);
    1281: while (partial > chain) {
    1282: ext4_free_branches(handle, inode, partial->bh,
    1283: partial->p + 1,
    1284: (__le32 *)partial->bh->b_data+addr_per_block,
    1285: (chain+n-1) - partial);
    1286: partial--;
    1326: partial = p = ext4_find_shared(inode, n, offsets, chain, &nr);
    1331: int level = min(partial - chain, partial2 - chain2);
    1343: if (partial == chain) {
    1347: (chain+n-1) - partial);
    1348: *partial->p = 0;
    1351: BUFFER_TRACE(partial->bh, "get_write_access");
    1352: ext4_free_branches(handle, inode, partial->bh,
    1353: partial->p,
    1354: partial->p+1,
    1355: (chain+n-1) - partial);
    1370: while (partial > chain || partial2 > chain2) {
    1371: int depth = (chain+n-1) - partial;
    1374: if (partial > chain && partial2 > chain2 &&
    1375: partial->bh->b_blocknr == partial2->bh->b_blocknr) {
    1380: ext4_free_branches(handle, inode, partial->bh,
    1381: partial->p + 1,
    1383: (chain+n-1) - partial);
    1394: if (partial > chain && depth <= depth2) {
    1395: ext4_free_branches(handle, inode, partial->bh,
    1396: partial->p + 1,
    1397: (__le32 *)partial->bh->b_data+addr_per_block,
    1398: (chain+n-1) - partial);
    1399: partial--;
fs/ext4/inode.c
    1094: int *partial,
    1110: if (partial && !buffer_uptodate(bh))
    1111: *partial = 1;
    1527: int partial = 0;
    1556: from + copied, &partial,
    1558: if (!partial)
fs/ext4/move_extent.c
    173: int i, err, nr = 0, partial = 0;
    191: partial = 1;
    224: if (!partial)
fs/minix/itree_common.c
    152: Indirect *partial;
    160: partial = get_branch(inode, depth, offsets, chain, &err);
    163: if (!partial) {
    167: partial = chain+depth-1; /* the whole chain */
    174: while (partial > chain) {
    175: brelse(partial->bh);
    176: partial--;
    190: left = (chain + depth) - partial;
    191: err = alloc_branch(inode, left, offsets+(partial-chain), partial);
    195: if (splice_branch(inode, chain, partial, left) < 0)
    202: while (partial > chain) {
    203: brelse(partial->bh);
    204: partial--;
    223: Indirect *partial, *p;
    229: partial = get_branch(inode, k, offsets, chain, &err);
    232: if (!partial)
    233: partial = chain + k-1;
    234: if (!partial->key && *partial->p) {
    238: for (p=partial;p>chain && all_zeroes((block_t*)p->bh->b_data,p->p);p--)
    248: while(partial > p)
    250: brelse(partial->bh);
    251: partial--;
    254: return partial;
    300: Indirect *partial;
    320: partial = find_shared(inode, n, offsets, chain, &nr);
    322: if (partial == chain)
    325: mark_buffer_dirty_inode(partial->bh, inode);
    326: free_branches(inode, &nr, &nr+1, (chain+n-1) - partial);
    329: while (partial > chain) {
    330: free_branches(inode, partial->p + 1, block_end(partial->bh),
    331: (chain+n-1) - partial);
    332: mark_buffer_dirty_inode(partial->bh, inode);
    333: brelse (partial->bh);
    334: partial--;
fs/ntfs/file.c
    1407: bool partial;
    1412: partial = false;
    1419: partial = true;
    1429: if (!partial && !PageUptodate(page))
fs/ocfs2/alloc.c
    6809: int ret, partial = 0;
    6826: from, to, &partial,
    6837: if (!partial)
fs/ocfs2/aops.c
    429: int *partial,
    446: if (partial && !buffer_uptodate(bh))
    447: *partial = 1;
fs/ocfs2/aops.h
    28: int *partial,
fs/ocfs2/move_extents.c
    39: int partial;
    210: int ret, credits = 0, extra_blocks = 0, partial = context->partial;
    306: if (!partial) {
    322: if (partial && (new_len != *len))
    1049: context->partial = 1;
fs/ocfs2/refcounttree.c
    2908: int ret = 0, partial;
    2974: from, to, &partial,
fs/reiserfs/file.c
    180: int partial = 0;
    210: partial = 1;
    240: if (!partial)
fs/reiserfs/inode.c
    2536: int partial = 0;
    2693: partial = 1;
    2698: if (!partial)
fs/splice.c
    204: buf->offset = spd->partial[page_nr].offset;
    205: buf->len = spd->partial[page_nr].len;
    206: buf->private = spd->partial[page_nr].private;
    262: spd->partial = kmalloc_array(buffers, sizeof(struct partial_page),
    265: if (spd->pages && spd->partial)
    269: kfree(spd->partial);
    279: kfree(spd->partial);
fs/sysv/itree.c
    209: Indirect *partial;
    218: partial = get_branch(inode, depth, offsets, chain, &err);
    222: if (!partial) {
    227: partial = chain+depth-1; /* the whole chain */
    234: while (partial > chain) {
    235: brelse(partial->bh);
    236: partial--;
    250: left = (chain + depth) - partial;
    251: err = alloc_branch(inode, left, offsets+(partial-chain), partial);
    255: if (splice_branch(inode, chain, partial, left) < 0)
    262: while (partial > chain) {
    263: brelse(partial->bh);
    264: partial--;
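Editor's note: the fs/ext2, fs/ext4, fs/minix and fs/sysv entries in this listing share one convention: get_branch()/find_shared() fill a per-level array of Indirect descriptors, "partial" points at the deepest level actually reached, and callers unwind it back toward "chain", releasing one pinned buffer per level. Below is a minimal sketch of the descriptor and the unwind loop; the slot types are simplified from the __le32/block_t/sysv_zone_t the real code uses, and brelse() is stubbed so the sketch stands alone.

struct buffer_head;                     /* opaque here */

/* one level of the lookup: the indirect block, the slot within it, and
 * the block number read from that slot */
typedef struct {
        unsigned int *p;
        unsigned int key;
        struct buffer_head *bh;
} Indirect;

/* stand-in for the buffer-cache release */
static void brelse(struct buffer_head *bh)
{
        (void)bh;
}

/* drop every pinned level between the deepest reached and the root */
static void unwind_chain(Indirect *chain, Indirect *partial)
{
        while (partial > chain) {
                brelse(partial->bh);
                partial--;
        }
}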
fs/sysv/itree.c (continued)
    283: Indirect *partial, *p;
    291: partial = get_branch(inode, k, offsets, chain, &err);
    292: if (!partial)
    293: partial = chain + k-1;
    298: if (!partial->key && *partial->p) {
    302: for (p=partial; p>chain && all_zeroes((sysv_zone_t*)p->bh->b_data,p->p); p--)
    318: while (partial > p) {
    319: brelse(partial->bh);
    320: partial--;
    323: return partial;
    369: Indirect *partial;
    394: partial = find_shared(inode, n, offsets, chain, &nr);
    397: if (partial == chain)
    400: dirty_indirect(partial->bh, inode);
    401: free_branches(inode, &nr, &nr+1, (chain+n-1) - partial);
    404: while (partial > chain) {
    405: free_branches(inode, partial->p + 1, block_end(partial->bh),
    406: (chain+n-1) - partial);
    407: dirty_indirect(partial->bh, inode);
    408: brelse (partial->bh);
    409: partial--;
include/crypto/sha1_base.h
    40: unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
    44: if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) {
    47: if (partial) {
    48: int p = SHA1_BLOCK_SIZE - partial;
    50: memcpy(sctx->buffer + partial, data, p);
    64: partial = 0;
    67: memcpy(sctx->buffer + partial, data, len);
    78: unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
    80: sctx->buffer[partial++] = 0x80;
    81: if (partial > bit_offset) {
    82: memset(sctx->buffer + partial, 0x0, SHA1_BLOCK_SIZE - partial);
    83: partial = 0;
    88: memset(sctx->buffer + partial, 0x0, bit_offset - partial);
include/crypto/sha256_base.h
    41: unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
    45: if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
    48: if (partial) {
    49: int p = SHA256_BLOCK_SIZE - partial;
    51: memcpy(sctx->buf + partial, data, p);
    65: partial = 0;
    68: memcpy(sctx->buf + partial, data, len);
    79: unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
    81: sctx->buf[partial++] = 0x80;
    82: if (partial > bit_offset) {
    83: memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
    84: partial = 0;
    89: memset(sctx->buf + partial, 0x0, bit_offset - partial);
include/crypto/sha3.h
    25: unsigned int partial;
include/crypto/sha512_base.h
    61: unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;
    67: if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) {
    70: if (partial) {
    71: int p = SHA512_BLOCK_SIZE - partial;
    73: memcpy(sctx->buf + partial, data, p);
    87: partial = 0;
    90: memcpy(sctx->buf + partial, data, len);
    101: unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;
    103: sctx->buf[partial++] = 0x80;
    104: if (partial > bit_offset) {
    105: memset(sctx->buf + partial, 0x0, SHA512_BLOCK_SIZE - partial);
    106: partial = 0;
    111: memset(sctx->buf + partial, 0x0, bit_offset - partial);
include/crypto/sm3_base.h
    43: unsigned int partial = sctx->count % SM3_BLOCK_SIZE;
    47: if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) {
    50: if (partial) {
    51: int p = SM3_BLOCK_SIZE - partial;
    53: memcpy(sctx->buffer + partial, data, p);
    67: partial = 0;
    70: memcpy(sctx->buffer + partial, data, len);
    81: unsigned int partial = sctx->count % SM3_BLOCK_SIZE;
    83: sctx->buffer[partial++] = 0x80;
    84: if (partial > bit_offset) {
    85: memset(sctx->buffer + partial, 0x0, SM3_BLOCK_SIZE - partial);
    86: partial = 0;
    91: memset(sctx->buffer + partial, 0x0, bit_offset - partial);
include/keys/asymmetric-type.h
    78: bool partial);
include/linux/shdma-base.h
    50: size_t partial;
include/linux/skbuff.h
    4381: __wsum partial = SKB_GSO_CB(skb)->csum;
    4386: return csum_fold(csum_partial(csum_start, plen, partial));
    4507: __wsum partial;
    4510: partial = ~csum_unfold(*(__force __sum16 *)(csum_start +
    4516: return csum_partial(l4_hdr, csum_start - l4_hdr, partial);
include/linux/slub_def.h
    46: struct page *partial; /* Partially allocated frozen slabs */
    54: #define slub_percpu_partial(c) ((c)->partial)
include/linux/splice.h
    58: struct partial_page *partial; /* pages[] may not be contig */
kernel/relay.c
    1215: struct partial_page partial[PIPE_DEF_BUFFERS];
    1220: .partial = partial,
    1253: spd.partial[spd.nr_pages].offset = poff;
    1260: spd.partial[spd.nr_pages].len = this_len;
    1261: spd.partial[spd.nr_pages].private = private;
kernel/trace/trace.c
    6174: .partial = partial_def,
    6224: spd.partial[i].offset = 0;
    6225: spd.partial[i].len = trace_seq_used(&iter->seq);
    7399: (struct buffer_ref *)spd->partial[i].private;
    7402: spd->partial[i].private = 0;
    7416: .partial = partial_def,
    7479: spd.partial[i].len = PAGE_SIZE;
    7480: spd.partial[i].offset = 0;
    7481: spd.partial[i].private = (unsigned long)ref;
lib/crypto/sha256.c
    211: unsigned int partial, done;
    214: partial = sctx->count & 0x3f;
    219: if ((partial + len) > 63) {
    220: if (partial) {
    221: done = -partial;
    222: memcpy(sctx->buf + partial, data, done + 64);
    232: partial = 0;
    234: memcpy(sctx->buf + partial, src, len - done);
mm/slab.h
    618: struct list_head partial;
mm/slub.c
    1763: list_add_tail(&page->slab_list, &n->partial);
    1765: list_add(&page->slab_list, &n->partial);
    1853: list_for_each_entry_safe(page, page2, &n->partial, slab_list) {
    2188: while ((page = c->partial)) {
    2192: c->partial = page->next;
    2260: oldpage = this_cpu_read(s->cpu_slab->partial);
    2288: } while (this_cpu_cmpxchg(s->cpu_slab->partial, oldpage, page)
    2397: list_for_each_entry(page, &n->partial, slab_list)
    3346: INIT_LIST_HEAD(&n->partial);
    3727: list_for_each_entry_safe(page, h, &n->partial, slab_list) {
    4016: list_for_each_entry_safe(page, t, &n->partial, slab_list) {
    4037: list_splice(promote + i, &n->partial);
    4228: list_for_each_entry(p, &n->partial, slab_list)
    4449: list_for_each_entry(page, &n->partial, slab_list) {
    4656: list_for_each_entry(page, &n->partial, slab_list)
    5051: SLAB_ATTR_RO(partial);
net/core/skbuff.c
    2300: (spd->partial[spd->nr_pages - 1].offset +
    2301: spd->partial[spd->nr_pages - 1].len == offset);
    2322: spd->partial[spd->nr_pages - 1].len += *len;
    2327: spd->partial[spd->nr_pages].len = *len;
    2328: spd->partial[spd->nr_pages].offset = offset;
    2429: struct partial_page partial[MAX_SKB_FRAGS];
    2433: .partial = partial,
net/ipv4/tcp_output.c
    1718: static bool tcp_nagle_check(bool partial, const struct tcp_sock *tp,
    1721: return partial &&
    1772: u32 partial, needed, window, max_len;
    1785: partial = needed % mss_now;
    1790: if (tcp_nagle_check(partial != 0, tp, nonagle))
    1791: return needed - partial;
net/ipv4/udp.c
    1361: static void udp_rmem_release(struct sock *sk, int size, int partial,
    1368: if (likely(partial)) {
    1388: amt = (sk->sk_forward_alloc - partial) & ~(SK_MEM_QUANTUM - 1);
net/ipv4/udp_offload.c
    28: __wsum partial;
    41: partial = (__force __wsum)uh->len;
    43: partial = (__force __wsum)htonl(skb->len);
    44: partial = csum_sub(csum_unfold(uh->check), partial);
    131: uh->check = ~csum_fold(csum_add(partial,
net/netfilter/xt_dccp.c
    56: goto partial;
    74: partial:
net/smc/smc_rx.c
    155: struct partial_page partial;
    164: partial.offset = src - (char *)smc->conn.rmb_desc->cpu_addr;
    165: partial.len = len;
    166: partial.private = (unsigned long)priv;
    171: spd.partial = &partial;
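Editor's note: the splice-related entries in this listing (fs/splice.c, include/linux/splice.h, kernel/relay.c, kernel/trace/trace.c, net/core/skbuff.c, net/smc/smc_rx.c) pair each page in a splice_pipe_desc with a partial_page describing the live byte range within it. The field layout below follows include/linux/splice.h; the fill helper is an illustrative stand-in for what relay and trace do inline.

/* field layout follows include/linux/splice.h */
struct partial_page {
        unsigned int offset;    /* first valid byte within the page */
        unsigned int len;       /* number of valid bytes */
        unsigned long private;  /* owner cookie, e.g. a buffer reference */
};

/* record that page slot 'i' contributes bytes [offset, offset + len) */
static void spd_fill(struct partial_page *partial, unsigned int i,
                     unsigned int offset, unsigned int len,
                     unsigned long cookie)
{
        partial[i].offset = offset;
        partial[i].len = len;
        partial[i].private = cookie;
}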
sound/usb/line6/driver.c
    107: int partial;
    116: &partial, LINE6_TIMEOUT * HZ);
    121: &partial, LINE6_TIMEOUT * HZ);
tools/vm/slabinfo.c
    36: unsigned long partial, objects, slabs, objects_partial, objects_total;
    552: s->slab_size, s->slabs - s->partial - s->cpu_slabs,
    555: page_size << s->order, s->partial, onoff(s->poison),
    600: s->partial, s->cpu_slabs);
    643: s->slabs ? (s->partial * 100) / s->slabs : 100,
    851: percentage_partial_slabs = s->partial * 100 / s->slabs;
    863: if (s->partial < min_partial)
    864: min_partial = s->partial;
    888: if (s->partial > max_partial)
    889: max_partial = s->partial;
    911: total_partial += s->partial;
    1055: result = s1->partial < s2->partial;
    1226: slab->partial = get_obj("partial");
    1227: slab->partial = get_obj_and_str("partial", &t);
    1317: int loss, int size, int partial)
    1323: sort_partial = partial;
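Editor's note: the mm/slub.c and tools/vm/slabinfo.c entries above concern SLUB's partial-slab bookkeeping. Per-node partially filled slabs live on the n->partial list, while per-cpu partial slabs form a stack headed at c->partial and linked through page->next. The sketch below is a simplified single-threaded model of that structure only; the real code pushes with this_cpu_cmpxchg() for lock-freedom and keeps extra counters alongside the links.

#include <stddef.h>

struct page_stub {
        struct page_stub *next;         /* link in the per-cpu partial stack */
};

struct kmem_cache_cpu_stub {
        struct page_stub *partial;      /* head of the frozen partial slabs */
};

/* push one slab, roughly what put_cpu_partial() achieves atomically */
static void push_partial(struct kmem_cache_cpu_stub *c, struct page_stub *page)
{
        page->next = c->partial;
        c->partial = page;
}

/* detach the whole stack for flushing, as unfreeze_partials() walks it */
static struct page_stub *drain_partials(struct kmem_cache_cpu_stub *c)
{
        struct page_stub *list = c->partial;

        c->partial = NULL;
        return list;
}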