Every occurrence of nbSeq across the kernel's zstd sources, one match per line (symbol, line number, file, source line):

nbSeq             571 lib/zstd/compress.c 	U32 const nbSeq = (U32)(seqStorePtr->sequences - seqStorePtr->sequencesStart);
nbSeq             573 lib/zstd/compress.c 	for (u = 0; u < nbSeq; u++) {
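Line 571 and line 601 compute the same thing, once as U32 and once as size_t: sequences is the write cursor into the block's sequence array and sequencesStart its base, so the pointer difference is how many (literal-run, match) commands the parser has stored for this block. A trimmed, hypothetical stand-in for the real seqStore_t/seqDef, just to make the arithmetic concrete:

#include <stddef.h>

typedef struct {
	unsigned offset;                /* field layout is illustrative, not zstd's */
	unsigned litLength;
	unsigned matchLength;
} seqDef;

typedef struct {
	seqDef *sequencesStart;         /* base of the array */
	seqDef *sequences;              /* write cursor, bumped per stored sequence */
} seqStore;

size_t seqStoreCount(const seqStore *s)
{
	return (size_t)(s->sequences - s->sequencesStart);
}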
nbSeq             601 lib/zstd/compress.c 	size_t const nbSeq = seqStorePtr->sequences - seqStorePtr->sequencesStart;
nbSeq             632 lib/zstd/compress.c 	if (nbSeq < 0x7F)
nbSeq             633 lib/zstd/compress.c 		*op++ = (BYTE)nbSeq;
nbSeq             634 lib/zstd/compress.c 	else if (nbSeq < LONGNBSEQ)
nbSeq             635 lib/zstd/compress.c 		op[0] = (BYTE)((nbSeq >> 8) + 0x80), op[1] = (BYTE)nbSeq, op += 2;
nbSeq             637 lib/zstd/compress.c 		op[0] = 0xFF, ZSTD_writeLE16(op + 1, (U16)(nbSeq - LONGNBSEQ)), op += 3;
nbSeq             638 lib/zstd/compress.c 	if (nbSeq == 0)
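Lines 632-638 above are the variable-length sequence-count header of the zstd format: one byte for counts below 0x7F, two bytes with the top bit set for counts up to LONGNBSEQ (0x7F00 in zstd), and a 0xFF marker followed by a little-endian 16-bit remainder beyond that. The same branches as a standalone sketch, with the LE16 write spelled out in place of ZSTD_writeLE16:

#include <stddef.h>
#include <stdint.h>

#define LONGNBSEQ 0x7F00

/* Sketch: emit a sequence count exactly as lines 632-638 do.
 * Returns the number of header bytes written (1, 2 or 3). */
size_t writeNbSeq(uint8_t *op, size_t nbSeq)
{
	if (nbSeq < 0x7F) {                     /* 1 byte: 0..0x7E */
		op[0] = (uint8_t)nbSeq;
		return 1;
	}
	if (nbSeq < LONGNBSEQ) {                /* 2 bytes: top bit flags the form */
		op[0] = (uint8_t)((nbSeq >> 8) + 0x80);
		op[1] = (uint8_t)nbSeq;
		return 2;
	}
	op[0] = 0xFF;                           /* 3 bytes: 0xFF + LE16(nbSeq - LONGNBSEQ) */
	op[1] = (uint8_t)(nbSeq - LONGNBSEQ);
	op[2] = (uint8_t)((nbSeq - LONGNBSEQ) >> 8);
	return 3;
}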
nbSeq             653 lib/zstd/compress.c 		size_t const mostFrequent = FSE_countFast_wksp(count, &max, llCodeTable, nbSeq, workspace);
nbSeq             654 lib/zstd/compress.c 		if ((mostFrequent == nbSeq) && (nbSeq > 2)) {
nbSeq             658 lib/zstd/compress.c 		} else if ((zc->flagStaticTables) && (nbSeq < MAX_SEQ_FOR_STATIC_FSE)) {
nbSeq             660 lib/zstd/compress.c 		} else if ((nbSeq < MIN_SEQ_FOR_DYNAMIC_FSE) || (mostFrequent < (nbSeq >> (LL_defaultNormLog - 1)))) {
nbSeq             664 lib/zstd/compress.c 			size_t nbSeq_1 = nbSeq;
nbSeq             665 lib/zstd/compress.c 			const U32 tableLog = FSE_optimalTableLog(LLFSELog, nbSeq, max);
nbSeq             666 lib/zstd/compress.c 			if (count[llCodeTable[nbSeq - 1]] > 1) {
nbSeq             667 lib/zstd/compress.c 				count[llCodeTable[nbSeq - 1]]--;
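Given the histogram of literal-length codes (FSE_countFast_wksp fills count[] and reports the highest frequency), lines 654-663 pick one of the format's four table modes: RLE when a single code covers every sequence, reuse of existing tables (flagStaticTables, set when a dictionary provided them) while the block is small enough that retransmitting would cost more than it saves, the format's predefined distribution when the sample is too small or too flat for a custom table to pay off, and otherwise a freshly normalized FSE table. The decision alone, as a sketch; the set_* names are from the zstd format, and the two threshold values are assumed from compress.c and may differ by version:

typedef enum { set_basic, set_rle, set_compressed, set_repeat } symbolEncodingType_e;

#define MAX_SEQ_FOR_STATIC_FSE  1000    /* assumed from compress.c; check your tree */
#define MIN_SEQ_FOR_DYNAMIC_FSE   64    /* assumed likewise */

/* Sketch of the ladder at lines 654-663.  defaultNormLog is the
 * accuracy log of the predefined table (LL_defaultNormLog here). */
symbolEncodingType_e chooseMode(size_t mostFrequent, size_t nbSeq,
				unsigned defaultNormLog, int haveStaticTables)
{
	if (mostFrequent == nbSeq && nbSeq > 2)
		return set_rle;             /* every sequence uses one code */
	if (haveStaticTables && nbSeq < MAX_SEQ_FOR_STATIC_FSE)
		return set_repeat;          /* keep amortizing the existing table */
	if (nbSeq < MIN_SEQ_FOR_DYNAMIC_FSE ||
	    mostFrequent < (nbSeq >> (defaultNormLog - 1)))
		return set_basic;           /* predefined distribution is good enough */
	return set_compressed;              /* build and transmit a new FSE table */
}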
nbSeq             685 lib/zstd/compress.c 		size_t const mostFrequent = FSE_countFast_wksp(count, &max, ofCodeTable, nbSeq, workspace);
nbSeq             686 lib/zstd/compress.c 		if ((mostFrequent == nbSeq) && (nbSeq > 2)) {
nbSeq             690 lib/zstd/compress.c 		} else if ((zc->flagStaticTables) && (nbSeq < MAX_SEQ_FOR_STATIC_FSE)) {
nbSeq             692 lib/zstd/compress.c 		} else if ((nbSeq < MIN_SEQ_FOR_DYNAMIC_FSE) || (mostFrequent < (nbSeq >> (OF_defaultNormLog - 1)))) {
nbSeq             696 lib/zstd/compress.c 			size_t nbSeq_1 = nbSeq;
nbSeq             697 lib/zstd/compress.c 			const U32 tableLog = FSE_optimalTableLog(OffFSELog, nbSeq, max);
nbSeq             698 lib/zstd/compress.c 			if (count[ofCodeTable[nbSeq - 1]] > 1) {
nbSeq             699 lib/zstd/compress.c 				count[ofCodeTable[nbSeq - 1]]--;
nbSeq             717 lib/zstd/compress.c 		size_t const mostFrequent = FSE_countFast_wksp(count, &max, mlCodeTable, nbSeq, workspace);
nbSeq             718 lib/zstd/compress.c 		if ((mostFrequent == nbSeq) && (nbSeq > 2)) {
nbSeq             722 lib/zstd/compress.c 		} else if ((zc->flagStaticTables) && (nbSeq < MAX_SEQ_FOR_STATIC_FSE)) {
nbSeq             724 lib/zstd/compress.c 		} else if ((nbSeq < MIN_SEQ_FOR_DYNAMIC_FSE) || (mostFrequent < (nbSeq >> (ML_defaultNormLog - 1)))) {
nbSeq             728 lib/zstd/compress.c 			size_t nbSeq_1 = nbSeq;
nbSeq             729 lib/zstd/compress.c 			const U32 tableLog = FSE_optimalTableLog(MLFSELog, nbSeq, max);
nbSeq             730 lib/zstd/compress.c 			if (count[mlCodeTable[nbSeq - 1]] > 1) {
nbSeq             731 lib/zstd/compress.c 				count[mlCodeTable[nbSeq - 1]]--;
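The offset block at lines 685-699 and the match-length block at lines 717-731 repeat the literal-length ladder verbatim, swapping in their own alphabet bound, table log (OffFSELog, MLFSELog) and default distribution. The count[...[nbSeq - 1]]-- / nbSeq_1-- pair in each set_compressed branch removes one occurrence of the final sequence's code from the histogram before normalization; a plausible reading, consistent with the encoder below where that code seeds the FSE state rather than being coded through a state transition, is that the statistics come out slightly more accurate without it. The > 1 guard keeps the code from dropping to a zero count, since it still needs a slot in the table.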
nbSeq             759 lib/zstd/compress.c 		FSE_initCState2(&stateMatchLength, CTable_MatchLength, mlCodeTable[nbSeq - 1]);
nbSeq             760 lib/zstd/compress.c 		FSE_initCState2(&stateOffsetBits, CTable_OffsetBits, ofCodeTable[nbSeq - 1]);
nbSeq             761 lib/zstd/compress.c 		FSE_initCState2(&stateLitLength, CTable_LitLength, llCodeTable[nbSeq - 1]);
nbSeq             762 lib/zstd/compress.c 		BIT_addBits(&blockStream, sequences[nbSeq - 1].litLength, LL_bits[llCodeTable[nbSeq - 1]]);
nbSeq             765 lib/zstd/compress.c 		BIT_addBits(&blockStream, sequences[nbSeq - 1].matchLength, ML_bits[mlCodeTable[nbSeq - 1]]);
nbSeq             769 lib/zstd/compress.c 			U32 const ofBits = ofCodeTable[nbSeq - 1];
nbSeq             772 lib/zstd/compress.c 				BIT_addBits(&blockStream, sequences[nbSeq - 1].offset, extraBits);
nbSeq             775 lib/zstd/compress.c 			BIT_addBits(&blockStream, sequences[nbSeq - 1].offset >> extraBits, ofBits - extraBits);
nbSeq             777 lib/zstd/compress.c 			BIT_addBits(&blockStream, sequences[nbSeq - 1].offset, ofCodeTable[nbSeq - 1]);
nbSeq             783 lib/zstd/compress.c 			for (n = nbSeq - 2; n < nbSeq; n--) { /* intentional underflow */
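Encoding runs back to front because FSE decoding reverses encoding order: the decoder will read sequences first to last, so lines 759-777 emit the last sequence first. Its three codes seed the FSE states via FSE_initCState2, its literal-length and match-length extra bits go straight into the stream, and lines 769-775 split an over-wide offset field across two writes with a flush in between so it fits the bit container (the branch at 777 is the common single write). The remaining sequences are then emitted by the loop at line 783, whose "intentional underflow" comment names the unsigned wrap that terminates it. The idiom in isolation:

#include <stdio.h>

int main(void)
{
	size_t const nbSeq = 4;
	size_t n;

	/* Visits 2, 1, 0, then stops: decrementing 0 wraps to SIZE_MAX,
	 * which is not < nbSeq.  With nbSeq == 1 the body never runs,
	 * which is right, since the last sequence was already emitted
	 * when the FSE states were initialized. */
	for (n = nbSeq - 2; n < nbSeq; n--)
		printf("emit sequence %zu\n", n);
	return 0;
}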
nbSeq             806 lib/zstd/decompress.c 		int nbSeq = *ip++;
nbSeq             807 lib/zstd/decompress.c 		if (!nbSeq) {
nbSeq             811 lib/zstd/decompress.c 		if (nbSeq > 0x7F) {
nbSeq             812 lib/zstd/decompress.c 			if (nbSeq == 0xFF) {
nbSeq             815 lib/zstd/decompress.c 				nbSeq = ZSTD_readLE16(ip) + LONGNBSEQ, ip += 2;
nbSeq             819 lib/zstd/decompress.c 				nbSeq = ((nbSeq - 0x80) << 8) + *ip++;
nbSeq             822 lib/zstd/decompress.c 		*nbSeqPtr = nbSeq;
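Lines 806-822 are the decoder's inverse of the header written at 632-638: a leading byte at or below 0x7F is the count itself, 0xFF introduces a little-endian 16-bit remainder biased by LONGNBSEQ, and anything else is the high byte of the two-byte form. The same parse as a standalone sketch, returning 0 where the kernel code would return ERROR(srcSize_wrong):

#include <stddef.h>
#include <stdint.h>

#define LONGNBSEQ 0x7F00

/* Sketch: parse the 1/2/3-byte count written by writeNbSeq above.
 * Returns bytes consumed, or 0 on truncated input. */
size_t readNbSeq(const uint8_t *ip, size_t srcSize, int *nbSeqPtr)
{
	int nbSeq;

	if (srcSize < 1)
		return 0;
	nbSeq = ip[0];
	if (nbSeq <= 0x7F) {                    /* 1-byte form, including 0 */
		*nbSeqPtr = nbSeq;
		return 1;
	}
	if (nbSeq == 0xFF) {                    /* 3-byte form */
		if (srcSize < 3)
			return 0;
		*nbSeqPtr = (ip[1] | (ip[2] << 8)) + LONGNBSEQ;
		return 3;
	}
	if (srcSize < 2)                        /* 2-byte form */
		return 0;
	*nbSeqPtr = ((nbSeq - 0x80) << 8) + ip[1];
	return 2;
}

Round-tripping writeNbSeq's output through this parser recovers the original count for every representable value. Note that the decoder's test at line 811 accepts a single-byte 0x7F even though the encoder's test at line 632 never emits one.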
nbSeq            1105 lib/zstd/decompress.c 	int nbSeq;
nbSeq            1109 lib/zstd/decompress.c 		size_t const seqHSize = ZSTD_decodeSeqHeaders(dctx, &nbSeq, ip, seqSize);
nbSeq            1116 lib/zstd/decompress.c 	if (nbSeq) {
nbSeq            1129 lib/zstd/decompress.c 		for (; (BIT_reloadDStream(&(seqState.DStream)) <= BIT_DStream_completed) && nbSeq;) {
nbSeq            1130 lib/zstd/decompress.c 			nbSeq--;
nbSeq            1141 lib/zstd/decompress.c 		if (nbSeq)
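Lines 1129-1141 are the scalar regeneration loop: each iteration consumes one sequence, counting nbSeq down, and runs only while BIT_reloadDStream reports the bitstream still usable. A nonzero nbSeq after the loop (line 1141) means the stream ran dry before the count declared in the header was met, and the block is rejected as corrupt.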
nbSeq            1361 lib/zstd/decompress.c 	int nbSeq;
nbSeq            1365 lib/zstd/decompress.c 		size_t const seqHSize = ZSTD_decodeSeqHeaders(dctx, &nbSeq, ip, seqSize);
nbSeq            1372 lib/zstd/decompress.c 	if (nbSeq) {
nbSeq            1377 lib/zstd/decompress.c 		int const seqAdvance = MIN(nbSeq, ADVANCED_SEQS);
nbSeq            1403 lib/zstd/decompress.c 		for (; (BIT_reloadDStream(&(seqState.DStream)) <= BIT_DStream_completed) && seqNb < nbSeq; seqNb++) {
nbSeq            1413 lib/zstd/decompress.c 		if (seqNb < nbSeq)
nbSeq            1418 lib/zstd/decompress.c 		for (; seqNb < nbSeq; seqNb++) {
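The second decoder (lines 1361-1418) is the long-offset path, and its extra machinery is a small software pipeline: it decodes seqAdvance = MIN(nbSeq, ADVANCED_SEQS) sequences into a ring before executing any of them, so each queued sequence's match source can be prefetched while older ones execute, then drains the ring at line 1418 (with the same leftover-count corruption check at line 1413 as the scalar loop). A runnable toy of just that scheduling, with decode and execute reduced to array writes and printf; the ring-size name STORED_SEQS is assumed from the kernel source, and the prefetch itself is left as a comment:

#include <stdio.h>

#define STORED_SEQS   4                    /* assumed ring size, a power of two */
#define ADVANCED_SEQS STORED_SEQS          /* decode-ahead distance */
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main(void)
{
	int sequences[STORED_SEQS];        /* decoded but not yet executed */
	int nbSeq = 10, seqNb;
	int const seqAdvance = MIN(nbSeq, ADVANCED_SEQS);

	/* Prologue (cf. line 1377): decode seqAdvance items ahead.  The
	 * real code would prefetch each one's match source here. */
	for (seqNb = 0; seqNb < seqAdvance; seqNb++)
		sequences[seqNb] = seqNb;          /* "decode" sequence seqNb */

	/* Steady state (cf. line 1403): execute the item decoded
	 * ADVANCED_SEQS iterations ago, then decode one more. */
	for (; seqNb < nbSeq; seqNb++) {
		printf("execute %d\n",
		       sequences[(seqNb - ADVANCED_SEQS) & (STORED_SEQS - 1)]);
		sequences[seqNb & (STORED_SEQS - 1)] = seqNb;
	}

	/* Drain (cf. line 1418): execute what is still queued. */
	for (seqNb -= seqAdvance; seqNb < nbSeq; seqNb++)
		printf("execute %d\n", sequences[seqNb & (STORED_SEQS - 1)]);
	return 0;
}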