untrusted comment: verify with openbsd-70-base.pub
RWR3KL+gSr4QZ4fkEvX2u08KCdO5q0O1i39d/UFoCUCCzYYaHhuXImhA+nivJIkYBm6w0rK/bqHomvsIOenM6W3VeG6jMs7MNAc=
OpenBSD 7.0 errata 018, April 1, 2022:

Memory corruption in zlib can lead to a crash.

Apply by doing:
    signify -Vep /etc/signify/openbsd-70-base.pub -x 018_zlib.patch.sig \
        -m - | (cd /usr/src && patch -p0)

And then rebuild and install libz and perl:
    cd /usr/src/lib/libz
    make obj
    make
    make install
    cd /usr/src/gnu/usr.bin/perl
    make -f Makefile.bsd-wrapper obj
    make -f Makefile.bsd-wrapper cleandir
    make -f Makefile.bsd-wrapper
    make -f Makefile.bsd-wrapper install

Index: lib/libz/deflate.c
===================================================================
RCS file: /cvs/src/lib/libz/deflate.c,v
retrieving revision 1.12
diff -u -p -r1.12 deflate.c
--- lib/libz/deflate.c	4 Jul 2021 14:24:49 -0000	1.12
+++ lib/libz/deflate.c	24 Mar 2022 16:11:16 -0000
@@ -249,11 +249,6 @@ int ZEXPORT deflateInit2_(strm, level, m
     int wrap = 1;
     static const char my_version[] = ZLIB_VERSION;
-    ushf *overlay;
-    /* We overlay pending_buf and d_buf+l_buf. This works since the average
-     * output size for (length,distance) codes is <= 24 bits.
-     */
-
     if (version == Z_NULL || version[0] != my_version[0] ||
         stream_size != sizeof(z_stream)) {
         return Z_VERSION_ERROR;
     }
@@ -323,9 +318,47 @@ int ZEXPORT deflateInit2_(strm, level, m

     s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

-    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
-    s->pending_buf = (uchf *) overlay;
-    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);
+    /* We overlay pending_buf and sym_buf. This works since the average size
+     * for length/distance pairs over any compressed block is assured to be 31
+     * bits or less.
+     *
+     * Analysis: The longest fixed codes are a length code of 8 bits plus 5
+     * extra bits, for lengths 131 to 257. The longest fixed distance codes are
+     * 5 bits plus 13 extra bits, for distances 16385 to 32768. The longest
+     * possible fixed-codes length/distance pair is then 31 bits total.
+     *
+     * sym_buf starts one-fourth of the way into pending_buf. So there are
+     * three bytes in sym_buf for every four bytes in pending_buf. Each symbol
+     * in sym_buf is three bytes -- two for the distance and one for the
+     * literal/length. As each symbol is consumed, the pointer to the next
+     * sym_buf value to read moves forward three bytes. From that symbol, up to
+     * 31 bits are written to pending_buf. The closest the written pending_buf
+     * bits gets to the next sym_buf symbol to read is just before the last
+     * code is written. At that time, 31*(n-2) bits have been written, just
+     * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at
+     * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1
+     * symbols are written.) The closest the writing gets to what is unread is
+     * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and
+     * can range from 128 to 32768.
+     *
+     * Therefore, at a minimum, there are 142 bits of space between what is
+     * written and what is read in the overlain buffers, so the symbols cannot
+     * be overwritten by the compressed data. That space is actually 139 bits,
+     * due to the three-bit fixed-code block header.
+     *
+     * That covers the case where either Z_FIXED is specified, forcing fixed
+     * codes, or when the use of fixed codes is chosen, because that choice
+     * results in a smaller compressed block than dynamic codes. That latter
+     * condition then assures that the above analysis also covers all dynamic
+     * blocks. A dynamic-code block will only be chosen to be emitted if it has
+     * fewer bits than a fixed-code block would for the same set of symbols.
+     * Therefore its average symbol length is assured to be less than 31. So
+     * the compressed data for a dynamic block also cannot overwrite the
+     * symbols from which it is being constructed.
+     */
+
+    s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, 4);
+    s->pending_buf_size = (ulg)s->lit_bufsize * 4;

     if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
         s->pending_buf == Z_NULL) {
@@ -334,8 +367,12 @@ int ZEXPORT deflateInit2_(strm, level, m
         deflateEnd (strm);
         return Z_MEM_ERROR;
     }
-    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
-    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;
+    s->sym_buf = s->pending_buf + s->lit_bufsize;
+    s->sym_end = (s->lit_bufsize - 1) * 3;
+    /* We avoid equality with lit_bufsize*3 because of wraparound at 64K
+     * on 16 bit machines and because stored blocks are restricted to
+     * 64K-1 bytes.
+     */

     s->level = level;
     s->strategy = strategy;
@@ -546,7 +583,8 @@ int ZEXPORT deflatePrime (strm, bits, va

     if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
     s = strm->state;
-    if ((Bytef *)(s->d_buf) < s->pending_out + ((Buf_size + 7) >> 3))
+    if (bits < 0 || bits > 16 ||
+        s->sym_buf < s->pending_out + ((Buf_size + 7) >> 3))
         return Z_BUF_ERROR;
     do {
         put = Buf_size - s->bi_valid;
@@ -1105,7 +1143,6 @@ int ZEXPORT deflateCopy (dest, source)
 #else
     deflate_state *ds;
     deflate_state *ss;
-    ushf *overlay;


     if (deflateStateCheck(source) || dest == Z_NULL) {
@@ -1125,8 +1162,7 @@ int ZEXPORT deflateCopy (dest, source)
     ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
     ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
     ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
-    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
-    ds->pending_buf = (uchf *) overlay;
+    ds->pending_buf = (uchf *) ZALLOC(dest, ds->lit_bufsize, 4);

     if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
         ds->pending_buf == Z_NULL) {
@@ -1140,8 +1176,7 @@ int ZEXPORT deflateCopy (dest, source)
     zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

     ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
-    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
-    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;
+    ds->sym_buf = ds->pending_buf + ds->lit_bufsize;

     ds->l_desc.dyn_tree = ds->dyn_ltree;
     ds->d_desc.dyn_tree = ds->dyn_dtree;
@@ -1909,7 +1944,7 @@ local block_state deflate_fast(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2040,7 +2075,7 @@ local block_state deflate_slow(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2115,7 +2150,7 @@ local block_state deflate_rle(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2154,7 +2189,7 @@ local block_state deflate_huff(s, flush)
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
Index: lib/libz/deflate.h
===================================================================
RCS file: /cvs/src/lib/libz/deflate.h,v
retrieving revision 1.10
diff -u -p -r1.10 deflate.h
--- lib/libz/deflate.h	4 Jul 2021 17:41:23 -0000	1.10
+++ lib/libz/deflate.h	24 Mar 2022 16:11:16 -0000
@@ -217,7 +217,7 @@ typedef struct internal_state {
     /* Depth of each subtree used as tie breaker for trees of equal frequency
      */

-    uchf *l_buf;          /* buffer for literals or lengths */
+    uchf *sym_buf;        /* buffer for distances and literals/lengths */

     uInt  lit_bufsize;
     /* Size of match buffer for literals/lengths.  There are 4 reasons for
@@ -239,13 +239,8 @@ typedef struct internal_state {
      *   - I can't count above 4
      */

-    uInt last_lit;      /* running index in l_buf */
-
-    ushf *d_buf;
-    /* Buffer for distances. To simplify the code, d_buf and l_buf have
-     * the same number of elements. To use different lengths, an extra flag
-     * array would be necessary.
-     */
+    uInt sym_next;      /* running index in sym_buf */
+    uInt sym_end;       /* symbol table full when sym_next reaches this */

     ulg opt_len;        /* bit length of current block with optimal trees */
     ulg static_len;     /* bit length of current block with static trees */
@@ -325,20 +320,22 @@ void ZLIB_INTERNAL _tr_stored_block OF((

 # define _tr_tally_lit(s, c, flush) \
   { uch cc = (c); \
-    s->d_buf[s->last_lit] = 0; \
-    s->l_buf[s->last_lit++] = cc; \
+    s->sym_buf[s->sym_next++] = 0; \
+    s->sym_buf[s->sym_next++] = 0; \
+    s->sym_buf[s->sym_next++] = cc; \
     s->dyn_ltree[cc].Freq++; \
-    flush = (s->last_lit == s->lit_bufsize-1); \
+    flush = (s->sym_next == s->sym_end); \
   }
 # define _tr_tally_dist(s, distance, length, flush) \
   { uch len = (uch)(length); \
     ush dist = (ush)(distance); \
-    s->d_buf[s->last_lit] = dist; \
-    s->l_buf[s->last_lit++] = len; \
+    s->sym_buf[s->sym_next++] = dist; \
+    s->sym_buf[s->sym_next++] = dist >> 8; \
+    s->sym_buf[s->sym_next++] = len; \
     dist--; \
     s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
     s->dyn_dtree[d_code(dist)].Freq++; \
-    flush = (s->last_lit == s->lit_bufsize-1); \
+    flush = (s->sym_next == s->sym_end); \
   }
 #else
 # define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
Index: lib/libz/trees.c
===================================================================
RCS file: /cvs/src/lib/libz/trees.c,v
retrieving revision 1.9
diff -u -p -r1.9 trees.c
--- lib/libz/trees.c	4 Jul 2021 14:24:49 -0000	1.9
+++ lib/libz/trees.c	24 Mar 2022 16:11:16 -0000
@@ -415,7 +415,7 @@ local void init_block(s)

     s->dyn_ltree[END_BLOCK].Freq = 1;
     s->opt_len = s->static_len = 0L;
-    s->last_lit = s->matches = 0;
+    s->sym_next = s->matches = 0;
 }

 #define SMALLEST 1
@@ -946,7 +946,7 @@ void ZLIB_INTERNAL _tr_flush_block(s, bu

         Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
-                s->last_lit));
+                s->sym_next / 3));

         if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

@@ -1015,8 +1015,9 @@ int ZLIB_INTERNAL _tr_tally (s, dist, lc
     unsigned dist;  /* distance of matched string */
     unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
 {
-    s->d_buf[s->last_lit] = (ush)dist;
-    s->l_buf[s->last_lit++] = (uch)lc;
+    s->sym_buf[s->sym_next++] = dist;
+    s->sym_buf[s->sym_next++] = dist >> 8;
+    s->sym_buf[s->sym_next++] = lc;
     if (dist == 0) {
         /* lc is the unmatched char */
         s->dyn_ltree[lc].Freq++;
@@ -1031,30 +1032,7 @@ int ZLIB_INTERNAL _tr_tally (s, dist, lc
         s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
         s->dyn_dtree[d_code(dist)].Freq++;
     }
-
-#ifdef TRUNCATE_BLOCK
-    /* Try to guess if it is profitable to stop the current block here */
-    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
-        /* Compute an upper bound for the compressed length */
-        ulg out_length = (ulg)s->last_lit*8L;
-        ulg in_length = (ulg)((long)s->strstart - s->block_start);
-        int dcode;
-        for (dcode = 0; dcode < D_CODES; dcode++) {
-            out_length += (ulg)s->dyn_dtree[dcode].Freq *
-                (5L+extra_dbits[dcode]);
-        }
-        out_length >>= 3;
-        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
-               s->last_lit, in_length, out_length,
-               100L - out_length*100L/in_length));
-        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
-    }
-#endif
-    return (s->last_lit == s->lit_bufsize-1);
-    /* We avoid equality with lit_bufsize because of wraparound at 64K
-     * on 16 bit machines and because stored blocks are restricted to
-     * 64K-1 bytes.
-     */
+    return (s->sym_next == s->sym_end);
 }

 /* ===========================================================================
@@ -1067,13 +1045,14 @@ local void compress_block(s, ltree, dtre
 {
     unsigned dist;      /* distance of matched string */
     int lc;             /* match length or unmatched char (if dist == 0) */
-    unsigned lx = 0;    /* running index in l_buf */
+    unsigned sx = 0;    /* running index in sym_buf */
     unsigned code;      /* the code to send */
     int extra;          /* number of extra bits to send */

-    if (s->last_lit != 0) do {
-        dist = s->d_buf[lx];
-        lc = s->l_buf[lx++];
+    if (s->sym_next != 0) do {
+        dist = s->sym_buf[sx++] & 0xff;
+        dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
+        lc = s->sym_buf[sx++];
         if (dist == 0) {
             send_code(s, lc, ltree); /* send a literal byte */
             Tracecv(isgraph(lc), (stderr," '%c' ", lc));
@@ -1098,11 +1077,10 @@ local void compress_block(s, ltree, dtre
             }
         } /* literal or match pair ? */

-        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
-        Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
-               "pendingBuf overflow");
+        /* Check that the overlay between pending_buf and sym_buf is ok: */
+        Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");

-    } while (lx < s->last_lit);
+    } while (sx < s->sym_next);

     send_code(s, END_BLOCK, ltree);
 }
Index: gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.c
===================================================================
RCS file: /cvs/src/gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.c,v
retrieving revision 1.8
diff -u -p -r1.8 deflate.c
--- gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.c	1 Mar 2021 23:19:45 -0000	1.8
+++ gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.c	24 Mar 2022 16:11:16 -0000
@@ -254,11 +254,6 @@ int ZEXPORT deflateInit2_(
     int wrap = 1;
     static const char my_version[] = ZLIB_VERSION;
-    ushf *overlay;
-    /* We overlay pending_buf and d_buf+l_buf. This works since the average
-     * output size for (length,distance) codes is <= 24 bits.
-     */
-
     if (version == Z_NULL || version[0] != my_version[0] ||
         stream_size != sizeof(z_stream)) {
         return Z_VERSION_ERROR;
     }
@@ -328,9 +323,47 @@ int ZEXPORT deflateInit2_(

     s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

-    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
-    s->pending_buf = (uchf *) overlay;
-    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);
+    /* We overlay pending_buf and sym_buf. This works since the average size
+     * for length/distance pairs over any compressed block is assured to be 31
+     * bits or less.
+     *
+     * Analysis: The longest fixed codes are a length code of 8 bits plus 5
+     * extra bits, for lengths 131 to 257. The longest fixed distance codes are
+     * 5 bits plus 13 extra bits, for distances 16385 to 32768. The longest
+     * possible fixed-codes length/distance pair is then 31 bits total.
+     *
+     * sym_buf starts one-fourth of the way into pending_buf. So there are
+     * three bytes in sym_buf for every four bytes in pending_buf. Each symbol
+     * in sym_buf is three bytes -- two for the distance and one for the
+     * literal/length. As each symbol is consumed, the pointer to the next
+     * sym_buf value to read moves forward three bytes. From that symbol, up to
+     * 31 bits are written to pending_buf. The closest the written pending_buf
+     * bits gets to the next sym_buf symbol to read is just before the last
+     * code is written. At that time, 31*(n-2) bits have been written, just
+     * after 24*(n-2) bits have been consumed from sym_buf. sym_buf starts at
+     * 8*n bits into pending_buf. (Note that the symbol buffer fills when n-1
+     * symbols are written.) The closest the writing gets to what is unread is
+     * then n+14 bits. Here n is lit_bufsize, which is 16384 by default, and
+     * can range from 128 to 32768.
+     *
+     * Therefore, at a minimum, there are 142 bits of space between what is
+     * written and what is read in the overlain buffers, so the symbols cannot
+     * be overwritten by the compressed data. That space is actually 139 bits,
+     * due to the three-bit fixed-code block header.
+     *
+     * That covers the case where either Z_FIXED is specified, forcing fixed
+     * codes, or when the use of fixed codes is chosen, because that choice
+     * results in a smaller compressed block than dynamic codes. That latter
+     * condition then assures that the above analysis also covers all dynamic
+     * blocks. A dynamic-code block will only be chosen to be emitted if it has
+     * fewer bits than a fixed-code block would for the same set of symbols.
+     * Therefore its average symbol length is assured to be less than 31. So
+     * the compressed data for a dynamic block also cannot overwrite the
+     * symbols from which it is being constructed.
+     */
+
+    s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, 4);
+    s->pending_buf_size = (ulg)s->lit_bufsize * 4;

     if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
         s->pending_buf == Z_NULL) {
@@ -339,8 +372,12 @@ int ZEXPORT deflateInit2_(
         deflateEnd (strm);
         return Z_MEM_ERROR;
     }
-    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
-    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;
+    s->sym_buf = s->pending_buf + s->lit_bufsize;
+    s->sym_end = (s->lit_bufsize - 1) * 3;
+    /* We avoid equality with lit_bufsize*3 because of wraparound at 64K
+     * on 16 bit machines and because stored blocks are restricted to
+     * 64K-1 bytes.
+     */

     s->level = level;
     s->strategy = strategy;
@@ -551,7 +588,8 @@ int ZEXPORT deflatePrime (

     if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
     s = strm->state;
-    if ((Bytef *)(s->d_buf) < s->pending_out + ((Buf_size + 7) >> 3))
+    if (bits < 0 || bits > 16 ||
+        s->sym_buf < s->pending_out + ((Buf_size + 7) >> 3))
         return Z_BUF_ERROR;
     do {
         put = Buf_size - s->bi_valid;
@@ -1110,7 +1148,6 @@ int ZEXPORT deflateCopy (
 #else
     deflate_state *ds;
     deflate_state *ss;
-    ushf *overlay;


     if (deflateStateCheck(source) || dest == Z_NULL) {
@@ -1130,8 +1167,7 @@ int ZEXPORT deflateCopy (
     ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
     ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
     ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
-    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
-    ds->pending_buf = (uchf *) overlay;
+    ds->pending_buf = (uchf *) ZALLOC(dest, ds->lit_bufsize, 4);

     if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
         ds->pending_buf == Z_NULL) {
@@ -1145,8 +1181,7 @@ int ZEXPORT deflateCopy (
     zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

     ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
-    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
-    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;
+    ds->sym_buf = ds->pending_buf + ds->lit_bufsize;

     ds->l_desc.dyn_tree = ds->dyn_ltree;
     ds->d_desc.dyn_tree = ds->dyn_dtree;
@@ -1915,7 +1950,7 @@ local block_state deflate_fast(
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2046,7 +2081,7 @@ local block_state deflate_slow(
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2121,7 +2156,7 @@ local block_state deflate_rle(
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
@@ -2160,7 +2195,7 @@ local block_state deflate_huff(
         FLUSH_BLOCK(s, 1);
         return finish_done;
     }
-    if (s->last_lit)
+    if (s->sym_next)
         FLUSH_BLOCK(s, 0);
     return block_done;
 }
Index: gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.h
===================================================================
RCS file: /cvs/src/gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.h,v
retrieving revision 1.7
diff -u -p -r1.7 deflate.h
--- gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.h	13 Feb 2019 21:15:08 -0000	1.7
+++ gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/deflate.h	24 Mar 2022 16:11:16 -0000
@@ -217,7 +217,7 @@ typedef struct internal_state {
     /* Depth of each subtree used as tie breaker for trees of equal frequency
      */

-    uchf *l_buf;          /* buffer for literals or lengths */
+    uchf *sym_buf;        /* buffer for distances and literals/lengths */

     uInt  lit_bufsize;
     /* Size of match buffer for literals/lengths.  There are 4 reasons for
@@ -239,13 +239,8 @@ typedef struct internal_state {
      *   - I can't count above 4
      */

-    uInt last_lit;      /* running index in l_buf */
-
-    ushf *d_buf;
-    /* Buffer for distances. To simplify the code, d_buf and l_buf have
-     * the same number of elements. To use different lengths, an extra flag
-     * array would be necessary.
-     */
+    uInt sym_next;      /* running index in sym_buf */
+    uInt sym_end;       /* symbol table full when sym_next reaches this */

     ulg opt_len;        /* bit length of current block with optimal trees */
     ulg static_len;     /* bit length of current block with static trees */
@@ -325,20 +320,22 @@ void ZLIB_INTERNAL _tr_stored_block OF((

 # define _tr_tally_lit(s, c, flush) \
   { uch cc = (c); \
-    s->d_buf[s->last_lit] = 0; \
-    s->l_buf[s->last_lit++] = cc; \
+    s->sym_buf[s->sym_next++] = 0; \
+    s->sym_buf[s->sym_next++] = 0; \
+    s->sym_buf[s->sym_next++] = cc; \
     s->dyn_ltree[cc].Freq++; \
-    flush = (s->last_lit == s->lit_bufsize-1); \
+    flush = (s->sym_next == s->sym_end); \
   }
 # define _tr_tally_dist(s, distance, length, flush) \
   { uch len = (uch)(length); \
     ush dist = (ush)(distance); \
-    s->d_buf[s->last_lit] = dist; \
-    s->l_buf[s->last_lit++] = len; \
+    s->sym_buf[s->sym_next++] = dist; \
+    s->sym_buf[s->sym_next++] = dist >> 8; \
+    s->sym_buf[s->sym_next++] = len; \
     dist--; \
     s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
     s->dyn_dtree[d_code(dist)].Freq++; \
-    flush = (s->last_lit == s->lit_bufsize-1); \
+    flush = (s->sym_next == s->sym_end); \
   }
 #else
 # define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
Index: gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/trees.c
===================================================================
RCS file: /cvs/src/gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/trees.c,v
retrieving revision 1.7
diff -u -p -r1.7 trees.c
--- gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/trees.c	13 Feb 2019 21:15:08 -0000	1.7
+++ gnu/usr.bin/perl/cpan/Compress-Raw-Zlib/zlib-src/trees.c	24 Mar 2022 16:11:16 -0000
@@ -416,7 +416,7 @@ local void init_block(

     s->dyn_ltree[END_BLOCK].Freq = 1;
     s->opt_len = s->static_len = 0L;
-    s->last_lit = s->matches = 0;
+    s->sym_next = s->matches = 0;
 }

 #define SMALLEST 1
@@ -949,7 +949,7 @@ void ZLIB_INTERNAL _tr_flush_block(

         Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                 opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
-                s->last_lit));
+                s->sym_next / 3));

         if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

@@ -1018,8 +1018,9 @@ int ZLIB_INTERNAL _tr_tally (
     unsigned dist,
     unsigned lc)
 {
-    s->d_buf[s->last_lit] = (ush)dist;
-    s->l_buf[s->last_lit++] = (uch)lc;
+    s->sym_buf[s->sym_next++] = dist;
+    s->sym_buf[s->sym_next++] = dist >> 8;
+    s->sym_buf[s->sym_next++] = lc;
     if (dist == 0) {
         /* lc is the unmatched char */
         s->dyn_ltree[lc].Freq++;
@@ -1034,30 +1035,7 @@ int ZLIB_INTERNAL _tr_tally (
         s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
         s->dyn_dtree[d_code(dist)].Freq++;
     }
-
-#ifdef TRUNCATE_BLOCK
-    /* Try to guess if it is profitable to stop the current block here */
-    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
-        /* Compute an upper bound for the compressed length */
-        ulg out_length = (ulg)s->last_lit*8L;
-        ulg in_length = (ulg)((long)s->strstart - s->block_start);
-        int dcode;
-        for (dcode = 0; dcode < D_CODES; dcode++) {
-            out_length += (ulg)s->dyn_dtree[dcode].Freq *
-                (5L+extra_dbits[dcode]);
-        }
-        out_length >>= 3;
-        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
-               s->last_lit, in_length, out_length,
-               100L - out_length*100L/in_length));
-        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
-    }
-#endif
-    return (s->last_lit == s->lit_bufsize-1);
-    /* We avoid equality with lit_bufsize because of wraparound at 64K
-     * on 16 bit machines and because stored blocks are restricted to
-     * 64K-1 bytes.
-     */
+    return (s->sym_next == s->sym_end);
 }

 /* ===========================================================================
@@ -1070,13 +1048,14 @@ local void compress_block(
 {
     unsigned dist;      /* distance of matched string */
     int lc;             /* match length or unmatched char (if dist == 0) */
-    unsigned lx = 0;    /* running index in l_buf */
+    unsigned sx = 0;    /* running index in sym_buf */
     unsigned code;      /* the code to send */
     int extra;          /* number of extra bits to send */

-    if (s->last_lit != 0) do {
-        dist = s->d_buf[lx];
-        lc = s->l_buf[lx++];
+    if (s->sym_next != 0) do {
+        dist = s->sym_buf[sx++] & 0xff;
+        dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
+        lc = s->sym_buf[sx++];
         if (dist == 0) {
             send_code(s, lc, ltree); /* send a literal byte */
             Tracecv(isgraph(lc), (stderr," '%c' ", lc));
@@ -1101,11 +1080,10 @@ local void compress_block(
             }
         } /* literal or match pair ? */

-        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
-        Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,
-               "pendingBuf overflow");
+        /* Check that the overlay between pending_buf and sym_buf is ok: */
+        Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");

-    } while (lx < s->last_lit);
+    } while (sx < s->sym_next);

     send_code(s, END_BLOCK, ltree);
 }
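Background note, not part of the signed patch and not needed to apply it: the fix replaces the separate d_buf/l_buf arrays with a single sym_buf in which every recorded literal or match occupies three bytes (distance low byte, distance high byte, literal/length byte), with a distance of zero marking a plain literal. The standalone sketch below illustrates only that packing and the way the patched _tr_tally() and compress_block() write and read it back; the names struct sym_state, sym_tally and sym_dump are invented for the illustration and do not exist in zlib.

/*
 * Illustrative sketch of the three-byte symbol format introduced by the fix.
 * Not zlib code; buffer sizes follow the defaults described in the patch.
 */
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

#define LIT_BUFSIZE 16384               /* default: 1 << (memLevel + 6) with memLevel 8 */

struct sym_state {
    unsigned char *pending_buf;         /* lit_bufsize * 4 bytes, as allocated by the fix */
    unsigned char *sym_buf;             /* starts one-fourth of the way into pending_buf */
    unsigned sym_next;                  /* running byte index in sym_buf */
    unsigned sym_end;                   /* buffer is full when sym_next reaches this */
};

static void sym_init(struct sym_state *s)
{
    s->pending_buf = malloc((size_t)LIT_BUFSIZE * 4);
    assert(s->pending_buf != NULL);
    s->sym_buf = s->pending_buf + LIT_BUFSIZE;
    s->sym_next = 0;
    s->sym_end = (LIT_BUFSIZE - 1) * 3; /* one symbol short of lit_bufsize*3, as in the patch */
}

/* Record one literal (dist == 0) or one match; returns 1 when the buffer is full. */
static int sym_tally(struct sym_state *s, unsigned dist, unsigned lc)
{
    s->sym_buf[s->sym_next++] = (unsigned char)dist;
    s->sym_buf[s->sym_next++] = (unsigned char)(dist >> 8);
    s->sym_buf[s->sym_next++] = (unsigned char)lc;
    return s->sym_next == s->sym_end;
}

/* Walk the buffer the same way the patched compress_block() does. */
static void sym_dump(const struct sym_state *s)
{
    unsigned sx = 0;

    while (sx < s->sym_next) {
        unsigned dist = s->sym_buf[sx++] & 0xff;
        dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
        unsigned lc = s->sym_buf[sx++];
        if (dist == 0)
            printf("literal %u\n", lc);
        else
            printf("match dist %u len %u\n", dist, lc + 3); /* lc stores length - MIN_MATCH (3) */
    }
}

int main(void)
{
    struct sym_state s;

    sym_init(&s);
    sym_tally(&s, 0, 'z');              /* a literal byte */
    sym_tally(&s, 32768, 258 - 3);      /* the longest possible match */
    sym_dump(&s);
    free(s.pending_buf);
    return 0;
}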
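The margin claimed in the new overlay comment can also be checked directly. The comment argues that the write position in pending_buf never gets within n+14 bits of the next unread symbol, where n is lit_bufsize. The short program below only reproduces that arithmetic; the loop bounds assume zlib's documented memLevel range of 1 to 9, which gives lit_bufsize values of 128 through 32768.

/* Reproduce the "n + 14 bits" margin arithmetic from the new comment. */
#include <stdio.h>

int main(void)
{
    for (int mem_level = 1; mem_level <= 9; mem_level++) {
        long n = 1L << (mem_level + 6);        /* lit_bufsize: 128 .. 32768 */
        long consumed = 8 * n + 24 * (n - 2);  /* bit offset of the next unread symbol */
        long written = 31 * (n - 2);           /* worst-case bits emitted at that point */
        long margin = consumed - written;      /* simplifies to n + 14 */
        printf("memLevel %d: lit_bufsize %5ld, margin %5ld bits (%ld after the"
               " 3-bit block header)\n", mem_level, n, margin, margin - 3);
    }
    return 0;
}

For the smallest allowed lit_bufsize of 128 this prints a margin of 142 bits, or 139 after the three-bit fixed-code block header, matching the figures in the comment.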