Commit a1e3f250 authored by Poul-Henning Kamp's avatar Poul-Henning Kamp

Also register the start, stop and last bits when compressing


PS: I'm getting faster at this; today I wrote three lines of code per hour.



git-svn-id: http://www.varnish-cache.org/svn/trunk/varnish-cache@5703 d4fa192b-c00b-0410-8231-f00ffab90ce4
parent 6875d5dd
......@@ -811,6 +811,9 @@ int ZEXPORT deflate (strm, flush)
ERR_RETURN(strm, Z_BUF_ERROR);
}
if (strm->start_bit == 0)
strm->start_bit = (strm->total_out + s->pending) * 8 + s->bi_valid;
/* Start a new block or continue the current one.
*/
if (strm->avail_in != 0 || s->lookahead != 0 ||
......
......@@ -931,6 +931,10 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
int max_blindex = 0; /* index of last bit length code of non zero freq */
if (last)
s->strm->last_bit =
(s->strm->total_out + s->pending) * 8 + s->bi_valid;
/* Build the Huffman trees unless a stored block is forced */
if (s->level > 0) {
......@@ -1011,6 +1015,9 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
init_block(s);
if (last) {
s->strm->stop_bit =
(s->strm->total_out + s->pending) * 8 + s->bi_valid;
bi_windup(s);
#ifdef DEBUG
s->compressed_len += 7; /* align on byte boundary */
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment