When decoding Huffman codes, if an invalid bit sequence is discovered,
reject the input instead of treating it as a 0-length code.
Fixes #10426.
Change-Id: Ie2f1a3a718afd7c6bee73a67480d4b84936c21c9
Reviewed-on: https://go-review.googlesource.com/8893
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Nigel Tao <nigeltao@golang.org>
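
What goes wrong without the check: each decoder table entry packs the
decoded symbol in its high bits and the code length in its low bits, so
an unassigned entry reads as a length of 0. The decoder previously
treated that as a 0-length code for symbol 0, consuming no input and
making no progress. A minimal standalone sketch of this failure mode
(table layout and constants are simplified for illustration; this is
not the package's actual code):

	package main

	import "fmt"

	func main() {
		const countBits = 4 // width of the length field (illustrative)

		// One-symbol Huffman code: the bit "0" decodes to symbol 0
		// with code length 1; the slot for the bit "1" is unassigned.
		table := [2]uint32{0<<countBits | 1, 0}

		for _, bit := range []uint32{0, 1} {
			entry := table[bit]
			if n := entry & (1<<countBits - 1); n == 0 {
				fmt.Printf("bit %d: no code assigned; reject as corrupt\n", bit)
			} else {
				fmt.Printf("bit %d: symbol %d, consumed %d bit(s)\n", bit, entry>>countBits, n)
			}
		}
	}

The new test below sets up exactly this situation: a one-symbol Huffman
code, then an input byte (0xff) whose very first bit matches no
assigned code.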
t.Fatalf("Given sequence of bits is bad and should not succeed.")
}
}
+
+func TestInvalidEncoding(t *testing.T) {
+	// Initialize Huffman decoder to recognize "0".
+	var h huffmanDecoder
+	if !h.init([]int{1}) {
+		t.Fatal("Failed to initialize Huffman decoder")
+	}
+
+	// Initialize decompressor with invalid Huffman coding.
+	var f decompressor
+	f.r = bytes.NewReader([]byte{0xff})
+
+	_, err := f.huffSym(&h)
+	if err == nil {
+		t.Fatal("Should have rejected invalid bit sequence")
+	}
+}
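
The test drives huffSym directly because init accepts the degenerate
one-code coding (a single symbol with code length 1), so the bad bit
sequence can only be caught at decode time. Through the public API the
rejection surfaces as a CorruptInputError. A sketch of what a caller
sees (hand-crafting bytes that reach the huffSym path is fiddly, so
this example corrupts a block header instead, which yields the same
error type):

	package main

	import (
		"bytes"
		"compress/flate"
		"fmt"
		"io"
	)

	func main() {
		// Build a small, valid raw-deflate stream to corrupt.
		var buf bytes.Buffer
		w, err := flate.NewWriter(&buf, flate.NoCompression)
		if err != nil {
			panic(err)
		}
		w.Write([]byte("hi"))
		w.Close()

		// Flipping the first byte turns the stored-block header into
		// a reserved block type, which the reader must reject.
		bad := append([]byte(nil), buf.Bytes()...)
		bad[0] ^= 0xff

		r := flate.NewReader(bytes.NewReader(bad))
		defer r.Close()
		_, err = io.Copy(io.Discard, r)
		fmt.Printf("%T: %v\n", err, err) // a flate.CorruptInputError
	}

The fix itself relocates the zero-length check so that it applies to
both table levels: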
 		if n > huffmanChunkBits {
 			chunk = h.links[chunk>>huffmanValueShift][(f.b>>huffmanChunkBits)&h.linkMask]
 			n = uint(chunk & huffmanCountMask)
+		}
+		if n <= f.nb {
 			if n == 0 {
 				f.err = CorruptInputError(f.roffset)
 				return 0, f.err
 			}
-		}
-		if n <= f.nb {
 			f.b >>= n
 			f.nb -= n
 			return int(chunk >> huffmanValueShift), nil
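
For context, the resulting control flow in huffSym reads as follows;
the enclosing loop and the refill step are reconstructed from the
surrounding decoder code, with explanatory comments added here:

	// n starts at h.min, the length of the shortest code.
	for {
		for f.nb < n { // too few buffered bits for a length-n code
			if err := f.moreBits(); err != nil {
				return 0, err
			}
		}
		// First-level lookup; low bits of the entry hold the length.
		chunk := h.chunks[f.b&(huffmanNumChunks-1)]
		n = uint(chunk & huffmanCountMask)
		if n > huffmanChunkBits {
			// Long code: follow the second-level (links) table.
			chunk = h.links[chunk>>huffmanValueShift][(f.b>>huffmanChunkBits)&h.linkMask]
			n = uint(chunk & huffmanCountMask)
		}
		if n <= f.nb {
			// n == 0 means the buffered bits match no assigned code.
			// The check now covers both lookup levels; previously only
			// the links path was checked, so a zero first-level entry
			// decoded as symbol 0 without consuming any bits.
			if n == 0 {
				f.err = CorruptInputError(f.roffset)
				return 0, f.err
			}
			f.b >>= n
			f.nb -= n
			return int(chunk >> huffmanValueShift), nil
		}
	}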