if s.Type == SHT_NOBITS {
return io.NewSectionReader(&nobitsSectionReader{}, 0, int64(s.Size))
}
+
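+ // zrd, when set, returns a decompressor for the section's compression format.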
+ var zrd func(io.Reader) (io.ReadCloser, error)
if s.Flags&SHF_COMPRESSED == 0 {
- return io.NewSectionReader(s.sr, 0, 1<<63-1)
- }
- if s.compressionType == COMPRESS_ZLIB {
- return &readSeekerFromReader{
- reset: func() (io.Reader, error) {
- fr := io.NewSectionReader(s.sr, s.compressionOffset, int64(s.FileSize)-s.compressionOffset)
- return zlib.NewReader(fr)
- },
- size: int64(s.Size),
+
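+ // Legacy ".zdebug" sections are not flagged SHF_COMPRESSED; they start with
+ // a 12-byte header: the magic "ZLIB" and the uncompressed size in big-endian.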
+ if !strings.HasPrefix(s.Name, ".zdebug") {
+ return io.NewSectionReader(s.sr, 0, 1<<63-1)
}
+
+ b := make([]byte, 12)
+ n, _ := s.sr.ReadAt(b, 0)
+ if n != 12 || string(b[:4]) != "ZLIB" {
+ return io.NewSectionReader(s.sr, 0, 1<<63-1)
+ }
+
+ s.compressionOffset = 12
+ s.compressionType = COMPRESS_ZLIB
+ s.Size = binary.BigEndian.Uint64(b[4:12])
+ zrd = zlib.NewReader
+
+ } else if s.Flags&SHF_ALLOC != 0 {
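+ // The ELF specification permits SHF_COMPRESSED only on non-allocable sections.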
+ return errorReader{&FormatError{int64(s.Offset),
+ "SHF_COMPRESSED applies only to non-allocable sections", s.compressionType}}
+ }
+
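+ // Pick a decompressor for the recorded compression type.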
+ switch s.compressionType {
+ case COMPRESS_ZLIB:
+ zrd = zlib.NewReader
+ }
+
+ if zrd == nil {
+ return errorReader{&FormatError{int64(s.Offset), "unknown compression type", s.compressionType}}
+ }
+
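+ // Decompress lazily: reset re-creates a decompressing reader over the
+ // raw bytes that follow the compression header.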
+ return &readSeekerFromReader{
+ reset: func() (io.Reader, error) {
+ fr := io.NewSectionReader(s.sr, s.compressionOffset, int64(s.FileSize)-s.compressionOffset)
+ return zrd(fr)
+ },
+ size: int64(s.Size),
}
- err := &FormatError{int64(s.Offset), "unknown compression type", s.compressionType}
- return errorReader{err}
}
// A ProgHeader represents a single ELF program header.
if err != nil && uint64(len(b)) < s.Size {
return nil, err
}
- var dlen uint64
- if len(b) >= 12 && string(b[:4]) == "ZLIB" {
- dlen = binary.BigEndian.Uint64(b[4:12])
- s.compressionOffset = 12
- }
- if dlen == 0 && len(b) >= 12 && s.Flags&SHF_COMPRESSED != 0 &&
- s.Flags&SHF_ALLOC == 0 &&
- f.FileHeader.ByteOrder.Uint32(b[:]) == uint32(COMPRESS_ZLIB) {
- s.compressionType = COMPRESS_ZLIB
- switch f.FileHeader.Class {
- case ELFCLASS32:
- // Chdr32.Size offset
- dlen = uint64(f.FileHeader.ByteOrder.Uint32(b[4:]))
- s.compressionOffset = 12
- case ELFCLASS64:
- if len(b) < 24 {
- return nil, errors.New("invalid compress header 64")
- }
- // Chdr64.Size offset
- dlen = f.FileHeader.ByteOrder.Uint64(b[8:])
- s.compressionOffset = 24
- default:
- return nil, fmt.Errorf("unsupported compress header:%s", f.FileHeader.Class)
- }
- }
- if dlen > 0 {
- r, err := zlib.NewReader(bytes.NewBuffer(b[s.compressionOffset:]))
- if err != nil {
- return nil, err
- }
- b, err = saferio.ReadData(r, dlen)
- if err != nil {
- return nil, err
- }
- if err := r.Close(); err != nil {
- return nil, err
- }
- }
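+ // Section.Data (via Section.Open) already decompresses compressed
+ // sections, so b holds uncompressed bytes here.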
if f.Type == ET_EXEC {
// Do not apply relocations to DWARF sections for ET_EXEC binaries.
import (
"bytes"
"compress/gzip"
+ "compress/zlib"
"debug/dwarf"
"encoding/binary"
"fmt"
"path"
"reflect"
"runtime"
+ "strings"
"testing"
)
t.Errorf("DynValue(DT_VERNEEDNUM): got %v, want [1]", vals)
}
}
+
+func TestIssue59208(t *testing.T) {
+ // Corrupted DWARF data should surface a DWARF decoding error, not a zlib error.
+ const orig = "testdata/compressed-64.obj"
+ f, err := Open(orig)
+ if err != nil {
+ t.Fatal(err)
+ }
+ sec := f.Section(".debug_info")
+
+ data, err := os.ReadFile(orig)
+ if err != nil {
+ t.Fatal(err)
+ }
+
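+ // Copy the file unchanged up to the start of the compressed .debug_info payload.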
+ dn := make([]byte, len(data))
+ zoffset := sec.Offset + uint64(sec.compressionOffset)
+ copy(dn, data[:zoffset])
+
+ ozd, err := sec.Data()
+ if err != nil {
+ t.Fatal(err)
+ }
+ buf := bytes.NewBuffer(nil)
+ wr := zlib.NewWriter(buf)
+ // Overwrite the start of the decompressed data with the COMPRESS_ZLIB value (1):
+ // the re-compressed zlib stream stays valid, but the DWARF data does not.
+ copy(ozd, []byte{1, 0, 0, 0})
+ wr.Write(ozd)
+ wr.Close()
+
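+ // Write the corrupted payload back and restore everything after the section.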
+ copy(dn[zoffset:], buf.Bytes())
+ copy(dn[sec.Offset+sec.FileSize:], data[sec.Offset+sec.FileSize:])
+
+ nf, err := NewFile(bytes.NewReader(dn))
+ if err != nil {
+ t.Error(err)
+ }
+
+ const want = "decoding dwarf section info"
+ _, err = nf.DWARF()
+ if err == nil || !strings.Contains(err.Error(), want) {
+ t.Errorf("DWARF = %v; want %q", err, want)
+ }
+}