package gyac
-import "fmt"
+import (
+ "fmt"
+ "io"
+)
-// BLOB object, that keeps data splitted on ChunkLen chunks.
+// BLOB object. ChunkLen must be > 0; the blob's data is read from R.
type Blob struct {
- Chunks [][]byte
- ChunkLen int
+ R io.Reader
+ ChunkLen int
+ DecodedLen int64 // filled in after decoding
}
func (blob *Blob) String() string {
- var l int
- for _, chunk := range blob.Chunks {
- l += len(chunk)
- }
- return fmt.Sprintf("BLOB(%d, %d)", blob.ChunkLen, l)
-}
-
-func MakeBlob(chunkLen int, data []byte) (blob Blob) {
- blob.ChunkLen = chunkLen
- n := len(data) / chunkLen
- for i := 0; i < n; i++ {
- blob.Chunks = append(blob.Chunks, data[i*chunkLen:(i+1)*chunkLen])
- }
- left := len(data) - n*chunkLen
- if left > 0 {
- blob.Chunks = append(blob.Chunks, data[len(data)-left:])
- }
- return
+ return fmt.Sprintf("BLOB(%d, %d)", blob.ChunkLen, blob.DecodedLen)
}
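For reference, a minimal sketch of building the reader-backed Blob on the encoding side; only Blob, ChunkLen, R and String come from this patch, the chunk size and input string are arbitrary, and the usual fmt/strings imports plus the gyac import are assumed:

	blob := gyac.Blob{ChunkLen: 12, R: strings.NewReader("1234567890-")}
	// DecodedLen is only set by the decoder, so before decoding this prints "BLOB(12, 0)".
	fmt.Println(blob.String())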
"fmt"
"log"
"math/big"
+ "strings"
"time"
"github.com/google/uuid"
"utf8": "привет мир",
},
"blob": []any{
- gyac.MakeBlob(12, []byte{'5'}),
- gyac.MakeBlob(12, bytes.Repeat([]byte{'6'}, 12)),
- gyac.MakeBlob(12, bytes.Repeat([]byte{'7'}, 13)),
- gyac.MakeBlob(5, []byte("1234567890-")),
+ gyac.Blob{ChunkLen: 12, R: strings.NewReader("5")},
+ gyac.Blob{ChunkLen: 12, R: strings.NewReader(strings.Repeat("6", 12))},
+ gyac.Blob{ChunkLen: 12, R: strings.NewReader(strings.Repeat("7", 13))},
+ gyac.Blob{ChunkLen: 5, R: strings.NewReader("1234567890-")},
},
"empties": []any{
[]any{},
map[string]any{},
- gyac.MakeBlob(123, []byte{}),
+ gyac.Blob{ChunkLen: 123, R: bytes.NewReader(nil)},
uuid.Nil,
atom.Raw{T: atom.TAI64, V: mustHexDec("0000000000000000")},
},
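Because the BLOB is now backed by an arbitrary io.Reader, its payload does not have to sit in memory before encoding. A sketch under that assumption; os.Open, the file name and the 1 MiB chunk size are illustrative only, not part of the patch:

	f, err := os.Open("dump.bin")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	fileBlob := gyac.Blob{ChunkLen: 1 << 20, R: f}
	fmt.Println(fileBlob.String()) // prints "BLOB(1048576, 0)": DecodedLen is filled only by the decoder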
package gyac
import (
+ "bytes"
"errors"
"io"
v := Blob{ChunkLen: chunkLen}
var sub Item
var subRead int64
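+ // Each decoded chunk is wrapped in its own reader; they are stitched into v.R once the terminating Bin arrives.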
+ var chunks []io.Reader
BlobCycle:
for {
sub, subRead, err = decode(r, false, recursionDepth+1)
if err != nil {
return
}
- v.Chunks = append(v.Chunks, buf)
+ chunks = append(chunks, bytes.NewReader(buf))
+ v.DecodedLen += int64(chunkLen)
case types.Bin:
b := sub.V.([]byte)
if len(b) >= chunkLen {
return
}
if len(b) != 0 {
- v.Chunks = append(v.Chunks, b)
+ chunks = append(chunks, bytes.NewReader(b))
+ v.DecodedLen += int64(len(b))
}
break BlobCycle
default:
return
}
}
+ v.R = io.MultiReader(chunks...)
item.V = v
return
}
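On the consumer side the payload is drained through the reader instead of iterating over chunks. A sketch, assuming item is the gyac.Item produced by the decoder (how it is obtained is out of scope here) and the io/log imports are in place:

	blob := item.V.(gyac.Blob)
	data, err := io.ReadAll(blob.R)
	if err != nil {
		log.Fatal(err)
	}
	// DecodedLen was accumulated chunk by chunk while decoding.
	if int64(len(data)) != blob.DecodedLen {
		log.Fatalf("read %d bytes, expected %d", len(data), blob.DecodedLen)
	}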
if err != nil {
return
}
- var n int64
- for _, chunk := range blob.Chunks {
- if len(chunk) == blob.ChunkLen {
- n, err = atom.ChunkEncode(w, chunk)
- written += n
- if err != nil {
- return
+ chunk := make([]byte, blob.ChunkLen)
+ var n int
+ var n64 int64
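+ // Read full ChunkLen-sized pieces from R; the first short (possibly
+ // empty) read ends the loop and is emitted as the Bin terminator below.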
+ for {
+ n, err = io.ReadFull(blob.R, chunk)
+ if err != nil {
+ if err == io.ErrUnexpectedEOF || err == io.EOF {
+ chunk = chunk[:n]
+ break
}
+ return
}
- }
- if len(blob.Chunks) == 0 {
- n, err = atom.BinEncode(w, []byte{})
- } else {
- last := blob.Chunks[len(blob.Chunks)-1]
- if len(last) == blob.ChunkLen {
- n, err = atom.BinEncode(w, []byte{})
- } else {
- n, err = atom.BinEncode(w, last)
+ n64, err = atom.ChunkEncode(w, chunk)
+ written += n64
+ if err != nil {
+ return
}
}
- written += n
+ n64, err = atom.BinEncode(w, chunk)
+ written += n64
case types.TAI64:
return atom.TAI64Encode(w, item.V.([]byte))
case types.Bin:
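The encoding loop leans on io.ReadFull's contract: a complete read returns a nil error, a short final read returns io.ErrUnexpectedEOF with n > 0, and a read at exact EOF returns io.EOF with n == 0. A standalone sketch (not part of the patch) of how the 13-byte "7" example above splits with ChunkLen = 12, one full Chunk followed by a 1-byte Bin terminator; a 12-byte input would end with an empty Bin instead:

	r := strings.NewReader("7777777777777") // 13 bytes, as in the example data
	buf := make([]byte, 12)
	n, err := io.ReadFull(r, buf)
	fmt.Println(n, err) // 12 <nil>: encoded as a Chunk
	n, err = io.ReadFull(r, buf)
	fmt.Println(n, err) // 1 unexpected EOF: buf[:1] is encoded as the Bin terminator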