import "sort"
-func qsufsort(data []byte) []int {
+func qsufsort(data []byte) []int32 {
// initial sorting by first byte of suffix
sa := sortedByFirstByte(data)
if len(sa) < 2 {
return sa
}
// initialize the group lookup table
// this becomes the inverse of the suffix array when all groups are sorted
inv := initGroups(sa, data)
// the index starts 1-ordered
sufSortable := &suffixSortable{sa, inv, 1}
- for sa[0] > -len(sa) { // until all suffixes are one big sorted group
+ for int(sa[0]) > -len(sa) { // until all suffixes are one big sorted group
// The suffixes are h-ordered, make them 2*h-ordered
pi := 0 // pi is first position of first group
sl := 0 // sl is negated length of sorted groups
for pi < len(sa) {
- if s := sa[pi]; s < 0 { // if pi starts sorted group
+ if s := int(sa[pi]); s < 0 { // if pi starts sorted group
pi -= s // skip over sorted group
sl += s // add negated length to sl
} else { // if pi starts unsorted group
if sl != 0 {
- sa[pi+sl] = sl // combine sorted groups before pi
+ sa[pi+sl] = int32(sl) // combine sorted groups before pi
sl = 0
}
- pk := inv[s] + 1 // pk-1 is last position of unsorted group
+ pk := int(inv[s]) + 1 // pk-1 is last position of unsorted group
sufSortable.sa = sa[pi:pk]
sort.Sort(sufSortable)
sufSortable.updateGroups(pi)
pi = pk // next group
}
}
if sl != 0 { // if the array ends with a sorted group
- sa[pi+sl] = sl // combine sorted groups at end of sa
+ sa[pi+sl] = int32(sl) // combine sorted groups at end of sa
}
sufSortable.h *= 2 // double sorted depth
}
for i := range sa { // reconstruct suffix array from inverse
- sa[inv[i]] = i
+ sa[inv[i]] = int32(i)
}
return sa
}
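
The loop above is the prefix-doubling step of the Larsson-Sadakane qsufsort algorithm: suffixes that are already ordered by their first h bytes are re-sorted within each unsorted group using the group numbers found h positions further on, so every pass doubles the sorted depth until each group has shrunk to a single sorted suffix. As a reference for what the function is expected to return, here is a naive cross-check; it is an illustrative sketch only (not part of this change) and additionally needs the "bytes" package:

	// naiveSuffixArray lists the suffix start positions of data in
	// lexicographic order by comparing the suffixes directly. It exists
	// only to illustrate the contract of qsufsort, which computes the
	// same permutation in O(N*log(N)).
	func naiveSuffixArray(data []byte) []int32 {
		sa := make([]int32, len(data))
		for i := range sa {
			sa[i] = int32(i)
		}
		sort.Sort(bySuffix{data, sa})
		return sa
	}

	type bySuffix struct {
		data []byte
		sa   []int32
	}

	func (x bySuffix) Len() int           { return len(x.sa) }
	func (x bySuffix) Less(i, j int) bool { return bytes.Compare(x.data[x.sa[i]:], x.data[x.sa[j]:]) < 0 }
	func (x bySuffix) Swap(i, j int)      { x.sa[i], x.sa[j] = x.sa[j], x.sa[i] }

	// For data = []byte("banana"), both functions yield [5 3 1 0 4 2]:
	// "a" < "ana" < "anana" < "banana" < "na" < "nana".
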
-func sortedByFirstByte(data []byte) []int {
+func sortedByFirstByte(data []byte) []int32 {
// total byte counts
var count [256]int
for _, b := range data {
count[b]++
}
// make count[b] equal index of first occurrence of b in sorted array
sum := 0
for b := range count {
count[b], sum = sum, count[b]+sum
}
// iterate through bytes, placing index into the correct spot in sa
- sa := make([]int, len(data))
+ sa := make([]int32, len(data))
for i, b := range data {
- sa[count[b]] = i
+ sa[count[b]] = int32(i)
count[b]++
}
return sa
}
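
sortedByFirstByte is a counting sort keyed on the first byte of each suffix: the first loop builds a histogram, the prefix-sum pass turns count[b] into the slot where the first suffix starting with byte b belongs, and the placement loop drops each position into its slot, keeping ties in input order. A small in-package illustration (hypothetical, and it assumes an extra "fmt" import):

	// For "abab" the histogram is count['a'] = 2, count['b'] = 2; after the
	// prefix-sum pass count['a'] = 0 and count['b'] = 2, so the two
	// 'a'-suffixes (positions 0 and 2) land first, in input order, followed
	// by the two 'b'-suffixes (positions 1 and 3).
	func exampleFirstByteOrder() {
		fmt.Println(sortedByFirstByte([]byte("abab"))) // prints [0 2 1 3]
	}
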
-func initGroups(sa []int, data []byte) []int {
+func initGroups(sa []int32, data []byte) []int32 {
// label contiguous same-letter groups with the same group number
- inv := make([]int, len(data))
+ inv := make([]int32, len(data))
prevGroup := len(sa) - 1
groupByte := data[sa[prevGroup]]
for i := len(sa) - 1; i >= 0; i-- {
if b := data[sa[i]]; b < groupByte { // byte change: a new group ends at i
if prevGroup == i+1 {
sa[i+1] = -1 // previous group has size 1 and is thus sorted
}
groupByte = b
prevGroup = i
}
- inv[sa[i]] = prevGroup
+ inv[sa[i]] = int32(prevGroup)
if prevGroup == 0 {
sa[0] = -1
}
}
// move the last suffix (the shortest one) to the front of its group and
// mark it sorted; this keeps e.g. "a" before "aba" even with an unstable sort
lastByte := data[len(data)-1]
s := -1
for i := range sa {
if data[sa[i]] == lastByte && s == -1 {
s = i
}
- if sa[i] == len(sa)-1 {
+ if int(sa[i]) == len(sa)-1 {
sa[i], sa[s] = sa[s], sa[i]
- inv[sa[s]] = s
+ inv[sa[s]] = int32(s)
sa[s] = -1 // mark it as an isolated sorted group
break
}
}
return inv
}
type suffixSortable struct {
- sa []int
- inv []int
- h int
+ sa []int32
+ inv []int32
+ h int32
}
func (x *suffixSortable) Len() int { return len(x.sa) }
func (x *suffixSortable) Less(i, j int) bool { return x.inv[x.sa[i]+x.h] < x.inv[x.sa[j]+x.h] }
func (x *suffixSortable) Swap(i, j int) { x.sa[i], x.sa[j] = x.sa[j], x.sa[i] }
func (x *suffixSortable) updateGroups(offset int) {
// bounds collects the end index of each new group found by comparing the
// group numbers at depth h
bounds := make([]int, 0, 4)
group := x.inv[x.sa[0]+x.h]
for i := 1; i < len(x.sa); i++ {
if g := x.inv[x.sa[i]+x.h]; g > group {
bounds = append(bounds, i)
group = g
}
}
bounds = append(bounds, len(x.sa))
// update the group numberings after all new groups are determined
prev := 0
for _, b := range bounds {
for i := prev; i < b; i++ {
- x.inv[x.sa[i]] = offset + b - 1
+ x.inv[x.sa[i]] = int32(offset + b - 1)
}
if b-prev == 1 {
x.sa[prev] = -1 // group of size 1 is sorted
}
prev = b
}
}

import (
"bytes"
+ "encoding/binary"
"exp/regexp"
+ "io"
+ "os"
"sort"
)
// Index implements a suffix array for fast substring search.
type Index struct {
data []byte
- sa []int // suffix array for data
+ sa []int32 // suffix array for data; len(sa) == len(data)
}
// New creates a new Index for data.
// Index creation time is O(N*log(N)).
func New(data []byte) *Index {
return &Index{data, qsufsort(data)}
}
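
For context, typical client use of the type looks as follows; this is an illustrative sketch, not code from the package or its tests, and it assumes the caller imports "fmt" and "index/suffixarray":

	func exampleLookup() {
		// Build the index once (O(N*log(N))), then query it repeatedly.
		idx := suffixarray.New([]byte("banana"))

		// With n < 0, Lookup reports every occurrence of the pattern; the
		// offsets come back in suffix-array order, not sorted by position.
		fmt.Println(idx.Lookup([]byte("ana"), -1)) // offsets 1 and 3, in some order

		// With n = 1, at most one (arbitrary) occurrence is reported.
		fmt.Println(idx.Lookup([]byte("ana"), 1))
	}
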
+// Read reads the index from r into x; x must not be nil.
+func (x *Index) Read(r io.Reader) os.Error {
+ var n int32
+ if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
+ return err
+ }
+ if 2*n < int32(cap(x.data)) || int32(cap(x.data)) < n {
+ // new data is significantly smaller or larger than
+ // existing buffers - allocate new ones
+ x.data = make([]byte, n)
+ x.sa = make([]int32, n)
+ } else {
+ // re-use existing buffers
+ x.data = x.data[0:n]
+ x.sa = x.sa[0:n]
+ }
+
+ if err := binary.Read(r, binary.LittleEndian, x.data); err != nil {
+ return err
+ }
+ if err := binary.Read(r, binary.LittleEndian, x.sa); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// Write writes the index x to w.
+func (x *Index) Write(w io.Writer) os.Error {
+ n := int32(len(x.data))
+ if err := binary.Write(w, binary.LittleEndian, n); err != nil {
+ return err
+ }
+ if err := binary.Write(w, binary.LittleEndian, x.data); err != nil {
+ return err
+ }
+ if err := binary.Write(w, binary.LittleEndian, x.sa); err != nil {
+ return err
+ }
+ return nil
+}
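
Together, Write and Read define a simple little-endian format: a single int32 length n, followed by the n data bytes, followed by the n int32 suffix array entries. A round-trip sketch using a bytes.Buffer (the helper and its name are ours, not part of the change; the os.Error return matches the pre-Go 1 error API used throughout this diff):

	// cloneIndex serializes x and restores it into a fresh Index. Any
	// io.Writer/io.Reader pair works the same way; a bytes.Buffer keeps
	// the sketch self-contained.
	func cloneIndex(x *suffixarray.Index) (*suffixarray.Index, os.Error) {
		var buf bytes.Buffer
		if err := x.Write(&buf); err != nil {
			return nil, err
		}
		var y suffixarray.Index
		if err := y.Read(&buf); err != nil {
			return nil, err
		}
		return &y, nil
	}
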
+
// Bytes returns the data over which the index was created.
// It must not be modified.
//
func (x *Index) Bytes() []byte {
return x.data
}

// at returns the suffix of data that starts at position sa[i].
func (x *Index) at(i int) []byte {
return x.data[x.sa[i]:]
}
// lookupAll returns a slice into the matching region of the index.
// The runtime is O(log(N)*len(s)).
-func (x *Index) lookupAll(s []byte) []int {
+func (x *Index) lookupAll(s []byte) []int32 {
// find matching suffix index range [i:j]
// find the first index where s would be the prefix
i := sort.Search(len(x.sa), func(i int) bool { return bytes.Compare(x.at(i), s) >= 0 })
// starting at i, find the first index at which s is not a prefix anymore
j := i + sort.Search(len(x.sa)-i, func(j int) bool { return !bytes.HasPrefix(x.at(j+i), s) })
return x.sa[i:j]
}
func (x *Index) Lookup(s []byte, n int) (result []int) {
if len(s) > 0 && n != 0 {
matches := x.lookupAll(s)
- if len(matches) < n || n < 0 {
+ if n < 0 || len(matches) < n {
n = len(matches)
}
+ // 0 <= n <= len(matches)
if n > 0 {
result = make([]int, n)
- copy(result, matches)
+ for i, x := range matches[0:n] {
+ result[i] = int(x)
+ }
}
}
return
}
func testConstruction(t *testing.T, tc *testCase, x *Index) {
if !sort.IsSorted((*index)(x)) {
- t.Errorf("testConstruction failed %s", tc.name)
+ t.Errorf("failed testConstruction %s", tc.name)
+ }
+}
+
+func equal(x, y *Index) bool {
+ if !bytes.Equal(x.data, y.data) {
+ return false
+ }
+ for i, j := range x.sa {
+ if j != y.sa[i] {
+ return false
+ }
+ }
+ return true
+}
+
+func testSaveRestore(t *testing.T, tc *testCase, x *Index) {
+ var buf bytes.Buffer
+ if err := x.Write(&buf); err != nil {
+ t.Errorf("failed writing index %s (%s)", tc.name, err)
+ }
+ var y Index
+ if err := y.Read(&buf); err != nil {
+ t.Errorf("failed reading index %s (%s)", tc.name, err)
+ }
+ if !equal(x, &y) {
+ t.Errorf("restored index doesn't match saved index %s", tc.name)
}
}
for _, tc := range testCases {
x := New([]byte(tc.source))
testConstruction(t, &tc, x)
+ testSaveRestore(t, &tc, x)
testLookups(t, &tc, x, 0)
testLookups(t, &tc, x, 1)
testLookups(t, &tc, x, 10)