"errors"
"fmt"
"git.curoverse.com/arvados.git/sdk/go/blockdigest"
+ "path"
"regexp"
+ "sort"
"strconv"
"strings"
)
// ErrInvalidToken is returned when a manifest token cannot be parsed.
var ErrInvalidToken = errors.New("Invalid token")
-var LocatorPattern = regexp.MustCompile(
- "^[0-9a-fA-F]{32}\\+[0-9]+(\\+[A-Z][A-Za-z0-9@_-]+)*$")
-
// Manifest wraps the text of a Keep manifest together with any error
// encountered while handling it.
type Manifest struct {
	Text string
	Err error
	// Hints associated with the manifest -- presumably locator hint
	// tokens; confirm against callers (not visible in this chunk).
	Hints []string
}
-type DataSegment struct {
- BlockLocator
- Locator string
- StreamOffset uint64
-}
-
// FileSegment is a portion of a file that is contained within a
// single block.
type FileSegment struct {
// ManifestStream represents a single stream (one line) of a manifest:
// the stream name, its data blocks, and the file segments that
// reference data within those blocks.
type ManifestStream struct {
	StreamName string
	Blocks []string
	// blockOffsets[i] is the offset within the stream at which
	// Blocks[i] begins; the final extra entry is the total stream
	// length (see the offset table built in parseManifestStream).
	blockOffsets []uint64
	FileStreamSegments []FileStreamSegment
	Err error
}
// segmentedFile is the ordered list of segments that make up a single
// file's content.
type segmentedFile []FileSegment

// segmentedStream maps a file name to the segments holding that
// file's content within one stream.
type segmentedStream map[string]segmentedFile

// segmentedManifest maps a stream name to its files, covering the
// whole manifest.
type segmentedManifest map[string]segmentedStream
+
// escapeSeq matches backslash-octal escapes ("\NNN") and escaped
// backslashes ("\\") in manifest names.
var escapeSeq = regexp.MustCompile(`\\([0-9]{3}|\\)`)
func unescapeSeq(seq string) string {
return string([]byte{byte(i)})
}
// EscapeName renders a stream or file name for inclusion in manifest
// text, replacing every byte <= 32 (control characters and space)
// with a backslash-octal escape such as "\040".
func EscapeName(s string) string {
	out := make([]byte, 0, len(s))
	for i := 0; i < len(s); i++ {
		c := s[i]
		if c > 32 {
			out = append(out, c)
			continue
		}
		out = append(out, fmt.Sprintf("\\%03o", c)...)
	}
	return string(out)
}
+
// UnescapeName decodes backslash-octal escape sequences (e.g. "\040")
// in a manifest name, reversing EscapeName.
func UnescapeName(s string) string {
	return escapeSeq.ReplaceAllStringFunc(s, unescapeSeq)
}
func ParseBlockLocator(s string) (b BlockLocator, err error) {
- if !LocatorPattern.MatchString(s) {
+ if !blockdigest.LocatorPattern.MatchString(s) {
err = fmt.Errorf("String \"%s\" does not match BlockLocator pattern "+
"\"%s\".",
s,
- LocatorPattern.String())
+ blockdigest.LocatorPattern.String())
} else {
tokens := strings.Split(s, "+")
var blockSize int64
}
func (s *ManifestStream) FileSegmentIterByName(filepath string) <-chan *FileSegment {
- ch := make(chan *FileSegment)
+ ch := make(chan *FileSegment, 64)
go func() {
s.sendFileSegmentIterByName(filepath, ch)
close(ch)
return ch
}
// firstBlock returns the index of the block whose span contains
// rangeStart. offsets holds the stream offset at which each block
// begins, plus one final entry equal to the total stream length, so
// block i covers the half-open interval [offsets[i], offsets[i+1]).
// Returns -1 if rangeStart falls outside every block. Assumes the
// blocks are contiguous (no gaps between offsets).
func firstBlock(offsets []uint64, rangeStart uint64) int {
	// Binary search: lo and hi bracket the candidate block indexes.
	// rangeStart is the inclusive lower bound of the wanted range;
	// each block's end offset is an exclusive upper bound.
	lo := 0
	hi := len(offsets) - 1
	i := (hi + lo) / 2
	blockStart := offsets[i]
	blockEnd := offsets[i+1]

	// Because the blocks are contiguous, rangeStart is guaranteed to
	// either fall inside some block or lie outside the whole stream.
	for !(rangeStart >= blockStart && rangeStart < blockEnd) {
		if lo == i {
			// Search space exhausted: rangeStart is out of range.
			return -1
		}
		if rangeStart > blockStart {
			lo = i
		} else {
			hi = i
		}
		i = (hi + lo) / 2
		blockStart = offsets[i]
		blockEnd = offsets[i+1]
	}
	return i
}
+
func (s *ManifestStream) sendFileSegmentIterByName(filepath string, ch chan<- *FileSegment) {
- blockLens := make([]int, 0, len(s.Blocks))
// This is what streamName+"/"+fileName will look like:
- target := "./" + filepath
+ target := fixStreamName(filepath)
for _, fTok := range s.FileStreamSegments {
wantPos := fTok.SegPos
wantLen := fTok.SegLen
ch <- &FileSegment{Locator: "d41d8cd98f00b204e9800998ecf8427e+0", Offset: 0, Len: 0}
continue
}
- // Linear search for blocks containing data for this
- // file
- var blockPos uint64 = 0 // position of block in stream
- for i, loc := range s.Blocks {
+
+ // Binary search to determine first block in the stream
+ i := firstBlock(s.blockOffsets, wantPos)
+ if i == -1 {
+ // Shouldn't happen, file segments are checked in parseManifestStream
+ panic(fmt.Sprintf("File segment %v extends past end of stream", fTok))
+ }
+ for ; i < len(s.Blocks); i++ {
+ blockPos := s.blockOffsets[i]
+ blockEnd := s.blockOffsets[i+1]
+ if blockEnd <= wantPos {
+ // Shouldn't happen, FirstBlock() should start
+ // us on the right block, so if this triggers
+ // that means there is a bug.
+ panic(fmt.Sprintf("Block end %v comes before start of file segment %v", blockEnd, wantPos))
+ }
if blockPos >= wantPos+wantLen {
+ // current block comes after current file span
break
}
- if len(blockLens) <= i {
- blockLens = blockLens[:i+1]
- b, err := ParseBlockLocator(loc)
- if err != nil {
- // Unparseable locator -> unusable
- // stream.
- ch <- nil
- return
- }
- blockLens[i] = b.Size
- }
- blockLen := uint64(blockLens[i])
- if blockPos+blockLen <= wantPos {
- blockPos += blockLen
- continue
- }
+
fseg := FileSegment{
- Locator: loc,
+ Locator: s.Blocks[i],
Offset: 0,
- Len: blockLens[i],
+ Len: int(blockEnd - blockPos),
}
if blockPos < wantPos {
fseg.Offset = int(wantPos - blockPos)
fseg.Len -= fseg.Offset
}
- if blockPos+blockLen > wantPos+wantLen {
+ if blockEnd > wantPos+wantLen {
fseg.Len = int(wantPos+wantLen-blockPos) - fseg.Offset
}
ch <- &fseg
- blockPos += blockLen
}
}
}
return
}
+ m.blockOffsets = make([]uint64, len(m.Blocks)+1)
+ var streamoffset uint64
+ for i, b := range m.Blocks {
+ bl, err := ParseBlockLocator(b)
+ if err != nil {
+ m.Err = err
+ return
+ }
+ m.blockOffsets[i] = streamoffset
+ streamoffset += uint64(bl.Size)
+ }
+ m.blockOffsets[len(m.Blocks)] = streamoffset
+
if len(fileTokens) == 0 {
m.Err = fmt.Errorf("No file tokens found")
return
m.Err = fmt.Errorf("Invalid file token: %s", ft)
break
}
+ if pft.SegPos+pft.SegLen > streamoffset {
+ m.Err = fmt.Errorf("File segment %s extends past end of stream %d", ft, streamoffset)
+ break
+ }
m.FileStreamSegments = append(m.FileStreamSegments, pft)
}
return
}
// fixStreamName canonicalizes a stream name: the path is cleaned and
// expressed relative to ".", so "/foo", "foo" and "foo/./" all become
// "./foo", while "" and "." become ".".
func fixStreamName(sn string) string {
	switch cleaned := path.Clean(sn); {
	case strings.HasPrefix(cleaned, "/"):
		return "." + cleaned
	case cleaned == ".":
		return cleaned
	default:
		return "./" + cleaned
	}
}
+
// splitPath splits srcpath at its final "/" into a stream name and a
// file name. When srcpath contains no "/", the entire string is
// treated as the stream name and the file name is empty.
func splitPath(srcpath string) (streamname, filename string) {
	i := strings.LastIndex(srcpath, "/")
	if i < 0 {
		return srcpath, ""
	}
	return srcpath[:i], srcpath[i+1:]
}
+
// segment parses the manifest into a segmentedManifest: a map of
// stream name -> file name -> ordered file segments. Returns the
// first stream error encountered, if any.
func (m *Manifest) segment() (*segmentedManifest, error) {
	files := make(segmentedManifest)

	for stream := range m.StreamIter() {
		if stream.Err != nil {
			// Stream has an error
			return nil, stream.Err
		}
		// Tracks file paths already processed within this stream, so
		// a file that appears in several file tokens is only iterated
		// once (FileSegmentIterByName yields all of its segments).
		currentStreamfiles := make(map[string]bool)
		for _, f := range stream.FileStreamSegments {
			sn := stream.StreamName
			if strings.HasSuffix(sn, "/") {
				sn = sn[0 : len(sn)-1]
			}
			path := sn + "/" + f.Name
			// A file name containing "/" moves the file into a
			// sub-stream, so re-split the combined path.
			streamname, filename := splitPath(path)
			if files[streamname] == nil {
				files[streamname] = make(segmentedStream)
			}
			if !currentStreamfiles[path] {
				segs := files[streamname][filename]
				for seg := range stream.FileSegmentIterByName(path) {
					// Zero-length segments carry no data; drop them.
					if seg.Len > 0 {
						segs = append(segs, *seg)
					}
				}
				files[streamname][filename] = segs
				currentStreamfiles[path] = true
			}
		}
	}

	return &files, nil
}
+
+func (stream segmentedStream) normalizedText(name string) string {
+ var sortedfiles []string
+ for k, _ := range stream {
+ sortedfiles = append(sortedfiles, k)
+ }
+ sort.Strings(sortedfiles)
+
+ stream_tokens := []string{EscapeName(name)}
+
+ blocks := make(map[string]int64)
+ var streamoffset int64
+
+ // Go through each file and add each referenced block exactly once.
+ for _, streamfile := range sortedfiles {
+ for _, segment := range stream[streamfile] {
+ if _, ok := blocks[segment.Locator]; !ok {
+ stream_tokens = append(stream_tokens, segment.Locator)
+ blocks[segment.Locator] = streamoffset
+ b, _ := ParseBlockLocator(segment.Locator)
+ streamoffset += int64(b.Size)
+ }
+ }
+ }
+
+ if len(stream_tokens) == 1 {
+ stream_tokens = append(stream_tokens, "d41d8cd98f00b204e9800998ecf8427e+0")
+ }
+
+ for _, streamfile := range sortedfiles {
+ // Add in file segments
+ span_start := int64(-1)
+ span_end := int64(0)
+ fout := EscapeName(streamfile)
+ for _, segment := range stream[streamfile] {
+ // Collapse adjacent segments
+ streamoffset = blocks[segment.Locator] + int64(segment.Offset)
+ if span_start == -1 {
+ span_start = streamoffset
+ span_end = streamoffset + int64(segment.Len)
+ } else {
+ if streamoffset == span_end {
+ span_end += int64(segment.Len)
+ } else {
+ stream_tokens = append(stream_tokens, fmt.Sprintf("%d:%d:%s", span_start, span_end-span_start, fout))
+ span_start = streamoffset
+ span_end = streamoffset + int64(segment.Len)
+ }
+ }
+ }
+
+ if span_start != -1 {
+ stream_tokens = append(stream_tokens, fmt.Sprintf("%d:%d:%s", span_start, span_end-span_start, fout))
+ }
+
+ if len(stream[streamfile]) == 0 {
+ stream_tokens = append(stream_tokens, fmt.Sprintf("0:0:%s", fout))
+ }
+ }
+
+ return strings.Join(stream_tokens, " ") + "\n"
+}
+
+func (m segmentedManifest) manifestTextForPath(srcpath, relocate string) string {
+ srcpath = fixStreamName(srcpath)
+
+ var suffix string
+ if strings.HasSuffix(relocate, "/") {
+ suffix = "/"
+ }
+ relocate = fixStreamName(relocate) + suffix
+
+ streamname, filename := splitPath(srcpath)
+
+ if stream, ok := m[streamname]; ok {
+ // check if it refers to a single file in a stream
+ filesegs, okfile := stream[filename]
+ if okfile {
+ newstream := make(segmentedStream)
+ relocate_stream, relocate_filename := splitPath(relocate)
+ if relocate_filename == "" {
+ relocate_filename = filename
+ }
+ newstream[relocate_filename] = filesegs
+ return newstream.normalizedText(relocate_stream)
+ }
+ }
+
+ // Going to extract multiple streams
+ prefix := srcpath + "/"
+
+ if strings.HasSuffix(relocate, "/") {
+ relocate = relocate[0 : len(relocate)-1]
+ }
+
+ var sortedstreams []string
+ for k, _ := range m {
+ sortedstreams = append(sortedstreams, k)
+ }
+ sort.Strings(sortedstreams)
+
+ manifest := ""
+ for _, k := range sortedstreams {
+ if strings.HasPrefix(k, prefix) || k == srcpath {
+ manifest += m[k].normalizedText(relocate + k[len(srcpath):])
+ }
+ }
+ return manifest
+}
+
+// Extract extracts some or all of the manifest and returns the extracted
+// portion as a normalized manifest. This is a swiss army knife function that
+// can be several ways:
+//
+// If 'srcpath' and 'relocate' are '.' it simply returns an equivalent manifest
+// in normalized form.
+//
+// Extract(".", ".") // return entire normalized manfest text
+//
+// If 'srcpath' points to a single file, it will return manifest text for just that file.
+// The value of "relocate" is can be used to rename the file or set the file stream.
+//
+// Extract("./foo", ".") // extract file "foo" and put it in stream "."
+// Extract("./foo", "./bar") // extract file "foo", rename it to "bar" in stream "."
+// Extract("./foo", "./bar/") // extract file "foo", rename it to "./bar/foo"
+// Extract("./foo", "./bar/baz") // extract file "foo", rename it to "./bar/baz")
+//
+// Otherwise it will return the manifest text for all streams with the prefix in "srcpath" and place
+// them under the path in "relocate".
+//
+// Extract("./stream", ".") // extract "./stream" to "." and "./stream/subdir" to "./subdir")
+// Extract("./stream", "./bar") // extract "./stream" to "./bar" and "./stream/subdir" to "./bar/subdir")
+func (m Manifest) Extract(srcpath, relocate string) (ret Manifest) {
+ segmented, err := m.segment()
+ if err != nil {
+ ret.Err = err
+ return
+ }
+ ret.Text = segmented.manifestTextForPath(srcpath, relocate)
+ return
+}
+
func (m *Manifest) StreamIter() <-chan ManifestStream {
ch := make(chan ManifestStream)
go func(input string) {
}
func (m *Manifest) FileSegmentIterByName(filepath string) <-chan *FileSegment {
- ch := make(chan *FileSegment)
+ ch := make(chan *FileSegment, 64)
+ filepath = fixStreamName(filepath)
go func() {
for stream := range m.StreamIter() {
- if !strings.HasPrefix("./"+filepath, stream.StreamName+"/") {
+ if !strings.HasPrefix(filepath, stream.StreamName+"/") {
continue
}
stream.sendFileSegmentIterByName(filepath, ch)
package manifest
import (
+ "fmt"
+ "git.curoverse.com/arvados.git/sdk/go/arvadostest"
+ "git.curoverse.com/arvados.git/sdk/go/blockdigest"
"io/ioutil"
"reflect"
"regexp"
"runtime"
"testing"
-
- "git.curoverse.com/arvados.git/sdk/go/arvadostest"
- "git.curoverse.com/arvados.git/sdk/go/blockdigest"
)
func getStackTrace() string {
firstStream,
ManifestStream{StreamName: ".",
Blocks: []string{"b746e3d2104645f2f64cd3cc69dd895d+15693477+E2866e643690156651c03d876e638e674dcd79475@5441920c"},
- FileStreamSegments: []FileStreamSegment{{0, 15893477, "chr10_band0_s0_e3000000.fj"}}})
+ FileStreamSegments: []FileStreamSegment{{0, 15693477, "chr10_band0_s0_e3000000.fj"}}})
received, ok := <-streamIter
if ok {
}
}
}
+
// TestNormalizeManifest exercises Manifest.Extract across a range of
// manifests: block merging, file renaming and relocation, stream
// prefix extraction, escaped names, pathological inputs, and error
// reporting.
func TestNormalizeManifest(t *testing.T) {
	// Three blocks referenced by one file collapse into a single span.
	m1 := Manifest{Text: `. 5348b82a029fd9e971a811ce1f71360b+43 0:43:md5sum.txt
. 085c37f02916da1cad16f93c54d899b7+41 0:41:md5sum.txt
. 8b22da26f9f433dea0a10e5ec66d73ba+43 0:43:md5sum.txt
`}
	expectEqual(t, m1.Extract(".", ".").Text,
		`. 5348b82a029fd9e971a811ce1f71360b+43 085c37f02916da1cad16f93c54d899b7+41 8b22da26f9f433dea0a10e5ec66d73ba+43 0:127:md5sum.txt
`)

	// An already-normalized manifest is returned unchanged.
	m2 := Manifest{Text: `. 204e43b8a1185621ca55a94839582e6f+67108864 b9677abbac956bd3e86b1deb28dfac03+67108864 fc15aff2a762b13f521baf042140acec+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:227212247:var-GS000016015-ASM.tsv.bz2
`}
	expectEqual(t, m2.Extract(".", ".").Text, m2.Text)

	// A nonzero starting offset into the first block is preserved, and
	// a file can be extracted and renamed.
	m3 := Manifest{Text: `. 5348b82a029fd9e971a811ce1f71360b+43 3:40:md5sum.txt
. 085c37f02916da1cad16f93c54d899b7+41 0:41:md5sum.txt
. 8b22da26f9f433dea0a10e5ec66d73ba+43 0:43:md5sum.txt
`}
	expectEqual(t, m3.Extract(".", ".").Text, `. 5348b82a029fd9e971a811ce1f71360b+43 085c37f02916da1cad16f93c54d899b7+41 8b22da26f9f433dea0a10e5ec66d73ba+43 3:124:md5sum.txt
`)
	expectEqual(t, m3.Extract("/md5sum.txt", "/wiggle.txt").Text, `. 5348b82a029fd9e971a811ce1f71360b+43 085c37f02916da1cad16f93c54d899b7+41 8b22da26f9f433dea0a10e5ec66d73ba+43 3:124:wiggle.txt
`)

	// A file named "foo/bar" in stream "." normalizes into stream
	// "./foo"; streams come out in sorted order.
	m4 := Manifest{Text: `. 204e43b8a1185621ca55a94839582e6f+67108864 0:3:foo/bar
./zzz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
./foo 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar
`}

	expectEqual(t, m4.Extract(".", ".").Text,
		`./foo 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar 67108864:3:bar
./zzz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
`)

	// Single-stream and single-file extraction, with and without
	// renaming/relocation (see Extract's doc comment for semantics).
	expectEqual(t, m4.Extract("./foo", ".").Text, ". 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar 67108864:3:bar\n")
	expectEqual(t, m4.Extract("./foo", "./baz").Text, "./baz 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar 67108864:3:bar\n")
	expectEqual(t, m4.Extract("./foo/bar", ".").Text, ". 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar 67108864:3:bar\n")
	expectEqual(t, m4.Extract("./foo/bar", "./baz").Text, ". 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:baz 67108864:3:baz\n")
	expectEqual(t, m4.Extract("./foo/bar", "./quux/").Text, "./quux 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar 67108864:3:bar\n")
	expectEqual(t, m4.Extract("./foo/bar", "./quux/baz").Text, "./quux 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:baz 67108864:3:baz\n")
	expectEqual(t, m4.Extract(".", ".").Text, `./foo 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar 67108864:3:bar
./zzz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
`)
	expectEqual(t, m4.Extract(".", "./zip").Text, `./zip/foo 204e43b8a1185621ca55a94839582e6f+67108864 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar 67108864:3:bar
./zip/zzz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
`)

	// Messy relative paths are cleaned before matching.
	expectEqual(t, m4.Extract("foo/.//bar/../../zzz/", "/waz/").Text, `./waz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
`)

	// Adjacent segments within the same block merge into one span.
	m5 := Manifest{Text: `. 204e43b8a1185621ca55a94839582e6f+67108864 0:3:foo/bar
./zzz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
./foo 204e43b8a1185621ca55a94839582e6f+67108864 3:3:bar
`}
	expectEqual(t, m5.Extract(".", ".").Text,
		`./foo 204e43b8a1185621ca55a94839582e6f+67108864 0:6:bar
./zzz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
`)

	// Escaped names ("\040" = space) survive normalization verbatim.
	m8 := Manifest{Text: `./a\040b\040c 59ca0efa9f5633cb0371bbc0355478d8+13 0:13:hello\040world.txt
`}
	expectEqual(t, m8.Extract(".", ".").Text, m8.Text)

	// Interleaved file tokens for the same files are merged per file.
	m9 := Manifest{Text: ". acbd18db4cc2f85cedef654fccc4a4d8+40 0:10:one 20:10:two 10:10:one 30:10:two\n"}
	expectEqual(t, m9.Extract("", "").Text, ". acbd18db4cc2f85cedef654fccc4a4d8+40 0:20:one 20:20:two\n")

	m10 := Manifest{Text: ". acbd18db4cc2f85cedef654fccc4a4d8+40 0:10:one 20:10:two 10:10:one 30:10:two\n"}
	expectEqual(t, m10.Extract("./two", "./three").Text, ". acbd18db4cc2f85cedef654fccc4a4d8+40 20:20:three\n")

	// The shared pathological manifest fixture normalizes cleanly.
	m11 := Manifest{Text: arvadostest.PathologicalManifest}
	expectEqual(t, m11.Extract(".", ".").Text, `. acbd18db4cc2f85cedef654fccc4a4d8+3 37b51d194a7513e45b56f6524f2d51f2+3 73feffa4b7f6bb68e44cf984c85f6e88+3+Z+K@xyzzy 0:1:f 1:4:ooba 5:1:r 5:4:rbaz 0:0:zero@0 0:0:zero@1 0:0:zero@4 0:0:zero@9
./foo acbd18db4cc2f85cedef654fccc4a4d8+3 0:3:foo 0:3:foo 0:0:zero
./foo\040bar acbd18db4cc2f85cedef654fccc4a4d8+3 0:3:baz 0:3:baz\040waz
./overlapReverse acbd18db4cc2f85cedef654fccc4a4d8+3 2:1:o 2:1:ofoo 0:3:ofoo 1:2:oo
./segmented acbd18db4cc2f85cedef654fccc4a4d8+3 37b51d194a7513e45b56f6524f2d51f2+3 0:1:frob 5:1:frob 1:1:frob 3:1:frob 1:2:oof 0:1:oof
`)

	// Stream-prefix extraction relocates sub-streams too.
	m12 := Manifest{Text: `./foo 204e43b8a1185621ca55a94839582e6f+67108864 0:3:bar
./zzz 204e43b8a1185621ca55a94839582e6f+67108864 0:999:zzz
./foo/baz 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar
`}

	expectEqual(t, m12.Extract("./foo", ".").Text, `. 204e43b8a1185621ca55a94839582e6f+67108864 0:3:bar
./baz 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar
`)
	expectEqual(t, m12.Extract("./foo", "./blub").Text, `./blub 204e43b8a1185621ca55a94839582e6f+67108864 0:3:bar
./blub/baz 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar
`)
	expectEqual(t, m12.Extract("./foo", "./blub/").Text, `./blub 204e43b8a1185621ca55a94839582e6f+67108864 0:3:bar
./blub/baz 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar
`)
	expectEqual(t, m12.Extract("./foo/", "./blub/").Text, `./blub 204e43b8a1185621ca55a94839582e6f+67108864 0:3:bar
./blub/baz 323d2a3ce20370c4ca1d3462a344f8fd+25885655 0:3:bar
`)

	// Error cases: each yields empty Text and a descriptive Err.
	m13 := Manifest{Text: `foo 204e43b8a1185621ca55a94839582e6f+67108864 0:3:bar
`}

	expectEqual(t, m13.Extract(".", ".").Text, ``)
	expectEqual(t, m13.Extract(".", ".").Err.Error(), "Invalid stream name: foo")

	m14 := Manifest{Text: `./foo 204e43b8a1185621ca55a94839582e6f+67108864 67108863:3:bar
`}

	expectEqual(t, m14.Extract(".", ".").Text, ``)
	expectEqual(t, m14.Extract(".", ".").Err.Error(), "File segment 67108863:3:bar extends past end of stream 67108864")

	m15 := Manifest{Text: `./foo 204e43b8a1185621ca55a94839582e6f+67108864 0:3bar
`}

	expectEqual(t, m15.Extract(".", ".").Text, ``)
	expectEqual(t, m15.Extract(".", ".").Err.Error(), "Invalid file token: 0:3bar")
}
+
+func TestFirstBlock(t *testing.T) {
+ fmt.Println("ZZZ")
+ expectEqual(t, firstBlock([]uint64{1, 2, 3, 4}, 3), 2)
+ expectEqual(t, firstBlock([]uint64{1, 2, 3, 4, 5, 6}, 4), 3)
+}