1 // Copyright (C) The Lightning Authors. All rights reserved.
3 // SPDX-License-Identifier: AGPL-3.0
23 "git.arvados.org/arvados.git/sdk/go/arvados"
24 "github.com/arvados/lightning/hgvs"
25 "github.com/kshedden/gonpy"
26 log "github.com/sirupsen/logrus"
27 "golang.org/x/crypto/blake2b"
// sliceNumpy implements the "slice-numpy" subcommand (see RunCommand
// below). Field list elided in this listing — it includes at least
// `threads int` and a `filter` value providing Flags()/Args()/MatchGenome,
// as used by RunCommand. TODO(review): confirm full field set upstream.
30 type sliceNumpy struct {
// RunCommand implements the "slice-numpy" subcommand: it reads tile-library
// gob slices from -input-dir, picks a reference sequence, renumbers/dedups
// tile variants per slice, writes one numpy matrix and one annotations CSV
// per input slice to -output-dir, and optionally (-merge-output) merges them
// into a single matrix.npy + matrix.annotations.csv. Returns a process exit
// code. Unless -local is given, it re-submits itself as an Arvados container.
//
// NOTE(review): this is a sampled listing — many original lines are elided
// between the numbered statements below (error checks, closing braces,
// assignments to err/rows/cols/outcol, etc.). Comments describe only what
// the visible code shows; do not assume elided behavior.
35 func (cmd *sliceNumpy) RunCommand(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) int {
// Deferred/elided error reporting path: any err set below is printed here.
39 fmt.Fprintf(stderr, "%s\n", err)
// --- Flag definitions ---
42 flags := flag.NewFlagSet("", flag.ContinueOnError)
43 flags.SetOutput(stderr)
44 pprof := flags.String("pprof", "", "serve Go profile data at http://`[addr]:port`")
45 runlocal := flags.Bool("local", false, "run on local host (default: run in an arvados container)")
46 projectUUID := flags.String("project", "", "project `UUID` for output data")
47 priority := flags.Int("priority", 500, "container request priority")
48 inputDir := flags.String("input-dir", "./in", "input `directory`")
49 outputDir := flags.String("output-dir", "./out", "output `directory`")
50 ref := flags.String("ref", "", "reference name (if blank, choose last one that appears in input)")
51 regionsFilename := flags.String("regions", "", "only output columns/annotations that intersect regions in specified bed `file`")
// NOTE(review): stray trailing backtick in this usage string ("each side`");
// should read "...on each side". Cannot be fixed in a comments-only edit.
52 expandRegions := flags.Int("expand-regions", 0, "expand specified regions by `N` base pairs on each side`")
53 mergeOutput := flags.Bool("merge-output", false, "merge output into one matrix.npy and one matrix.annotations.csv")
54 flags.IntVar(&cmd.threads, "threads", 16, "number of memory-hungry assembly threads")
55 cmd.filter.Flags(flags)
56 err = flags.Parse(args)
57 if err == flag.ErrHelp {
60 } else if err != nil {
// Optional profiling endpoint; ListenAndServe error is logged, not fatal.
66 log.Println(http.ListenAndServe(*pprof, nil))
// --- Non-local mode: resubmit this command as an Arvados container ---
71 runner := arvadosContainerRunner{
72 Name: "lightning slice-numpy",
73 Client: arvados.NewClientFromEnv(),
74 ProjectUUID: *projectUUID,
// Rewrite local paths to container mount paths before passing them along.
81 err = runner.TranslatePaths(inputDir, regionsFilename)
85 runner.Args = []string{"slice-numpy", "-local=true",
87 "-input-dir=" + *inputDir,
88 "-output-dir=/mnt/output",
89 "-threads=" + fmt.Sprintf("%d", cmd.threads),
90 "-regions=" + *regionsFilename,
91 "-expand-regions=" + fmt.Sprintf("%d", *expandRegions),
92 "-merge-output=" + fmt.Sprintf("%v", *mergeOutput),
94 runner.Args = append(runner.Args, cmd.filter.Args()...)
96 output, err = runner.Run()
100 fmt.Fprintln(stdout, output)
// --- Local mode: enumerate and sort input gob files ---
104 infiles, err := allGobFiles(*inputDir)
108 if len(infiles) == 0 {
109 err = fmt.Errorf("no input files found in %s", *inputDir)
112 sort.Strings(infiles)
// refseq: chromosome name -> ordered tile refs; reftiledata: tile sequences
// for the reference (capacity hint ~11M entries — sized for a whole genome).
115 var refseq map[string][]tileLibRef
116 var reftiledata = make(map[tileLibRef][]byte, 11000000)
// First input file supplies the tagset, reference sequence, and genome names.
117 in0, err := open(infiles[0])
122 matchGenome, err := regexp.Compile(cmd.filter.MatchGenome)
124 err = fmt.Errorf("-match-genome: invalid regexp: %q", cmd.filter.MatchGenome)
129 DecodeLibrary(in0, strings.HasSuffix(infiles[0], ".gz"), func(ent *LibraryEntry) error {
130 if len(ent.TagSet) > 0 {
131 taglen = len(ent.TagSet[0])
// If -ref is blank this keeps overwriting refseq, so the last matching
// CompactSequence in the stream wins (as documented in the -ref usage).
133 for _, cseq := range ent.CompactSequences {
134 if cseq.Name == *ref || *ref == "" {
135 refseq = cseq.TileSequences
138 for _, cg := range ent.CompactGenomes {
139 if matchGenome.MatchString(cg.Name) {
140 cgnames = append(cgnames, cg.Name)
143 for _, tv := range ent.TileVariants {
145 reftiledata[tileLibRef{tv.Tag, tv.Variant}] = tv.Sequence
// --- Sanity checks on what the first file provided ---
155 err = fmt.Errorf("%s: reference sequence not found", infiles[0])
159 err = fmt.Errorf("tagset not found")
162 if len(cgnames) == 0 {
163 err = fmt.Errorf("no genomes found matching regexp %q", cmd.filter.MatchGenome)
// Sort genome names so row order in the output matrix is deterministic.
166 sort.Strings(cgnames)
// --- Write labels.csv: matrix row index -> genome label ---
169 labelsFilename := *outputDir + "/labels.csv"
170 log.Infof("writing labels to %s", labelsFilename)
172 f, err = os.Create(labelsFilename)
177 for i, name := range cgnames {
178 _, err = fmt.Fprintf(f, "%d,%q\n", i, trimFilenameForLabel(name))
180 err = fmt.Errorf("write %s: %w", labelsFilename, err)
186 err = fmt.Errorf("close %s: %w", labelsFilename, err)
// --- Index reference tiles: tag -> (variant, chromosome, position, seq) ---
191 log.Info("indexing reference tiles")
192 type reftileinfo struct {
193 variant tileVariantID
194 seqname string // chr1
195 pos int // distance from start of chromosome to starttag
196 tiledata []byte // acgtggcaa...
// A tag appearing in more than one reference position is ambiguous; such
// tags are recorded in isdup and excluded from reftile.
198 isdup := map[tagID]bool{}
199 reftile := map[tagID]*reftileinfo{}
200 for seqname, cseq := range refseq {
202 for _, libref := range cseq {
203 tiledata := reftiledata[libref]
204 if len(tiledata) == 0 {
205 err = fmt.Errorf("missing tiledata for tag %d variant %d in %s in ref", libref.Tag, libref.Variant, seqname)
208 if isdup[libref.Tag] {
209 log.Printf("dropping reference tile %+v from %s, tag not unique", libref, seqname)
// Duplicate discovered now: log and drop BOTH the previously stored tile
// and the current one, then mark the tag as dup (intentional double log).
210 } else if reftile[libref.Tag] != nil {
211 log.Printf("dropping reference tile %+v from %s, tag not unique", tileLibRef{Tag: libref.Tag, Variant: reftile[libref.Tag].variant}, reftile[libref.Tag].seqname)
212 delete(reftile, libref.Tag)
213 log.Printf("dropping reference tile %+v from %s, tag not unique", libref, seqname)
214 isdup[libref.Tag] = true
216 reftile[libref.Tag] = &reftileinfo{
218 variant: libref.Variant,
// Adjacent tiles overlap by taglen bases, hence the subtraction.
223 pos += len(tiledata) - taglen
225 log.Printf("... %s done, len %d", seqname, pos+taglen)
// --- Optional region mask: keep only reftile entries inside BED regions ---
229 if *regionsFilename != "" {
230 log.Printf("loading regions from %s", *regionsFilename)
231 mask, err = makeMask(*regionsFilename, *expandRegions)
235 log.Printf("before applying mask, len(reftile) == %d", len(reftile))
236 log.Printf("deleting reftile entries for regions outside %d intervals", mask.Len())
237 for tag, rt := range reftile {
// Mask coordinates use bare chromosome names, so strip a "chr" prefix.
238 if !mask.Check(strings.TrimPrefix(rt.seqname, "chr"), rt.pos, rt.pos+len(rt.tiledata)) {
242 log.Printf("after applying mask, len(reftile) == %d", len(reftile))
// toMerge holds per-slice matrix fragments when -merge-output is on.
245 var toMerge [][]int16
247 toMerge = make([][]int16, len(infiles))
// Memory-hungry work is bounded by -threads; numpy assembly uses about half.
250 throttleMem := throttle{Max: cmd.threads} // TODO: estimate using mem and data size
251 throttleNumpyMem := throttle{Max: cmd.threads/2 + 1}
252 log.Info("generating annotations and numpy matrix for each slice")
254 for infileIdx, infile := range infiles {
// Shadow loop vars for the closure (required pre-Go 1.22 semantics).
255 infileIdx, infile := infileIdx, infile
256 throttleMem.Go(func() error {
// Per-slice state: tag -> variants (indexed by variant ID), and the
// compact genomes restricted to the names chosen above.
257 seq := make(map[tagID][]TileVariant, 50000)
258 cgs := make(map[string]CompactGenome, len(cgnames))
259 f, err := open(infile)
264 log.Infof("%04d: reading %s", infileIdx, infile)
265 err = DecodeLibrary(f, strings.HasSuffix(infile, ".gz"), func(ent *LibraryEntry) error {
266 for _, tv := range ent.TileVariants {
// When a mask is active, skip tiles whose tag has no reference entry.
270 if mask != nil && reftile[tv.Tag] == nil {
// Store tv at index tv.Variant, growing the slice as needed (initial
// capacity 100 to limit reallocation).
276 variants := seq[tv.Tag]
277 if len(variants) == 0 {
278 variants = make([]TileVariant, 100)
280 for len(variants) <= int(tv.Variant) {
281 variants = append(variants, TileVariant{})
283 variants[int(tv.Variant)] = tv
284 seq[tv.Tag] = variants
286 for _, cg := range ent.CompactGenomes {
287 if matchGenome.MatchString(cg.Name) {
// All genomes in a slice cover the same tag range; read it from any one.
296 tagstart := cgs[cgnames[0]].StartTag
297 tagend := cgs[cgnames[0]].EndTag
// --- Renumber variants so IDs are ranked by popularity (dedup by hash) ---
301 log.Infof("%04d: renumber/dedup variants for tags %d-%d", infileIdx, tagstart, tagend)
302 variantRemap := make([][]tileVariantID, tagend-tagstart)
303 throttleCPU := throttle{Max: runtime.GOMAXPROCS(0)}
304 for tag, variants := range seq {
305 tag, variants := tag, variants
306 throttleCPU.Acquire()
308 defer throttleCPU.Release()
// Count how many alleles (2 per genome) use each distinct sequence hash.
309 count := make(map[[blake2b.Size256]byte]int, len(variants))
310 for _, cg := range cgs {
311 idx := int(tag-tagstart) * 2
312 if idx < len(cg.Variants) {
313 for allele := 0; allele < 2; allele++ {
314 v := cg.Variants[idx+allele]
315 if v > 0 && len(variants[v].Sequence) > 0 {
316 count[variants[v].Blake2b]++
321 // hash[i] will be the hash of
322 // the variant(s) that should
323 // be at rank i (0-based).
324 hash := make([][blake2b.Size256]byte, 0, len(count))
325 for b := range count {
326 hash = append(hash, b)
// Sort by descending count (elided branch), tie-broken by hash bytes so
// the ordering — and thus the renumbering — is deterministic.
328 sort.Slice(hash, func(i, j int) bool {
329 bi, bj := &hash[i], &hash[j]
330 if ci, cj := count[*bi], count[*bj]; ci != cj {
333 return bytes.Compare((*bi)[:], (*bj)[:]) < 0
336 // rank[b] will be the 1-based
337 // new variant number for
338 // variants whose hash is b.
339 rank := make(map[[blake2b.Size256]byte]tileVariantID, len(hash))
340 for i, h := range hash {
341 rank[h] = tileVariantID(i + 1)
343 // remap[v] will be the new
344 // variant number for original
346 remap := make([]tileVariantID, len(variants))
347 for i, tv := range variants {
348 remap[i] = rank[tv.Blake2b]
350 variantRemap[tag-tagstart] = remap
// --- Write per-slice annotations CSV (HGVS diffs vs reference tile) ---
355 annotationsFilename := fmt.Sprintf("%s/matrix.%04d.annotations.csv", *outputDir, infileIdx)
356 log.Infof("%04d: writing %s", infileIdx, annotationsFilename)
357 annof, err := os.Create(annotationsFilename)
361 annow := bufio.NewWriterSize(annof, 1<<20)
363 for tag := tagstart; tag < tagend; tag++ {
364 rt, ok := reftile[tag]
369 // Excluded by specified
370 // regions, or reference does
371 // not use any variant of this
372 // tile. (TODO: log this?
373 // mention it in annotations?)
376 variants, ok := seq[tag]
381 reftilestr := strings.ToUpper(string(rt.tiledata))
382 remap := variantRemap[tag-tagstart]
383 done := make([]bool, len(variants))
384 for v, tv := range variants {
// Skip variants that don't end with the reference tile's trailing tag
// (they can't be aligned to this reference tile for diffing).
391 if len(tv.Sequence) < taglen || !bytes.HasSuffix(rt.tiledata, tv.Sequence[len(tv.Sequence)-taglen:]) {
// Skip pathological length differences (>1000bp either way).
394 if lendiff := len(rt.tiledata) - len(tv.Sequence); lendiff < -1000 || lendiff > 1000 {
397 diffs, _ := hgvs.Diff(reftilestr, strings.ToUpper(string(tv.Sequence)), 0)
398 for _, diff := range diffs {
// Convert tile-relative position to chromosome coordinate.
399 diff.Position += rt.pos
400 fmt.Fprintf(annow, "%d,%d,%d,%s:g.%s,%s,%d,%s,%s,%s\n", tag, outcol, v, rt.seqname, diff.String(), rt.seqname, diff.Position, diff.Ref, diff.New, diff.Left)
// --- Build the slice's int16 matrix: one row per genome ---
414 log.Infof("%04d: preparing numpy", infileIdx)
415 throttleNumpyMem.Acquire()
418 out := make([]int16, rows*cols)
419 for row, name := range cgnames {
// Shadowed `out` is this row's view into the shared backing array.
420 out := out[row*cols:]
422 for col, v := range cgs[name].Variants {
// Two columns (alleles) per tag: tag index = col/2.
423 tag := tagstart + tagID(col/2)
424 if mask != nil && reftile[tag] == nil {
427 if variants, ok := seq[tag]; ok && len(variants) > int(v) && len(variants[v].Sequence) > 0 {
428 out[outcol] = int16(variantRemap[tag-tagstart][v])
436 throttleNumpyMem.Release()
439 log.Infof("%04d: matrix fragment %d rows x %d cols", infileIdx, rows, cols)
// Either stash the fragment for merging, or write it out now (elided
// branch selects between these based on -merge-output).
440 toMerge[infileIdx] = out
442 fnm := fmt.Sprintf("%s/matrix.%04d.npy", *outputDir, infileIdx)
443 err = writeNumpyInt16(fnm, out, rows, cols)
448 log.Infof("%s: done (%d/%d)", infile, int(atomic.AddInt64(&done, 1)), len(infiles))
// Wait for all per-slice goroutines; first error aborts.
452 if err = throttleMem.Wait(); err != nil {
// --- -merge-output: concatenate fragments column-wise ---
456 log.Info("merging output matrix and annotations")
458 annoFilename := fmt.Sprintf("%s/matrix.annotations.csv", *outputDir)
459 annof, err := os.Create(annoFilename)
463 annow := bufio.NewWriterSize(annof, 1<<20)
// Total columns = sum of per-fragment columns (all fragments share rows).
467 for _, chunk := range toMerge {
468 cols += len(chunk) / rows
470 out := make([]int16, rows*cols)
472 for outIdx, chunk := range toMerge {
473 chunkcols := len(chunk) / rows
474 for row := 0; row < rows; row++ {
475 copy(out[row*cols+startcol:], chunk[row*chunkcols:(row+1)*chunkcols])
// Release the fragment's memory as soon as it has been copied.
477 toMerge[outIdx] = nil
// Fold the per-slice annotations into the merged CSV, shifting each
// column index by startcol/2 (annotation columns count tags, not alleles),
// then delete the per-slice file.
479 annotationsFilename := fmt.Sprintf("%s/matrix.%04d.annotations.csv", *outputDir, outIdx)
480 log.Infof("reading %s", annotationsFilename)
481 buf, err := os.ReadFile(annotationsFilename)
485 err = os.Remove(annotationsFilename)
489 for _, line := range bytes.Split(buf, []byte{'\n'}) {
493 fields := bytes.SplitN(line, []byte{','}, 3)
// NOTE(review): Atoi error ignored — malformed CSV would silently map to
// column 0; acceptable only because this file was just written by us.
494 incol, _ := strconv.Atoi(string(fields[1]))
495 fmt.Fprintf(annow, "%s,%d,%s\n", fields[0], incol+startcol/2, fields[2])
498 startcol += chunkcols
508 err = writeNumpyInt16(fmt.Sprintf("%s/matrix.npy", *outputDir), out, rows, cols)
// writeNumpyInt16 writes out as a rows x cols int16 matrix in .npy format
// to file fnm, buffering writes in 64 MiB chunks. The gonpy writer wraps
// the bufio.Writer via nopCloser so closing the npy writer does not close
// the buffer prematurely; the file itself is closed (and its error
// returned) at the end. NOTE(review): the actual WriteInt16/Flush calls
// and their error checks are elided from this listing — verify upstream.
516 func writeNumpyInt16(fnm string, out []int16, rows, cols int) error {
517 output, err := os.Create(fnm)
522 bufw := bufio.NewWriterSize(output, 1<<26)
523 npw, err := gonpy.NewWriter(nopCloser{bufw})
527 log.WithFields(log.Fields{
531 }).Infof("writing numpy: %s", fnm)
// Shape header: rows x cols, row-major (gonpy default).
532 npw.Shape = []int{rows, cols}
538 return output.Close()