// Copyright (C) The Lightning Authors. All rights reserved.
// SPDX-License-Identifier: AGPL-3.0
"git.arvados.org/arvados.git/sdk/go/arvados"
"github.com/arvados/lightning/hgvs"
"github.com/james-bowman/nlp"
"github.com/kshedden/gonpy"
"github.com/sirupsen/logrus"
log "github.com/sirupsen/logrus"
"golang.org/x/crypto/blake2b"
"gonum.org/v1/gonum/mat"
const annotationMaxTileSpan = 100
type sliceNumpy struct {
chi2CaseControlColumn string
chi2CaseControlFile string
func (cmd *sliceNumpy) RunCommand(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) int {
err := cmd.run(prog, args, stdin, stdout, stderr)
fmt.Fprintf(stderr, "%s\n", err)
func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) error {
flags := flag.NewFlagSet("", flag.ContinueOnError)
flags.SetOutput(stderr)
pprof := flags.String("pprof", "", "serve Go profile data at http://`[addr]:port`")
runlocal := flags.Bool("local", false, "run on local host (default: run in an arvados container)")
projectUUID := flags.String("project", "", "project `UUID` for output data")
priority := flags.Int("priority", 500, "container request priority")
inputDir := flags.String("input-dir", "./in", "input `directory`")
outputDir := flags.String("output-dir", "./out", "output `directory`")
ref := flags.String("ref", "", "reference name (if blank, choose last one that appears in input)")
regionsFilename := flags.String("regions", "", "only output columns/annotations that intersect regions in specified bed `file`")
expandRegions := flags.Int("expand-regions", 0, "expand specified regions by `N` base pairs on each side")
mergeOutput := flags.Bool("merge-output", false, "merge output into one matrix.npy and one matrix.annotations.csv")
hgvsSingle := flags.Bool("single-hgvs-matrix", false, "also generate hgvs-based matrix")
hgvsChunked := flags.Bool("chunked-hgvs-matrix", false, "also generate hgvs-based matrix per chromosome")
onehotSingle := flags.Bool("single-onehot", false, "generate one-hot tile-based matrix")
onehotChunked := flags.Bool("chunked-onehot", false, "generate one-hot tile-based matrix per input chunk")
onlyPCA := flags.Bool("pca", false, "generate pca matrix")
pcaComponents := flags.Int("pca-components", 4, "number of PCA components")
debugTag := flags.Int("debug-tag", -1, "log debugging details about specified tag")
flags.IntVar(&cmd.threads, "threads", 16, "number of memory-hungry assembly threads")
flags.StringVar(&cmd.chi2CaseControlFile, "chi2-case-control-file", "", "tsv file or directory indicating cases and controls for Χ² test (if directory, all .tsv files will be read)")
flags.StringVar(&cmd.chi2CaseControlColumn, "chi2-case-control-column", "", "name of case/control column in case-control files for Χ² test (value must be 0 for control, 1 for case)")
flags.Float64Var(&cmd.chi2PValue, "chi2-p-value", 1, "do Χ² test and omit columns with p-value above this threshold")
flags.BoolVar(&cmd.includeVariant1, "include-variant-1", false, "include most common variant when building one-hot matrix")
cmd.filter.Flags(flags)
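// Example invocation with the flags defined above (hypothetical
// paths, local mode):
//
//	lightning slice-numpy -local=true -input-dir=/tmp/tilelib -output-dir=/tmp/out \
//		-single-onehot=true -pca=true -pca-components=4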
err := flags.Parse(args)
if err == flag.ErrHelp {
} else if err != nil {
log.Println(http.ListenAndServe(*pprof, nil))
if cmd.chi2PValue != 1 && (cmd.chi2CaseControlFile == "" || cmd.chi2CaseControlColumn == "") {
return fmt.Errorf("cannot use provided -chi2-p-value=%f because -chi2-case-control-file= or -chi2-case-control-column= value is empty", cmd.chi2PValue)
cmd.debugTag = tagID(*debugTag)
runner := arvadosContainerRunner{
Name: "lightning slice-numpy",
Client: arvados.NewClientFromEnv(),
ProjectUUID: *projectUUID,
err = runner.TranslatePaths(inputDir, regionsFilename, &cmd.chi2CaseControlFile)
runner.Args = []string{"slice-numpy", "-local=true",
"-input-dir=" + *inputDir,
"-output-dir=/mnt/output",
"-threads=" + fmt.Sprintf("%d", cmd.threads),
"-regions=" + *regionsFilename,
"-expand-regions=" + fmt.Sprintf("%d", *expandRegions),
"-merge-output=" + fmt.Sprintf("%v", *mergeOutput),
"-single-hgvs-matrix=" + fmt.Sprintf("%v", *hgvsSingle),
"-chunked-hgvs-matrix=" + fmt.Sprintf("%v", *hgvsChunked),
"-single-onehot=" + fmt.Sprintf("%v", *onehotSingle),
"-chunked-onehot=" + fmt.Sprintf("%v", *onehotChunked),
"-pca=" + fmt.Sprintf("%v", *onlyPCA),
"-pca-components=" + fmt.Sprintf("%d", *pcaComponents),
"-chi2-case-control-file=" + cmd.chi2CaseControlFile,
"-chi2-case-control-column=" + cmd.chi2CaseControlColumn,
"-chi2-p-value=" + fmt.Sprintf("%f", cmd.chi2PValue),
"-include-variant-1=" + fmt.Sprintf("%v", cmd.includeVariant1),
"-debug-tag=" + fmt.Sprintf("%d", cmd.debugTag),
runner.Args = append(runner.Args, cmd.filter.Args()...)
output, err = runner.Run()
fmt.Fprintln(stdout, output)
infiles, err := allFiles(*inputDir, matchGobFile)
if len(infiles) == 0 {
err = fmt.Errorf("no input files found in %s", *inputDir)
sort.Strings(infiles)
var refseq map[string][]tileLibRef
var reftiledata = make(map[tileLibRef][]byte, 11000000)
in0, err := open(infiles[0])
matchGenome, err := regexp.Compile(cmd.filter.MatchGenome)
err = fmt.Errorf("-match-genome: invalid regexp: %q", cmd.filter.MatchGenome)
err = DecodeLibrary(in0, strings.HasSuffix(infiles[0], ".gz"), func(ent *LibraryEntry) error {
if len(ent.TagSet) > 0 {
for _, cseq := range ent.CompactSequences {
if cseq.Name == *ref || *ref == "" {
refseq = cseq.TileSequences
for _, cg := range ent.CompactGenomes {
if matchGenome.MatchString(cg.Name) {
cmd.cgnames = append(cmd.cgnames, cg.Name)
for _, tv := range ent.TileVariants {
reftiledata[tileLibRef{tv.Tag, tv.Variant}] = tv.Sequence
err = fmt.Errorf("%s: reference sequence not found", infiles[0])
if len(tagset) == 0 {
err = fmt.Errorf("tagset not found")
taglib := &tagLibrary{}
err = taglib.setTags(tagset)
taglen := taglib.TagLen()
if len(cmd.cgnames) == 0 {
err = fmt.Errorf("no genomes found matching regexp %q", cmd.filter.MatchGenome)
sort.Strings(cmd.cgnames)
err = cmd.useCaseControlFiles()
if len(cmd.cgnames) == 0 {
err = fmt.Errorf("fatal: 0 cases, 0 controls, nothing to do")
if cmd.filter.MinCoverage == 1 {
// In the generic formula below, floating point
// arithmetic can effectively push the coverage
// threshold above 1.0, which is impossible/useless.
// 1.0 needs to mean exactly 100% coverage.
cmd.minCoverage = len(cmd.cgnames)
cmd.minCoverage = int(math.Ceil(cmd.filter.MinCoverage * float64(len(cmd.cgnames))))
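// samples.csv lists the genomes in output row order, one line per
// genome: rowIndex,"label",caseControlFlag (the flag is 1 for genomes
// marked as cases, 0 otherwise).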
labelsFilename := *outputDir + "/samples.csv"
log.Infof("writing labels to %s", labelsFilename)
f, err = os.Create(labelsFilename)
for i, name := range cmd.cgnames {
if cmd.chi2Cases != nil && cmd.chi2Cases[i] {
_, err = fmt.Fprintf(f, "%d,%q,%d\n", i, trimFilenameForLabel(name), cc)
err = fmt.Errorf("write %s: %w", labelsFilename, err)
err = fmt.Errorf("close %s: %w", labelsFilename, err)
log.Info("indexing reference tiles")
type reftileinfo struct {
variant tileVariantID
seqname string // chr1
pos int // distance from start of chromosome to starttag
tiledata []byte // acgtggcaa...
excluded bool // true if excluded by regions file
nexttag tagID // tagID of following tile (-1 for last tag of chromosome)
isdup := map[tagID]bool{}
reftile := map[tagID]*reftileinfo{}
for seqname, cseq := range refseq {
lastreftag := tagID(-1)
for _, libref := range cseq {
if cmd.filter.MaxTag >= 0 && libref.Tag > tagID(cmd.filter.MaxTag) {
tiledata := reftiledata[libref]
if len(tiledata) == 0 {
err = fmt.Errorf("missing tiledata for tag %d variant %d in %s in ref", libref.Tag, libref.Variant, seqname)
foundthistag := false
taglib.FindAll(tiledata[:len(tiledata)-1], func(tagid tagID, offset, _ int) {
if !foundthistag && tagid == libref.Tag {
if dupref, ok := reftile[tagid]; ok {
log.Printf("dropping reference tile %+v from %s @ %d, tag not unique, also found inside %+v from %s @ %d", tileLibRef{Tag: tagid, Variant: dupref.variant}, dupref.seqname, dupref.pos, libref, seqname, pos+offset+1)
delete(reftile, tagid)
log.Printf("found tag %d at offset %d inside tile variant %+v on %s @ %d", tagid, offset, libref, seqname, pos+offset+1)
if isdup[libref.Tag] {
log.Printf("dropping reference tile %+v from %s @ %d, tag not unique", libref, seqname, pos)
} else if reftile[libref.Tag] != nil {
log.Printf("dropping reference tile %+v from %s @ %d, tag not unique", tileLibRef{Tag: libref.Tag, Variant: reftile[libref.Tag].variant}, reftile[libref.Tag].seqname, reftile[libref.Tag].pos)
delete(reftile, libref.Tag)
log.Printf("dropping reference tile %+v from %s @ %d, tag not unique", libref, seqname, pos)
isdup[libref.Tag] = true
reftile[libref.Tag] = &reftileinfo{
variant: libref.Variant,
reftile[lastreftag].nexttag = libref.Tag
lastreftag = libref.Tag
pos += len(tiledata) - taglen
log.Printf("... %s done, len %d", seqname, pos+taglen)
if *regionsFilename != "" {
log.Printf("loading regions from %s", *regionsFilename)
mask, err = makeMask(*regionsFilename, *expandRegions)
log.Printf("before applying mask, len(reftile) == %d", len(reftile))
log.Printf("deleting reftile entries for regions outside %d intervals", mask.Len())
for _, rt := range reftile {
if !mask.Check(strings.TrimPrefix(rt.seqname, "chr"), rt.pos, rt.pos+len(rt.tiledata)) {
log.Printf("after applying mask, len(reftile) == %d", len(reftile))
type hgvsColSet map[hgvs.Variant][2][]int8
encodeHGVS := throttle{Max: len(refseq)}
encodeHGVSTodo := map[string]chan hgvsColSet{}
tmpHGVSCols := map[string]*os.File{}
for seqname := range refseq {
f, err = os.Create(*outputDir + "/tmp." + seqname + ".gob")
defer os.Remove(f.Name())
bufw := bufio.NewWriterSize(f, 1<<24)
enc := gob.NewEncoder(bufw)
tmpHGVSCols[seqname] = f
todo := make(chan hgvsColSet, 128)
encodeHGVSTodo[seqname] = todo
encodeHGVS.Go(func() error {
for colset := range todo {
err := enc.Encode(colset)
encodeHGVS.Report(err)
var toMerge [][]int16
if *mergeOutput || *hgvsSingle {
toMerge = make([][]int16, len(infiles))
var onehotIndirect [][2][]uint32 // [chunkIndex][axis][index]
var onehotChunkSize []uint32
var onehotXrefs [][]onehotXref
if *onehotSingle || *onlyPCA {
onehotIndirect = make([][2][]uint32, len(infiles))
onehotChunkSize = make([]uint32, len(infiles))
onehotXrefs = make([][]onehotXref, len(infiles))
chunkStartTag := make([]tagID, len(infiles))
throttleMem := throttle{Max: cmd.threads} // TODO: estimate using mem and data size
throttleNumpyMem := throttle{Max: cmd.threads/2 + 1}
log.Info("generating annotations and numpy matrix for each slice")
var errSkip = errors.New("skip infile")
for infileIdx, infile := range infiles {
infileIdx, infile := infileIdx, infile
throttleMem.Go(func() error {
seq := make(map[tagID][]TileVariant, 50000)
cgs := make(map[string]CompactGenome, len(cmd.cgnames))
f, err := open(infile)
log.Infof("%04d: reading %s", infileIdx, infile)
err = DecodeLibrary(f, strings.HasSuffix(infile, ".gz"), func(ent *LibraryEntry) error {
for _, tv := range ent.TileVariants {
// corresponding ref tile, if
// mask is in play (we can't
// determine coordinates for
if mask != nil && reftile[tv.Tag] == nil {
// corresponding ref tile is
// outside target regions --
// unless it's a potential
if mask != nil && reftile[tv.Tag].excluded &&
(int(tv.Tag+1) >= len(tagset) ||
(bytes.HasSuffix(tv.Sequence, tagset[tv.Tag+1]) && reftile[tv.Tag+1] != nil && !reftile[tv.Tag+1].excluded)) {
if tv.Tag == cmd.debugTag {
log.Printf("infile %d %s tag %d variant %d hash %x", infileIdx, infile, tv.Tag, tv.Variant, tv.Blake2b[:3])
variants := seq[tv.Tag]
if len(variants) == 0 {
variants = make([]TileVariant, 100)
for len(variants) <= int(tv.Variant) {
variants = append(variants, TileVariant{})
variants[int(tv.Variant)] = tv
seq[tv.Tag] = variants
for _, cg := range ent.CompactGenomes {
if cmd.filter.MaxTag >= 0 && cg.StartTag > tagID(cmd.filter.MaxTag) {
if !matchGenome.MatchString(cg.Name) {
// pad to full slice size
// to avoid out-of-bounds
if sliceSize := 2 * int(cg.EndTag-cg.StartTag); len(cg.Variants) < sliceSize {
cg.Variants = append(cg.Variants, make([]tileVariantID, sliceSize-len(cg.Variants))...)
} else if err != nil {
return fmt.Errorf("%04d: DecodeLibrary(%s): %w", infileIdx, infile, err)
tagstart := cgs[cmd.cgnames[0]].StartTag
tagend := cgs[cmd.cgnames[0]].EndTag
chunkStartTag[infileIdx] = tagstart
log.Infof("%04d: renumber/dedup variants for tags %d-%d", infileIdx, tagstart, tagend)
variantRemap := make([][]tileVariantID, tagend-tagstart)
throttleCPU := throttle{Max: runtime.GOMAXPROCS(0)}
for tag, variants := range seq {
tag, variants := tag, variants
throttleCPU.Go(func() error {
count := make(map[[blake2b.Size256]byte]int, len(variants))
count[blake2b.Sum256(rt.tiledata)] = 0
for cgname, cg := range cgs {
idx := int(tag-tagstart) * 2
for allele := 0; allele < 2; allele++ {
v := cg.Variants[idx+allele]
if v > 0 && len(variants[v].Sequence) > 0 {
count[variants[v].Blake2b]++
if v > 0 && tag == cmd.debugTag {
log.Printf("tag %d cg %s allele %d tv %d hash %x count is now %d", tag, cgname, allele, v, variants[v].Blake2b[:3], count[variants[v].Blake2b])
if alleleCoverage < cmd.minCoverage*2 {
idx := int(tag-tagstart) * 2
for _, cg := range cgs {
cg.Variants[idx+1] = 0
if tag == cmd.debugTag {
log.Printf("tag %d alleleCoverage %d < min %d, sample data wiped", tag, alleleCoverage, cmd.minCoverage*2)
// hash[i] will be the hash of
// the variant(s) that should
// be at rank i (0-based).
hash := make([][blake2b.Size256]byte, 0, len(count))
for b := range count {
hash = append(hash, b)
sort.Slice(hash, func(i, j int) bool {
bi, bj := &hash[i], &hash[j]
if ci, cj := count[*bi], count[*bj]; ci != cj {
return bytes.Compare((*bi)[:], (*bj)[:]) < 0
// rank[b] will be the 1-based
// new variant number for
// variants whose hash is b.
rank := make(map[[blake2b.Size256]byte]tileVariantID, len(hash))
for i, h := range hash {
rank[h] = tileVariantID(i + 1)
if tag == cmd.debugTag {
for h, r := range rank {
log.Printf("tag %d rank(%x) = %v", tag, h[:3], r)
// remap[v] will be the new
// variant number for original
remap := make([]tileVariantID, len(variants))
for i, tv := range variants {
remap[i] = rank[tv.Blake2b]
if tag == cmd.debugTag {
for in, out := range remap {
log.Printf("tag %d remap %d => %d", tag, in, out)
variantRemap[tag-tagstart] = remap
refrank := rank[blake2b.Sum256(rt.tiledata)]
if tag == cmd.debugTag {
log.Printf("tag %d reftile variant %d => %d", tag, rt.variant, refrank)
var onehotChunk [][]int8
var onehotXref []onehotXref
var annotationsFilename string
annotationsFilename = "/dev/null"
annotationsFilename = fmt.Sprintf("%s/matrix.%04d.annotations.csv", *outputDir, infileIdx)
log.Infof("%04d: writing %s", infileIdx, annotationsFilename)
annof, err := os.Create(annotationsFilename)
annow := bufio.NewWriterSize(annof, 1<<20)
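// Annotation CSV row format (see the Fprintf calls below):
// tag,outcol,tileVariant,hgvsID,seqname,pos,ref,new,left
// where hgvsID is "=" for the tile variant matching the reference and
// empty when no HGVS diff could be computed.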
for tag := tagstart; tag < tagend; tag++ {
if rt == nil && mask != nil {
// With no ref tile, we don't
// have coordinates to say
// this is in the desired
// regions -- so it's not.
// TODO: handle ref spanning
if rt != nil && rt.excluded {
// TODO: don't skip yet --
// first check for spanning
// tile variants that
// intersect non-excluded ref
if cmd.filter.MaxTag >= 0 && tag > tagID(cmd.filter.MaxTag) {
remap := variantRemap[tag-tagstart]
maxv := tileVariantID(0)
for _, v := range remap {
if *onehotChunked || *onehotSingle || *onlyPCA {
onehot, xrefs := cmd.tv2homhet(cgs, maxv, remap, tag, tagstart, seq)
if tag == cmd.debugTag {
log.WithFields(logrus.Fields{
}).Info("tv2homhet()")
onehotChunk = append(onehotChunk, onehot...)
onehotXref = append(onehotXref, xrefs...)
// Reference does not use any
// variant of this tile
// TODO: diff against the
// relevant portion of the
// ref's spanning tile
fmt.Fprintf(annow, "%d,%d,%d,=,%s,%d,,,\n", tag, outcol, rt.variant, rt.seqname, rt.pos)
reftilestr := strings.ToUpper(string(rt.tiledata))
done := make([]bool, maxv+1)
variantDiffs := make([][]hgvs.Variant, maxv+1)
for v, tv := range variants {
if v == 0 || v == rt.variant || done[v] {
if len(tv.Sequence) < taglen {
// if reftilestr doesn't end
// in the same tag as tv,
// extend reftilestr with
// following ref tiles until
// it does (up to an arbitrary
// sanity-check limit)
reftilestr := reftilestr
endtagstr := strings.ToUpper(string(tv.Sequence[len(tv.Sequence)-taglen:]))
for i, rt := 0, rt; i < annotationMaxTileSpan && !strings.HasSuffix(reftilestr, endtagstr) && rt.nexttag >= 0; i++ {
rt = reftile[rt.nexttag]
reftilestr += strings.ToUpper(string(rt.tiledata[taglen:]))
if mask != nil && !mask.Check(strings.TrimPrefix(rt.seqname, "chr"), rt.pos, rt.pos+len(reftilestr)) {
if !strings.HasSuffix(reftilestr, endtagstr) {
fmt.Fprintf(annow, "%d,%d,%d,,%s,%d,,,\n", tag, outcol, v, rt.seqname, rt.pos)
if lendiff := len(reftilestr) - len(tv.Sequence); lendiff < -1000 || lendiff > 1000 {
fmt.Fprintf(annow, "%d,%d,%d,,%s,%d,,,\n", tag, outcol, v, rt.seqname, rt.pos)
diffs, _ := hgvs.Diff(reftilestr, strings.ToUpper(string(tv.Sequence)), 0)
for i := range diffs {
diffs[i].Position += rt.pos
for _, diff := range diffs {
fmt.Fprintf(annow, "%d,%d,%d,%s:g.%s,%s,%d,%s,%s,%s\n", tag, outcol, v, rt.seqname, diff.String(), rt.seqname, diff.Position, diff.Ref, diff.New, diff.Left)
variantDiffs[v] = diffs
// We can now determine, for each HGVS
// variant (diff) in this reftile
// region, whether a given genome
// phase/allele (1) has the variant, (0) has
// =ref or a different variant in that
// position, or (-1) is lacking
// coverage / couldn't be diffed.
hgvsCol := hgvsColSet{}
for _, diffs := range variantDiffs {
for _, diff := range diffs {
if _, ok := hgvsCol[diff]; ok {
hgvsCol[diff] = [2][]int8{
make([]int8, len(cmd.cgnames)),
make([]int8, len(cmd.cgnames)),
for row, name := range cmd.cgnames {
variants := cgs[name].Variants[(tag-tagstart)*2:]
for ph := 0; ph < 2; ph++ {
if int(v) >= len(remap) {
// hgvsCol[*][ph][row] is already 0
} else if len(variantDiffs[v]) == 0 {
// lacking coverage / couldn't be diffed
for _, col := range hgvsCol {
for _, diff := range variantDiffs[v] {
hgvsCol[diff][ph][row] = 1
for diff, colpair := range hgvsCol {
allele2homhet(colpair)
if !cmd.filterHGVScolpair(colpair) {
delete(hgvsCol, diff)
if len(hgvsCol) > 0 {
encodeHGVSTodo[rt.seqname] <- hgvsCol
// transpose onehotChunk[col][row] to numpy[row*ncols+col]
rows := len(cmd.cgnames)
cols := len(onehotChunk)
log.Infof("%04d: preparing onehot numpy (rows=%d, cols=%d, mem=%d)", infileIdx, rows, cols, rows*cols)
throttleNumpyMem.Acquire()
out := onehotcols2int8(onehotChunk)
fnm := fmt.Sprintf("%s/onehot.%04d.npy", *outputDir, infileIdx)
err = writeNumpyInt8(fnm, out, rows, cols)
fnm = fmt.Sprintf("%s/onehot-columns.%04d.npy", *outputDir, infileIdx)
err = writeNumpyInt32(fnm, onehotXref2int32(onehotXref), 5, len(onehotXref))
throttleNumpyMem.Release()
if *onehotSingle || *onlyPCA {
onehotIndirect[infileIdx] = onehotChunk2Indirect(onehotChunk)
onehotChunkSize[infileIdx] = uint32(len(onehotChunk))
onehotXrefs[infileIdx] = onehotXref
n := len(onehotIndirect[infileIdx][0])
log.Infof("%04d: keeping onehot coordinates in memory (n=%d, mem=%d)", infileIdx, n, n*8*2)
if !(*onehotSingle || *onehotChunked || *onlyPCA) || *mergeOutput || *hgvsSingle {
log.Infof("%04d: preparing numpy (rows=%d, cols=%d)", infileIdx, len(cmd.cgnames), 2*outcol)
throttleNumpyMem.Acquire()
rows := len(cmd.cgnames)
out := make([]int16, rows*cols)
for row, name := range cmd.cgnames {
for col, v := range cgs[name].Variants {
tag := tagstart + tagID(col/2)
if cmd.filter.MaxTag >= 0 && tag > tagID(cmd.filter.MaxTag) {
if rt := reftile[tag]; rt == nil || rt.excluded {
out[outidx] = 0 // tag not found / spanning tile
} else if variants, ok := seq[tag]; ok && int(v) < len(variants) && len(variants[v].Sequence) > 0 {
out[outidx] = int16(variantRemap[tag-tagstart][v])
out[outidx] = -1 // low quality tile variant
if tag == cmd.debugTag {
log.Printf("tag %d row %d col %d outidx %d v %d out %d", tag, row, col, outidx, v, out[outidx])
throttleNumpyMem.Release()
if *mergeOutput || *hgvsSingle {
log.Infof("%04d: matrix fragment %d rows x %d cols", infileIdx, rows, cols)
toMerge[infileIdx] = out
if !*mergeOutput && !*onehotChunked && !*onehotSingle {
fnm := fmt.Sprintf("%s/matrix.%04d.npy", *outputDir, infileIdx)
err = writeNumpyInt16(fnm, out, rows, cols)
log.Infof("%s: done (%d/%d)", infile, int(atomic.AddInt64(&done, 1)), len(infiles))
if err = throttleMem.Wait(); err != nil {
log.Info("flushing hgvsCols temp files")
for seqname := range refseq {
close(encodeHGVSTodo[seqname])
err = encodeHGVS.Wait()
for seqname := range refseq {
log.Infof("%s: reading hgvsCols from temp file", seqname)
f := tmpHGVSCols[seqname]
_, err = f.Seek(0, io.SeekStart)
var hgvsCols hgvsColSet
dec := gob.NewDecoder(bufio.NewReaderSize(f, 1<<24))
err = dec.Decode(&hgvsCols)
log.Infof("%s: sorting %d hgvs variants", seqname, len(hgvsCols))
variants := make([]hgvs.Variant, 0, len(hgvsCols))
for v := range hgvsCols {
variants = append(variants, v)
sort.Slice(variants, func(i, j int) bool {
vi, vj := &variants[i], &variants[j]
if vi.Position != vj.Position {
return vi.Position < vj.Position
} else if vi.Ref != vj.Ref {
return vi.Ref < vj.Ref
return vi.New < vj.New
rows := len(cmd.cgnames)
cols := len(variants) * 2
log.Infof("%s: building hgvs matrix (rows=%d, cols=%d, mem=%d)", seqname, rows, cols, rows*cols)
out := make([]int8, rows*cols)
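// Column layout: two adjacent columns per HGVS variant, one per
// phase, i.e., column varIdx*2+ph.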
for varIdx, variant := range variants {
hgvsCols := hgvsCols[variant]
for row := range cmd.cgnames {
for ph := 0; ph < 2; ph++ {
out[row*cols+varIdx*2+ph] = hgvsCols[ph][row]
err = writeNumpyInt8(fmt.Sprintf("%s/hgvs.%s.npy", *outputDir, seqname), out, rows, cols)
fnm := fmt.Sprintf("%s/hgvs.%s.annotations.csv", *outputDir, seqname)
log.Infof("%s: writing hgvs column labels to %s", seqname, fnm)
var hgvsLabels bytes.Buffer
for varIdx, variant := range variants {
fmt.Fprintf(&hgvsLabels, "%d,%s:g.%s\n", varIdx, seqname, variant.String())
err = ioutil.WriteFile(fnm, hgvsLabels.Bytes(), 0666)
if *mergeOutput || *hgvsSingle {
var annow *bufio.Writer
annoFilename := fmt.Sprintf("%s/matrix.annotations.csv", *outputDir)
annof, err = os.Create(annoFilename)
annow = bufio.NewWriterSize(annof, 1<<20)
rows := len(cmd.cgnames)
for _, chunk := range toMerge {
cols += len(chunk) / rows
log.Infof("merging output matrix (rows=%d, cols=%d, mem=%d) and annotations", rows, cols, rows*cols*2)
out = make([]int16, rows*cols)
hgvsCols := map[string][2][]int16{} // hgvs -> [[g0,g1,g2,...], [g0,g1,g2,...]] (slice of genomes for each phase)
for outIdx, chunk := range toMerge {
chunkcols := len(chunk) / rows
for row := 0; row < rows; row++ {
copy(out[row*cols+startcol:], chunk[row*chunkcols:(row+1)*chunkcols])
toMerge[outIdx] = nil
annotationsFilename := fmt.Sprintf("%s/matrix.%04d.annotations.csv", *outputDir, outIdx)
log.Infof("reading %s", annotationsFilename)
buf, err := os.ReadFile(annotationsFilename)
err = os.Remove(annotationsFilename)
for _, line := range bytes.Split(buf, []byte{'\n'}) {
fields := bytes.SplitN(line, []byte{','}, 9)
tag, _ := strconv.Atoi(string(fields[0]))
incol, _ := strconv.Atoi(string(fields[1]))
tileVariant, _ := strconv.Atoi(string(fields[2]))
hgvsID := string(fields[3])
seqname := string(fields[4])
pos, _ := strconv.Atoi(string(fields[5]))
// Null entry for un-diffable
// Null entry for ref tile
if mask != nil && !mask.Check(strings.TrimPrefix(seqname, "chr"), pos, pos+len(refseq)) {
// The tile intersects one of
// the selected regions, but
// this particular HGVS
hgvsColPair := hgvsCols[hgvsID]
if hgvsColPair[0] == nil {
// values in new columns start
// out as -1 ("no data yet")
// or 0 ("=ref") here, may
// change to 1 ("hgvs variant
// present") below, either on
// this line or a future line.
hgvsColPair = [2][]int16{make([]int16, len(cmd.cgnames)), make([]int16, len(cmd.cgnames))}
rt, ok := reftile[tagID(tag)]
err = fmt.Errorf("bug: seeing annotations for tag %d, but it has no reftile entry", tag)
for ph := 0; ph < 2; ph++ {
for row := 0; row < rows; row++ {
v := chunk[row*chunkcols+incol*2+ph]
if tileVariantID(v) == rt.variant {
hgvsColPair[ph][row] = 0
hgvsColPair[ph][row] = -1
hgvsCols[hgvsID] = hgvsColPair
hgvsref := hgvs.Variant{
Ref: string(refseq),
New: string(refseq),
fmt.Fprintf(annow, "%d,%d,%d,%s:g.%s,%s,%d,%s,%s,%s\n", tag, incol+startcol/2, rt.variant, seqname, hgvsref.String(), seqname, pos, refseq, refseq, fields[8])
fmt.Fprintf(annow, "%d,%d,%d,%s,%s,%d,%s,%s,%s\n", tag, incol+startcol/2, tileVariant, hgvsID, seqname, pos, refseq, fields[7], fields[8])
for ph := 0; ph < 2; ph++ {
for row := 0; row < rows; row++ {
v := chunk[row*chunkcols+incol*2+ph]
if int(v) == tileVariant {
hgvsColPair[ph][row] = 1
startcol += chunkcols
err = writeNumpyInt16(fmt.Sprintf("%s/matrix.npy", *outputDir), out, rows, cols)
cols = len(hgvsCols) * 2
log.Printf("building hgvs-based matrix: %d rows x %d cols", rows, cols)
out = make([]int16, rows*cols)
hgvsIDs := make([]string, 0, cols/2)
for hgvsID := range hgvsCols {
hgvsIDs = append(hgvsIDs, hgvsID)
sort.Strings(hgvsIDs)
var hgvsLabels bytes.Buffer
for idx, hgvsID := range hgvsIDs {
fmt.Fprintf(&hgvsLabels, "%d,%s\n", idx, hgvsID)
for ph := 0; ph < 2; ph++ {
hgvscol := hgvsCols[hgvsID][ph]
for row, val := range hgvscol {
out[row*cols+idx*2+ph] = val
err = writeNumpyInt16(fmt.Sprintf("%s/hgvs.npy", *outputDir), out, rows, cols)
fnm := fmt.Sprintf("%s/hgvs.annotations.csv", *outputDir)
log.Printf("writing hgvs labels: %s", fnm)
err = ioutil.WriteFile(fnm, hgvsLabels.Bytes(), 0777)
if *onehotSingle || *onlyPCA {
for _, part := range onehotIndirect {
nzCount += len(part[0])
onehot := make([]uint32, nzCount*2) // [r,r,r,...,c,c,c,...]
var xrefs []onehotXref
chunkOffset := uint32(0)
for i, part := range onehotIndirect {
for i := range part[1] {
part[1][i] += chunkOffset
copy(onehot[outcol:], part[0])
copy(onehot[outcol+nzCount:], part[1])
xrefs = append(xrefs, onehotXrefs[i]...)
outcol += len(part[0])
chunkOffset += onehotChunkSize[i]
onehotXrefs[i] = nil
debug.FreeOSMemory()
fnm := fmt.Sprintf("%s/onehot.npy", *outputDir)
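// onehot.npy is a 2 x N coordinate list: row 0 holds the genome (row)
// indices and row 1 the column indices of the non-zero entries of the
// combined one-hot matrix.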
err = writeNumpyUint32(fnm, onehot, 2, nzCount)
fnm = fmt.Sprintf("%s/onehot-columns.npy", *outputDir)
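// onehot-columns.npy has 5 rows and one column per one-hot matrix
// column: tag, tile variant, hom/het flag (hom=0, het=1),
// p-value*1e6, and -log10(p-value)*1e6 (see onehotXref2int32).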
err = writeNumpyInt32(fnm, onehotXref2int32(xrefs), 5, len(xrefs))
for _, c := range onehot[nzCount:] {
return fmt.Errorf("cannot do PCA: one-hot matrix is empty")
log.Printf("creating matrix: %d rows, %d cols", len(cmd.cgnames), cols)
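// Expand the sparse one-hot coordinates into a dense genomes x
// onehot-columns matrix for the PCA library.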
mtx := mat.NewDense(len(cmd.cgnames), cols, nil)
for i, c := range onehot[nzCount:] {
mtx.Set(int(onehot[i]), int(c), 1)
log.Print("fitting")
transformer := nlp.NewPCA(*pcaComponents)
transformer.Fit(mtx.T())
log.Printf("transforming")
pca, err := transformer.Transform(mtx.T())
outrows, outcols := pca.Dims()
log.Printf("copying result to numpy output array: %d rows, %d cols", outrows, outcols)
out := make([]float64, outrows*outcols)
for i := 0; i < outrows; i++ {
for j := 0; j < outcols; j++ {
out[i*outcols+j] = pca.At(i, j)
fnm := fmt.Sprintf("%s/pca.npy", *outputDir)
log.Printf("writing numpy: %s", fnm)
output, err := os.OpenFile(fnm, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0777)
npw, err := gonpy.NewWriter(nopCloser{output})
return fmt.Errorf("gonpy.NewWriter: %w", err)
npw.Shape = []int{outrows, outcols}
err = npw.WriteFloat64(out)
return fmt.Errorf("WriteFloat64: %w", err)
err = output.Close()
if !*mergeOutput && !*onehotChunked && !*onehotSingle && !*onlyPCA {
tagoffsetFilename := *outputDir + "/chunk-tag-offset.csv"
log.Infof("writing tag offsets to %s", tagoffsetFilename)
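// chunk-tag-offset.csv has one row per chunk matrix file:
// "matrix.NNNN.npy",startTag -- the tag number of the chunk's first
// column pair.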
f, err = os.Create(tagoffsetFilename)
for idx, offset := range chunkStartTag {
_, err = fmt.Fprintf(f, "%q,%d\n", fmt.Sprintf("matrix.%04d.npy", idx), offset)
err = fmt.Errorf("write %s: %w", tagoffsetFilename, err)
err = fmt.Errorf("close %s: %w", tagoffsetFilename, err)
// Read case/control files, remove non-case/control entries from
// cmd.cgnames, and build cmd.chi2Cases.
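//
// Each input is a tab-separated file whose header row contains
// cmd.chi2CaseControlColumn; each subsequent row supplies a pattern
// that is substring-matched against genome names plus a 0 (control)
// or 1 (case) value in that column.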
func (cmd *sliceNumpy) useCaseControlFiles() error {
if cmd.chi2CaseControlFile == "" {
infiles, err := allFiles(cmd.chi2CaseControlFile, nil)
// index in cmd.cgnames => case(true) / control(false)
cc := map[int]bool{}
for _, infile := range infiles {
f, err := open(infile)
buf, err := io.ReadAll(f)
for _, tsv := range bytes.Split(buf, []byte{'\n'}) {
split := strings.Split(string(tsv), "\t")
for col, name := range split {
if name == cmd.chi2CaseControlColumn {
return fmt.Errorf("%s: no column named %q in header row %q", infile, cmd.chi2CaseControlColumn, tsv)
if len(split) <= ccCol {
for i, name := range cmd.cgnames {
if strings.Contains(name, pattern) {
log.Warnf("pattern %q in %s matches multiple genome IDs (%q, %q)", pattern, infile, cmd.cgnames[found], name)
log.Warnf("pattern %q in %s does not match any genome IDs", pattern, infile)
if split[ccCol] == "0" {
if split[ccCol] == "1" {
allnames := cmd.cgnames
for i, name := range allnames {
if cc, ok := cc[i]; ok {
cmd.cgnames = append(cmd.cgnames, name)
cmd.chi2Cases = append(cmd.chi2Cases, cc)
log.Printf("%d cases, %d controls, %d neither (dropped)", ncases, len(cmd.cgnames)-ncases, len(allnames)-len(cmd.cgnames))
func (cmd *sliceNumpy) filterHGVScolpair(colpair [2][]int8) bool {
if cmd.chi2PValue >= 1 {
col0 := make([]bool, 0, len(cmd.chi2Cases))
col1 := make([]bool, 0, len(cmd.chi2Cases))
cases := make([]bool, 0, len(cmd.chi2Cases))
for i, c := range cmd.chi2Cases {
if colpair[0][i] < 0 {
col0 = append(col0, colpair[0][i] != 0)
col1 = append(col1, colpair[1][i] != 0)
cases = append(cases, c)
return len(cases) >= cmd.minCoverage &&
(pvalue(col0, cases) <= cmd.chi2PValue || pvalue(col1, cases) <= cmd.chi2PValue)
func writeNumpyUint32(fnm string, out []uint32, rows, cols int) error {
output, err := os.Create(fnm)
defer output.Close()
bufw := bufio.NewWriterSize(output, 1<<26)
npw, err := gonpy.NewWriter(nopCloser{bufw})
log.WithFields(log.Fields{
"bytes": rows * cols * 4,
}).Infof("writing numpy: %s", fnm)
npw.Shape = []int{rows, cols}
npw.WriteUint32(out)
return output.Close()
func writeNumpyInt32(fnm string, out []int32, rows, cols int) error {
output, err := os.Create(fnm)
defer output.Close()
bufw := bufio.NewWriterSize(output, 1<<26)
npw, err := gonpy.NewWriter(nopCloser{bufw})
log.WithFields(log.Fields{
"bytes": rows * cols * 4,
}).Infof("writing numpy: %s", fnm)
npw.Shape = []int{rows, cols}
return output.Close()
func writeNumpyInt16(fnm string, out []int16, rows, cols int) error {
output, err := os.Create(fnm)
defer output.Close()
bufw := bufio.NewWriterSize(output, 1<<26)
npw, err := gonpy.NewWriter(nopCloser{bufw})
log.WithFields(log.Fields{
"bytes": rows * cols * 2,
}).Infof("writing numpy: %s", fnm)
npw.Shape = []int{rows, cols}
return output.Close()
func writeNumpyInt8(fnm string, out []int8, rows, cols int) error {
output, err := os.Create(fnm)
defer output.Close()
bufw := bufio.NewWriterSize(output, 1<<26)
npw, err := gonpy.NewWriter(nopCloser{bufw})
log.WithFields(log.Fields{
"bytes": rows * cols,
}).Infof("writing numpy: %s", fnm)
npw.Shape = []int{rows, cols}
return output.Close()
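// allele2homhet converts a pair of per-allele variant-presence columns
// into hom/het indicator columns in place: rows where both alleles carry
// the variant are flagged as hom, rows where exactly one does as het,
// rows with negative (no-call) input stay no-call, and rows matching ref
// are left at zero.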
func allele2homhet(colpair [2][]int8) {
a, b := colpair[0], colpair[1]
for i, av := range a {
if av < 0 || bv < 0 {
} else if av > 0 && bv > 0 {
} else if av > 0 || bv > 0 {
// ref (or a different variant in same position)
// (this is a no-op) a[i], b[i] = 0, 0
type onehotXref struct {
variant tileVariantID
const onehotXrefSize = unsafe.Sizeof(onehotXref{})
// Build onehot matrix (m[tileVariantIndex][genome] == 0 or 1) for all
// variants of a single tile/tag#.
// Return nil if no tile variant passes Χ² filter.
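//
// For each remaining tile variant, two columns are returned (hom and
// het), along with a parallel onehotXref entry recording the tag,
// variant, hom/het flag, and p-value for each column.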
func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantID, remap []tileVariantID, tag, chunkstarttag tagID, seq map[tagID][]TileVariant) ([][]int8, []onehotXref) {
if tag == cmd.debugTag {
tv := make([]tileVariantID, len(cmd.cgnames)*2)
for i, name := range cmd.cgnames {
copy(tv[i*2:(i+1)*2], cgs[name].Variants[(tag-chunkstarttag)*2:])
log.WithFields(logrus.Fields{
"cgs[i].Variants[tag*2+j]": tv,
"chunkstarttag": chunkstarttag,
}).Info("tv2homhet()")
if maxv < 1 || (maxv < 2 && !cmd.includeVariant1) {
// everyone has the most common variant (of the variants we don't drop)
tagoffset := tag - chunkstarttag
for _, cg := range cgs {
for _, v := range cg.Variants[tagoffset*2 : tagoffset*2+2] {
if v > 0 && int(v) < len(seq[tag]) && len(seq[tag][v].Sequence) > 0 {
if coverage < cmd.minCoverage {
obs := make([][]bool, (maxv+1)*2) // 2 slices (hom + het) for each variant#
for i := range obs {
obs[i] = make([]bool, len(cmd.cgnames))
for cgid, name := range cmd.cgnames {
cgvars := cgs[name].Variants[tagoffset*2:]
tv0, tv1 := remap[cgvars[0]], remap[cgvars[1]]
for v := tileVariantID(1); v <= maxv; v++ {
if tv0 == v && tv1 == v {
obs[v*2][cgid] = true
} else if tv0 == v || tv1 == v {
obs[v*2+1][cgid] = true
var xref []onehotXref
for col := 2; col < len(obs); col++ {
// col 0,1 correspond to tile variant 0, i.e.,
// no-call; col 2,3 correspond to the most common
// variant; so we (normally) start at col 4.
if col < 4 && !cmd.includeVariant1 {
p := pvalue(obs[col], cmd.chi2Cases)
if cmd.chi2PValue < 1 && !(p < cmd.chi2PValue) {
onehot = append(onehot, bool2int8(obs[col]))
xref = append(xref, onehotXref{
variant: tileVariantID(col >> 1),
func bool2int8(in []bool) []int8 {
out := make([]int8, len(in))
for i, v := range in {
// convert a []onehotXref with length N to a numpy-style []int32
// matrix with N columns, one row per field of onehotXref struct.
// Hom/het row contains hom=0, het=1.
// P-value row contains 1000000x actual p-value.
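// The fifth row contains -log10(p-value), also scaled by 1000000.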
func onehotXref2int32(xrefs []onehotXref) []int32 {
xdata := make([]int32, 5*xcols)
for i, xref := range xrefs {
xdata[i] = int32(xref.tag)
xdata[xcols+i] = int32(xref.variant)
xdata[xcols*2+i] = 1
xdata[xcols*3+i] = int32(xref.pvalue * 1000000)
xdata[xcols*4+i] = int32(-math.Log10(xref.pvalue) * 1000000)
// transpose onehot data from in[col][row] to numpy-style
// out[row*cols+col].
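// For example, in = [[1,0], [0,1], [1,1]] (3 columns x 2 rows) becomes
// out = [1,0,1, 0,1,1].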
func onehotcols2int8(in [][]int8) []int8 {
out := make([]int8, rows*cols)
for row := 0; row < rows; row++ {
outrow := out[row*cols:]
for col, incol := range in {
outrow[col] = incol[row]
// Return [2][]uint32{rowIndices, colIndices} indicating which
// elements of matrixT[c][r] have non-zero values.
func onehotChunk2Indirect(matrixT [][]int8) [2][]uint32 {
for c, col := range matrixT {
for r, val := range col {
nz[0] = append(nz[0], uint32(r))
nz[1] = append(nz[1], uint32(c))