1 // Copyright (C) The Lightning Authors. All rights reserved.
2 //
3 // SPDX-License-Identifier: AGPL-3.0
4
5 package lightning
6
7 import (
8         "bufio"
9         "bytes"
10         "encoding/gob"
11         "encoding/json"
12         "errors"
13         "flag"
14         "fmt"
15         "io"
16         "io/ioutil"
17         "math"
18         "net/http"
19         _ "net/http/pprof"
20         "os"
21         "regexp"
22         "runtime"
23         "runtime/debug"
24         "sort"
25         "strconv"
26         "strings"
27         "sync/atomic"
28         "unsafe"
29
30         "git.arvados.org/arvados.git/sdk/go/arvados"
31         "github.com/arvados/lightning/hgvs"
32         "github.com/james-bowman/nlp"
33         "github.com/kshedden/gonpy"
34         "github.com/sirupsen/logrus"
35         log "github.com/sirupsen/logrus"
36         "golang.org/x/crypto/blake2b"
37         "gonum.org/v1/gonum/mat"
38 )
39
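// annotationMaxTileSpan caps how many consecutive reference tiles are
// stitched together when a spanning tile variant is diffed against the
// reference in the annotation loop below.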
40 const annotationMaxTileSpan = 100
41
42 type sliceNumpy struct {
43         filter             filter
44         threads            int
45         chi2Cases          []bool
46         chi2PValue         float64
47         pvalueMinFrequency float64
48         pcaComponents      int
49         minCoverage        int
50         includeVariant1    bool
51         debugTag           tagID
52
53         cgnames         []string
54         samples         []sampleInfo
55         trainingSet     []int // samples index => training set index, or -1 if not in training set
56         trainingSetSize int
57         pvalue          func(onehot []bool) float64
58         pvalueCallCount int64
59 }
60
61 func (cmd *sliceNumpy) RunCommand(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) int {
62         err := cmd.run(prog, args, stdin, stdout, stderr)
63         if err != nil {
64                 fmt.Fprintf(stderr, "%s\n", err)
65                 return 1
66         }
67         return 0
68 }
69
70 func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) error {
71         flags := flag.NewFlagSet("", flag.ContinueOnError)
72         flags.SetOutput(stderr)
73         pprof := flags.String("pprof", "", "serve Go profile data at http://`[addr]:port`")
74         runlocal := flags.Bool("local", false, "run on local host (default: run in an arvados container)")
75         arvadosRAM := flags.Int("arvados-ram", 750000000000, "amount of memory to request for arvados container (`bytes`)")
76         arvadosVCPUs := flags.Int("arvados-vcpus", 96, "number of VCPUs to request for arvados container")
77         projectUUID := flags.String("project", "", "project `UUID` for output data")
78         priority := flags.Int("priority", 500, "container request priority")
79         preemptible := flags.Bool("preemptible", true, "request preemptible instance")
80         inputDir := flags.String("input-dir", "./in", "input `directory`")
81         outputDir := flags.String("output-dir", "./out", "output `directory`")
82         ref := flags.String("ref", "", "reference name (if blank, choose last one that appears in input)")
83         regionsFilename := flags.String("regions", "", "only output columns/annotations that intersect regions in specified bed `file`")
84         expandRegions := flags.Int("expand-regions", 0, "expand specified regions by `N` base pairs on each side")
85         mergeOutput := flags.Bool("merge-output", false, "merge output into one matrix.npy and one matrix.annotations.csv")
86         hgvsSingle := flags.Bool("single-hgvs-matrix", false, "also generate hgvs-based matrix")
87         hgvsChunked := flags.Bool("chunked-hgvs-matrix", false, "also generate hgvs-based matrix per chromosome")
88         onehotSingle := flags.Bool("single-onehot", false, "generate one-hot tile-based matrix")
89         onehotChunked := flags.Bool("chunked-onehot", false, "generate one-hot tile-based matrix per input chunk")
90         samplesFilename := flags.String("samples", "", "`samples.csv` file with training/validation and case/control groups (see 'lightning choose-samples')")
91         caseControlOnly := flags.Bool("case-control-only", false, "drop samples that are not in case/control groups")
92         onlyPCA := flags.Bool("pca", false, "run principal component analysis, write components to pca.npy and samples.csv")
93         flags.IntVar(&cmd.pcaComponents, "pca-components", 4, "number of PCA components to compute / use in logistic regression")
94         maxPCATiles := flags.Int("max-pca-tiles", 0, "maximum tiles to use as PCA input (filter, then drop every 2nd column pair until below max)")
95         debugTag := flags.Int("debug-tag", -1, "log debugging details about specified tag")
96         flags.IntVar(&cmd.threads, "threads", 16, "number of memory-hungry assembly threads, and number of VCPUs to request for arvados container")
97         flags.Float64Var(&cmd.chi2PValue, "chi2-p-value", 1, "do Χ² test (or logistic regression if -samples file has PCA components) and omit columns with p-value above this threshold")
98         flags.Float64Var(&cmd.pvalueMinFrequency, "pvalue-min-frequency", 0.01, "skip p-value calculation on tile variants below this frequency in the training set")
99         flags.BoolVar(&cmd.includeVariant1, "include-variant-1", false, "include most common variant when building one-hot matrix")
100         cmd.filter.Flags(flags)
101         err := flags.Parse(args)
102         if err == flag.ErrHelp {
103                 return nil
104         } else if err != nil {
105                 return err
106         } else if flags.NArg() > 0 {
107                 return fmt.Errorf("errant command line arguments after parsed flags: %v", flags.Args())
108         }
109
110         if *pprof != "" {
111                 go func() {
112                         log.Println(http.ListenAndServe(*pprof, nil))
113                 }()
114         }
115
116         if cmd.chi2PValue != 1 && *samplesFilename == "" {
117                 return fmt.Errorf("cannot use provided -chi2-p-value=%f because -samples= value is empty", cmd.chi2PValue)
118         }
119
120         cmd.debugTag = tagID(*debugTag)
121
122         if !*runlocal {
123                 runner := arvadosContainerRunner{
124                         Name:        "lightning slice-numpy",
125                         Client:      arvados.NewClientFromEnv(),
126                         ProjectUUID: *projectUUID,
127                         RAM:         int64(*arvadosRAM),
128                         VCPUs:       *arvadosVCPUs,
129                         Priority:    *priority,
130                         KeepCache:   2,
131                         APIAccess:   true,
132                         Preemptible: *preemptible,
133                 }
134                 err = runner.TranslatePaths(inputDir, regionsFilename, samplesFilename)
135                 if err != nil {
136                         return err
137                 }
138                 runner.Args = []string{"slice-numpy", "-local=true",
139                         "-pprof=:6060",
140                         "-input-dir=" + *inputDir,
141                         "-output-dir=/mnt/output",
142                         "-threads=" + fmt.Sprintf("%d", cmd.threads),
143                         "-regions=" + *regionsFilename,
144                         "-expand-regions=" + fmt.Sprintf("%d", *expandRegions),
145                         "-merge-output=" + fmt.Sprintf("%v", *mergeOutput),
146                         "-single-hgvs-matrix=" + fmt.Sprintf("%v", *hgvsSingle),
147                         "-chunked-hgvs-matrix=" + fmt.Sprintf("%v", *hgvsChunked),
148                         "-single-onehot=" + fmt.Sprintf("%v", *onehotSingle),
149                         "-chunked-onehot=" + fmt.Sprintf("%v", *onehotChunked),
150                         "-samples=" + *samplesFilename,
151                         "-case-control-only=" + fmt.Sprintf("%v", *caseControlOnly),
152                         "-pca=" + fmt.Sprintf("%v", *onlyPCA),
153                         "-pca-components=" + fmt.Sprintf("%d", cmd.pcaComponents),
154                         "-max-pca-tiles=" + fmt.Sprintf("%d", *maxPCATiles),
155                         "-chi2-p-value=" + fmt.Sprintf("%f", cmd.chi2PValue),
156                         "-pvalue-min-frequency=" + fmt.Sprintf("%f", cmd.pvalueMinFrequency),
157                         "-include-variant-1=" + fmt.Sprintf("%v", cmd.includeVariant1),
158                         "-debug-tag=" + fmt.Sprintf("%d", cmd.debugTag),
159                 }
160                 runner.Args = append(runner.Args, cmd.filter.Args()...)
161                 var output string
162                 output, err = runner.Run()
163                 if err != nil {
164                         return err
165                 }
166                 fmt.Fprintln(stdout, output)
167                 return nil
168         }
169
170         infiles, err := allFiles(*inputDir, matchGobFile)
171         if err != nil {
172                 return err
173         }
174         if len(infiles) == 0 {
175                 err = fmt.Errorf("no input files found in %s", *inputDir)
176                 return err
177         }
178         sort.Strings(infiles)
179
180         var refseq map[string][]tileLibRef
181         var reftiledata = make(map[tileLibRef][]byte, 11000000)
182         in0, err := open(infiles[0])
183         if err != nil {
184                 return err
185         }
186
187         matchGenome, err := regexp.Compile(cmd.filter.MatchGenome)
188         if err != nil {
189                 err = fmt.Errorf("-match-genome: invalid regexp: %q", cmd.filter.MatchGenome)
190                 return err
191         }
192
193         if *samplesFilename != "" {
194                 cmd.samples, err = loadSampleInfo(*samplesFilename)
195                 if err != nil {
196                         return err
197                 }
198         } else if *caseControlOnly {
199                 return fmt.Errorf("-case-control-only does not make sense without -samples")
200         }
201
202         cmd.cgnames = nil
203         var tagset [][]byte
204         err = DecodeLibrary(in0, strings.HasSuffix(infiles[0], ".gz"), func(ent *LibraryEntry) error {
205                 if len(ent.TagSet) > 0 {
206                         tagset = ent.TagSet
207                 }
208                 for _, cseq := range ent.CompactSequences {
209                         if cseq.Name == *ref || *ref == "" {
210                                 refseq = cseq.TileSequences
211                         }
212                 }
213                 for _, cg := range ent.CompactGenomes {
214                         if matchGenome.MatchString(cg.Name) {
215                                 cmd.cgnames = append(cmd.cgnames, cg.Name)
216                         }
217                 }
218                 for _, tv := range ent.TileVariants {
219                         if tv.Ref {
220                                 reftiledata[tileLibRef{tv.Tag, tv.Variant}] = tv.Sequence
221                         }
222                 }
223                 return nil
224         })
225         if err != nil {
226                 return err
227         }
228         in0.Close()
229         if refseq == nil {
230                 err = fmt.Errorf("%s: reference sequence not found", infiles[0])
231                 return err
232         }
233         if len(tagset) == 0 {
234                 err = fmt.Errorf("tagset not found")
235                 return err
236         }
237
238         taglib := &tagLibrary{}
239         err = taglib.setTags(tagset)
240         if err != nil {
241                 return err
242         }
243         taglen := taglib.TagLen()
244         sort.Strings(cmd.cgnames)
245
246         if len(cmd.cgnames) == 0 {
247                 return fmt.Errorf("fatal: 0 matching samples in library, nothing to do")
248         }
249         cmd.trainingSet = make([]int, len(cmd.cgnames))
250         if *samplesFilename == "" {
251                 cmd.trainingSetSize = len(cmd.cgnames)
252                 for i, name := range cmd.cgnames {
253                         cmd.samples = append(cmd.samples, sampleInfo{
254                                 id:         trimFilenameForLabel(name),
255                                 isTraining: true,
256                         })
257                         cmd.trainingSet[i] = i
258                 }
259         } else if len(cmd.cgnames) != len(cmd.samples) {
260                 return fmt.Errorf("mismatched sample list: %d samples in library, %d in %s", len(cmd.cgnames), len(cmd.samples), *samplesFilename)
261         } else {
262                 for i, name := range cmd.cgnames {
263                         if s := trimFilenameForLabel(name); s != cmd.samples[i].id {
264                                 return fmt.Errorf("mismatched sample list: sample %d is %q in library, %q in %s", i, s, cmd.samples[i].id, *samplesFilename)
265                         }
266                 }
267                 if *caseControlOnly {
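                        // Drop, in place, any sample that is in neither
                        // the training nor the validation group, keeping
                        // cmd.samples and cmd.cgnames index-aligned.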
268                         for i := 0; i < len(cmd.samples); i++ {
269                                 if !cmd.samples[i].isTraining && !cmd.samples[i].isValidation {
270                                         if i+1 < len(cmd.samples) {
271                                                 copy(cmd.samples[i:], cmd.samples[i+1:])
272                                                 copy(cmd.cgnames[i:], cmd.cgnames[i+1:])
273                                         }
274                                         cmd.samples = cmd.samples[:len(cmd.samples)-1]
275                                         cmd.cgnames = cmd.cgnames[:len(cmd.cgnames)-1]
276                                         i--
277                                 }
278                         }
279                 }
280                 cmd.chi2Cases = nil
281                 cmd.trainingSetSize = 0
282                 for i := range cmd.cgnames {
283                         if cmd.samples[i].isTraining {
284                                 cmd.trainingSet[i] = cmd.trainingSetSize
285                                 cmd.trainingSetSize++
286                                 cmd.chi2Cases = append(cmd.chi2Cases, cmd.samples[i].isCase)
287                         } else {
288                                 cmd.trainingSet[i] = -1
289                         }
290                 }
291                 if cmd.pvalue == nil {
292                         cmd.pvalue = func(onehot []bool) float64 {
293                                 return pvalue(onehot, cmd.chi2Cases)
294                         }
295                 }
296         }
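        // At this point cmd.trainingSet maps each sample index to its
        // position in the training set (or -1 if excluded). When a
        // -samples file was given, cmd.pvalue defaults to a chi-squared
        // test against the case/control labels in cmd.chi2Cases;
        // glmPvalueFunc below swaps in logistic regression if the
        // samples carry PCA components.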
297         if cmd.filter.MinCoverage == 1 {
298                 // In the generic formula below, floating point
299                 // arithmetic can effectively push the coverage
300                 // threshold above 1.0, which is impossible/useless.
301                 // 1.0 needs to mean exactly 100% coverage.
302                 cmd.minCoverage = len(cmd.cgnames)
303         } else {
304                 cmd.minCoverage = int(math.Ceil(cmd.filter.MinCoverage * float64(len(cmd.cgnames))))
305         }
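        // Illustrative example: with cmd.filter.MinCoverage = 0.9 and
        // 1000 matching samples, cmd.minCoverage = ceil(0.9*1000) = 900,
        // so at least 1800 of the 2000 alleles at a tile position must
        // carry a usable variant (see the alleleCoverage check below)
        // for that position to be kept.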
306
307         if len(cmd.samples[0].pcaComponents) > 0 {
308                 cmd.pvalue = glmPvalueFunc(cmd.samples, cmd.pcaComponents)
309                 // Unfortunately, statsmodel/glm lib logs stuff to
310                 // os.Stdout when it panics on an unsolvable
311                 // problem. We recover() from the panic in glm.go, but
312                 // we also need to commandeer os.Stdout to avoid
313                 // producing large quantities of logs.
314                 stdoutWas := os.Stdout
315                 defer func() { os.Stdout = stdoutWas }()
316                 os.Stdout, err = os.Open(os.DevNull)
317                 if err != nil {
318                         return err
319                 }
320         }
321
322         // cgnamemap[name]==true for samples that we are including in
323         // output
324         cgnamemap := map[string]bool{}
325         for _, name := range cmd.cgnames {
326                 cgnamemap[name] = true
327         }
328
329         err = writeSampleInfo(cmd.samples, *outputDir)
330         if err != nil {
331                 return err
332         }
333
334         log.Info("indexing reference tiles")
335         type reftileinfo struct {
336                 variant  tileVariantID
337                 seqname  string // chr1
338                 pos      int    // distance from start of chromosome to starttag
339                 tiledata []byte // acgtggcaa...
340                 excluded bool   // true if excluded by regions file
341                 nexttag  tagID  // tagID of following tile (-1 for last tag of chromosome)
342         }
343         isdup := map[tagID]bool{}
344         reftile := map[tagID]*reftileinfo{}
345         for seqname, cseq := range refseq {
346                 pos := 0
347                 lastreftag := tagID(-1)
348                 for _, libref := range cseq {
349                         if cmd.filter.MaxTag >= 0 && libref.Tag > tagID(cmd.filter.MaxTag) {
350                                 continue
351                         }
352                         tiledata := reftiledata[libref]
353                         if len(tiledata) == 0 {
354                                 err = fmt.Errorf("missing tiledata for tag %d variant %d in %s in ref", libref.Tag, libref.Variant, seqname)
355                                 return err
356                         }
357                         foundthistag := false
358                         taglib.FindAll(tiledata[:len(tiledata)-1], func(tagid tagID, offset, _ int) {
359                                 if !foundthistag && tagid == libref.Tag {
360                                         foundthistag = true
361                                         return
362                                 }
363                                 if dupref, ok := reftile[tagid]; ok {
364                                         log.Printf("dropping reference tile %+v from %s @ %d, tag not unique, also found inside %+v from %s @ %d", tileLibRef{Tag: tagid, Variant: dupref.variant}, dupref.seqname, dupref.pos, libref, seqname, pos+offset+1)
365                                         delete(reftile, tagid)
366                                 } else {
367                                         log.Printf("found tag %d at offset %d inside tile variant %+v on %s @ %d", tagid, offset, libref, seqname, pos+offset+1)
368                                 }
369                                 isdup[tagid] = true
370                         })
371                         if isdup[libref.Tag] {
372                                 log.Printf("dropping reference tile %+v from %s @ %d, tag not unique", libref, seqname, pos)
373                         } else if reftile[libref.Tag] != nil {
374                                 log.Printf("dropping reference tile %+v from %s @ %d, tag not unique", tileLibRef{Tag: libref.Tag, Variant: reftile[libref.Tag].variant}, reftile[libref.Tag].seqname, reftile[libref.Tag].pos)
375                                 delete(reftile, libref.Tag)
376                                 log.Printf("dropping reference tile %+v from %s @ %d, tag not unique", libref, seqname, pos)
377                                 isdup[libref.Tag] = true
378                         } else {
379                                 reftile[libref.Tag] = &reftileinfo{
380                                         seqname:  seqname,
381                                         variant:  libref.Variant,
382                                         tiledata: tiledata,
383                                         pos:      pos,
384                                         nexttag:  -1,
385                                 }
386                                 if lastreftag >= 0 {
387                                         reftile[lastreftag].nexttag = libref.Tag
388                                 }
389                                 lastreftag = libref.Tag
390                         }
391                         pos += len(tiledata) - taglen
392                 }
393                 log.Printf("... %s done, len %d", seqname, pos+taglen)
394         }
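        // reftile now maps each usable tag to its reference tile
        // (duplicate-tag tiles have been dropped), and nexttag chains
        // consecutive tiles along each chromosome so spanning tile
        // variants can be stitched against the reference later.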
395
396         var mask *mask
397         if *regionsFilename != "" {
398                 log.Printf("loading regions from %s", *regionsFilename)
399                 mask, err = makeMask(*regionsFilename, *expandRegions)
400                 if err != nil {
401                         return err
402                 }
403                 log.Printf("before applying mask, len(reftile) == %d", len(reftile))
404                 log.Printf("marking reftile entries excluded for regions outside %d intervals", mask.Len())
405                 for _, rt := range reftile {
406                         if !mask.Check(strings.TrimPrefix(rt.seqname, "chr"), rt.pos, rt.pos+len(rt.tiledata)) {
407                                 rt.excluded = true
408                         }
409                 }
410                 log.Printf("after applying mask, len(reftile) == %d", len(reftile))
411         }
412
413         type hgvsColSet map[hgvs.Variant][2][]int8
414         encodeHGVS := throttle{Max: len(refseq)}
415         encodeHGVSTodo := map[string]chan hgvsColSet{}
416         tmpHGVSCols := map[string]*os.File{}
417         if *hgvsChunked {
418                 for seqname := range refseq {
419                         var f *os.File
420                         f, err = os.Create(*outputDir + "/tmp." + seqname + ".gob")
421                         if err != nil {
422                                 return err
423                         }
424                         defer os.Remove(f.Name())
425                         bufw := bufio.NewWriterSize(f, 1<<24)
426                         enc := gob.NewEncoder(bufw)
427                         tmpHGVSCols[seqname] = f
428                         todo := make(chan hgvsColSet, 128)
429                         encodeHGVSTodo[seqname] = todo
430                         encodeHGVS.Go(func() error {
431                                 for colset := range todo {
432                                         err := enc.Encode(colset)
433                                         if err != nil {
434                                                 encodeHGVS.Report(err)
435                                                 for range todo {
436                                                 }
437                                                 return err
438                                         }
439                                 }
440                                 return bufw.Flush()
441                         })
442                 }
443         }
444
445         var toMerge [][]int16
446         if *mergeOutput || *hgvsSingle {
447                 toMerge = make([][]int16, len(infiles))
448         }
449         var onehotIndirect [][2][]uint32 // [chunkIndex][axis][index]
450         var onehotChunkSize []uint32
451         var onehotXrefs [][]onehotXref
452         if *onehotSingle || *onlyPCA {
453                 onehotIndirect = make([][2][]uint32, len(infiles))
454                 onehotChunkSize = make([]uint32, len(infiles))
455                 onehotXrefs = make([][]onehotXref, len(infiles))
456         }
457         chunkStartTag := make([]tagID, len(infiles))
458
459         throttleMem := throttle{Max: cmd.threads} // TODO: estimate using mem and data size
460         throttleNumpyMem := throttle{Max: cmd.threads/2 + 1}
461         log.Info("generating annotations and numpy matrix for each slice")
462         var errSkip = errors.New("skip infile")
463         var done int64
464         for infileIdx, infile := range infiles {
465                 infileIdx, infile := infileIdx, infile
466                 throttleMem.Go(func() error {
467                         seq := make(map[tagID][]TileVariant, 50000)
468                         cgs := make(map[string]CompactGenome, len(cmd.cgnames))
469                         f, err := open(infile)
470                         if err != nil {
471                                 return err
472                         }
473                         defer f.Close()
474                         log.Infof("%04d: reading %s", infileIdx, infile)
475                         err = DecodeLibrary(f, strings.HasSuffix(infile, ".gz"), func(ent *LibraryEntry) error {
476                                 for _, tv := range ent.TileVariants {
477                                         if tv.Ref {
478                                                 continue
479                                         }
480                                         // Skip tile with no
481                                         // corresponding ref tile, if
482                                         // mask is in play (we can't
483                                         // determine coordinates for
484                                         // these)
485                                         if mask != nil && reftile[tv.Tag] == nil {
486                                                 continue
487                                         }
488                                         // Skip tile whose
489                                         // corresponding ref tile is
490                                         // outside target regions --
491                                         // unless it's a potential
492                                         // spanning tile.
493                                         if mask != nil && reftile[tv.Tag].excluded &&
494                                                 (int(tv.Tag+1) >= len(tagset) ||
495                                                         (bytes.HasSuffix(tv.Sequence, tagset[tv.Tag+1]) && reftile[tv.Tag+1] != nil && !reftile[tv.Tag+1].excluded)) {
496                                                 continue
497                                         }
498                                         if tv.Tag == cmd.debugTag {
499                                                 log.Printf("infile %d %s tag %d variant %d hash %x", infileIdx, infile, tv.Tag, tv.Variant, tv.Blake2b[:3])
500                                         }
501                                         variants := seq[tv.Tag]
502                                         if len(variants) == 0 {
503                                                 variants = make([]TileVariant, 100)
504                                         }
505                                         for len(variants) <= int(tv.Variant) {
506                                                 variants = append(variants, TileVariant{})
507                                         }
508                                         variants[int(tv.Variant)] = tv
509                                         seq[tv.Tag] = variants
510                                 }
511                                 for _, cg := range ent.CompactGenomes {
512                                         if cmd.filter.MaxTag >= 0 && cg.StartTag > tagID(cmd.filter.MaxTag) {
513                                                 return errSkip
514                                         }
515                                         if !cgnamemap[cg.Name] {
516                                                 continue
517                                         }
518                                         // pad to full slice size
519                                         // to avoid out-of-bounds
520                                         // checks later
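                                        // (cg.Variants holds two
                                        // entries per tag, one per
                                        // allele; zero entries mean
                                        // no tile variant was called
                                        // there, e.g. a spanning
                                        // tile.)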
521                                         if sliceSize := 2 * int(cg.EndTag-cg.StartTag); len(cg.Variants) < sliceSize {
522                                                 cg.Variants = append(cg.Variants, make([]tileVariantID, sliceSize-len(cg.Variants))...)
523                                         }
524                                         cgs[cg.Name] = cg
525                                 }
526                                 return nil
527                         })
528                         if err == errSkip {
529                                 return nil
530                         } else if err != nil {
531                                 return fmt.Errorf("%04d: DecodeLibrary(%s): %w", infileIdx, infile, err)
532                         }
533                         tagstart := cgs[cmd.cgnames[0]].StartTag
534                         tagend := cgs[cmd.cgnames[0]].EndTag
535                         chunkStartTag[infileIdx] = tagstart
536
537                         // TODO: filters
538
539                         log.Infof("%04d: renumber/dedup variants for tags %d-%d", infileIdx, tagstart, tagend)
540                         variantRemap := make([][]tileVariantID, tagend-tagstart)
541                         throttleCPU := throttle{Max: runtime.GOMAXPROCS(0)}
542                         for tag, variants := range seq {
543                                 tag, variants := tag, variants
544                                 throttleCPU.Go(func() error {
545                                         alleleCoverage := 0
546                                         count := make(map[[blake2b.Size256]byte]int, len(variants))
547
548                                         rt := reftile[tag]
549                                         if rt != nil {
550                                                 count[blake2b.Sum256(rt.tiledata)] = 0
551                                         }
552
553                                         for cgname, cg := range cgs {
554                                                 idx := int(tag-tagstart) * 2
555                                                 for allele := 0; allele < 2; allele++ {
556                                                         v := cg.Variants[idx+allele]
557                                                         if v > 0 && len(variants[v].Sequence) > 0 {
558                                                                 count[variants[v].Blake2b]++
559                                                                 alleleCoverage++
560                                                         }
561                                                         if v > 0 && tag == cmd.debugTag {
562                                                                 log.Printf("tag %d cg %s allele %d tv %d hash %x count is now %d", tag, cgname, allele, v, variants[v].Blake2b[:3], count[variants[v].Blake2b])
563                                                         }
564                                                 }
565                                         }
566                                         if alleleCoverage < cmd.minCoverage*2 {
567                                                 idx := int(tag-tagstart) * 2
568                                                 for _, cg := range cgs {
569                                                         cg.Variants[idx] = 0
570                                                         cg.Variants[idx+1] = 0
571                                                 }
572                                                 if tag == cmd.debugTag {
573                                                         log.Printf("tag %d alleleCoverage %d < min %d, sample data wiped", tag, alleleCoverage, cmd.minCoverage*2)
574                                                 }
575                                                 return nil
576                                         }
577
578                                         // hash[i] will be the hash of
579                                         // the variant(s) that should
580                                         // be at rank i (0-based).
581                                         hash := make([][blake2b.Size256]byte, 0, len(count))
582                                         for b := range count {
583                                                 hash = append(hash, b)
584                                         }
585                                         sort.Slice(hash, func(i, j int) bool {
586                                                 bi, bj := &hash[i], &hash[j]
587                                                 if ci, cj := count[*bi], count[*bj]; ci != cj {
588                                                         return ci > cj
589                                                 } else {
590                                                         return bytes.Compare((*bi)[:], (*bj)[:]) < 0
591                                                 }
592                                         })
593                                         // rank[b] will be the 1-based
594                                         // new variant number for
595                                         // variants whose hash is b.
596                                         rank := make(map[[blake2b.Size256]byte]tileVariantID, len(hash))
597                                         for i, h := range hash {
598                                                 rank[h] = tileVariantID(i + 1)
599                                         }
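                                        // Illustrative example: if the
                                        // counts are {hA: 12, hB: 12,
                                        // hC: 3} and hA sorts before
                                        // hB byte-wise, the new
                                        // variant numbers are hA=1,
                                        // hB=2, hC=3, so variant 1 is
                                        // always the most common
                                        // variant at this tag in this
                                        // chunk (ties broken by hash).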
600                                         if tag == cmd.debugTag {
601                                                 for h, r := range rank {
602                                                         log.Printf("tag %d rank(%x) = %v", tag, h[:3], r)
603                                                 }
604                                         }
605                                         // remap[v] will be the new
606                                         // variant number for original
607                                         // variant number v.
608                                         remap := make([]tileVariantID, len(variants))
609                                         for i, tv := range variants {
610                                                 remap[i] = rank[tv.Blake2b]
611                                         }
612                                         if tag == cmd.debugTag {
613                                                 for in, out := range remap {
614                                                         if out > 0 {
615                                                                 log.Printf("tag %d remap %d => %d", tag, in, out)
616                                                         }
617                                                 }
618                                         }
619                                         variantRemap[tag-tagstart] = remap
620                                         if rt != nil {
621                                                 refrank := rank[blake2b.Sum256(rt.tiledata)]
622                                                 if tag == cmd.debugTag {
623                                                         log.Printf("tag %d reftile variant %d => %d", tag, rt.variant, refrank)
624                                                 }
625                                                 rt.variant = refrank
626                                         }
627                                         return nil
628                                 })
629                         }
630                         throttleCPU.Wait()
631
632                         var onehotChunk [][]int8
633                         var onehotXref []onehotXref
634
635                         var annotationsFilename string
636                         if *onlyPCA {
637                                 annotationsFilename = "/dev/null"
638                         } else {
639                                 annotationsFilename = fmt.Sprintf("%s/matrix.%04d.annotations.csv", *outputDir, infileIdx)
640                                 log.Infof("%04d: writing %s", infileIdx, annotationsFilename)
641                         }
642                         annof, err := os.Create(annotationsFilename)
643                         if err != nil {
644                                 return err
645                         }
646                         annow := bufio.NewWriterSize(annof, 1<<20)
647                         outcol := 0
648                         for tag := tagstart; tag < tagend; tag++ {
649                                 rt := reftile[tag]
650                                 if rt == nil && mask != nil {
651                                         // With no ref tile, we don't
652                                         // have coordinates to say
653                                         // this is in the desired
654                                         // regions -- so it's not.
655                                         // TODO: handle ref spanning
656                                         // tile case.
657                                         continue
658                                 }
659                                 if rt != nil && rt.excluded {
660                                         // TODO: don't skip yet --
661                                         // first check for spanning
662                                         // tile variants that
663                                         // intersect non-excluded ref
664                                         // tiles.
665                                         continue
666                                 }
667                                 if cmd.filter.MaxTag >= 0 && tag > tagID(cmd.filter.MaxTag) {
668                                         break
669                                 }
670                                 remap := variantRemap[tag-tagstart]
671                                 if remap == nil {
672                                         // was not assigned above (allele
673                                         // coverage below minCoverage)
674                                         outcol++
675                                         continue
676                                 }
677                                 maxv := tileVariantID(0)
678                                 for _, v := range remap {
679                                         if maxv < v {
680                                                 maxv = v
681                                         }
682                                 }
683                                 if *onehotChunked || *onehotSingle || *onlyPCA {
684                                         onehot, xrefs := cmd.tv2homhet(cgs, maxv, remap, tag, tagstart, seq)
685                                         if tag == cmd.debugTag {
686                                                 log.WithFields(logrus.Fields{
687                                                         "onehot": onehot,
688                                                         "xrefs":  xrefs,
689                                                 }).Info("tv2homhet()")
690                                         }
691                                         onehotChunk = append(onehotChunk, onehot...)
692                                         onehotXref = append(onehotXref, xrefs...)
693                                 }
694                                 if *onlyPCA {
695                                         outcol++
696                                         continue
697                                 }
698                                 if rt == nil {
699                                         // Reference does not use any
700                                         // variant of this tile
701                                         //
702                                         // TODO: diff against the
703                                         // relevant portion of the
704                                         // ref's spanning tile
705                                         outcol++
706                                         continue
707                                 }
708                                 fmt.Fprintf(annow, "%d,%d,%d,=,%s,%d,,,\n", tag, outcol, rt.variant, rt.seqname, rt.pos)
709                                 variants := seq[tag]
710                                 reftilestr := strings.ToUpper(string(rt.tiledata))
711
712                                 done := make([]bool, maxv+1)
713                                 variantDiffs := make([][]hgvs.Variant, maxv+1)
714                                 for v, tv := range variants {
715                                         v := remap[v]
716                                         if v == 0 || v == rt.variant || done[v] {
717                                                 continue
718                                         } else {
719                                                 done[v] = true
720                                         }
721                                         if len(tv.Sequence) < taglen {
722                                                 continue
723                                         }
724                                         // if reftilestr doesn't end
725                                         // in the same tag as tv,
726                                         // extend reftilestr with
727                                         // following ref tiles until
728                                         // it does (up to an arbitrary
729                                         // sanity-check limit)
730                                         reftilestr := reftilestr
731                                         endtagstr := strings.ToUpper(string(tv.Sequence[len(tv.Sequence)-taglen:]))
732                                         for i, rt := 0, rt; i < annotationMaxTileSpan && !strings.HasSuffix(reftilestr, endtagstr) && rt.nexttag >= 0; i++ {
733                                                 rt = reftile[rt.nexttag]
734                                                 if rt == nil {
735                                                         break
736                                                 }
737                                                 reftilestr += strings.ToUpper(string(rt.tiledata[taglen:]))
738                                         }
739                                         if mask != nil && !mask.Check(strings.TrimPrefix(rt.seqname, "chr"), rt.pos, rt.pos+len(reftilestr)) {
740                                                 continue
741                                         }
742                                         if !strings.HasSuffix(reftilestr, endtagstr) {
743                                                 fmt.Fprintf(annow, "%d,%d,%d,,%s,%d,,,\n", tag, outcol, v, rt.seqname, rt.pos)
744                                                 continue
745                                         }
746                                         if lendiff := len(reftilestr) - len(tv.Sequence); lendiff < -1000 || lendiff > 1000 {
747                                                 fmt.Fprintf(annow, "%d,%d,%d,,%s,%d,,,\n", tag, outcol, v, rt.seqname, rt.pos)
748                                                 continue
749                                         }
750                                         diffs, _ := hgvs.Diff(reftilestr, strings.ToUpper(string(tv.Sequence)), 0)
751                                         for i := range diffs {
752                                                 diffs[i].Position += rt.pos
753                                         }
754                                         for _, diff := range diffs {
755                                                 fmt.Fprintf(annow, "%d,%d,%d,%s:g.%s,%s,%d,%s,%s,%s\n", tag, outcol, v, rt.seqname, diff.String(), rt.seqname, diff.Position, diff.Ref, diff.New, diff.Left)
756                                         }
757                                         if *hgvsChunked {
758                                                 variantDiffs[v] = diffs
759                                         }
760                                 }
761                                 if *hgvsChunked {
762                                         // We can now determine, for each HGVS
763                                         // variant (diff) in this reftile
764                                         // region, whether a given genome
765                                         // phase/allele (1) has the variant, (0) has
766                                         // =ref or a different variant in that
767                                         // position, or (-1) is lacking
768                                         // coverage / couldn't be diffed.
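                                        // Illustrative example: if
                                        // sample row 7, phase 0, has a
                                        // tile variant whose diff list
                                        // includes chr1:g.12345A>G,
                                        // then hgvsCol[thatDiff][0][7]
                                        // is set to 1 below; rows
                                        // whose variant matched the
                                        // reference stay 0, and rows
                                        // with no usable diff get -1.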
769                                         hgvsCol := hgvsColSet{}
770                                         for _, diffs := range variantDiffs {
771                                                 for _, diff := range diffs {
772                                                         if _, ok := hgvsCol[diff]; ok {
773                                                                 continue
774                                                         }
775                                                         hgvsCol[diff] = [2][]int8{
776                                                                 make([]int8, len(cmd.cgnames)),
777                                                                 make([]int8, len(cmd.cgnames)),
778                                                         }
779                                                 }
780                                         }
781                                         for row, name := range cmd.cgnames {
782                                                 variants := cgs[name].Variants[(tag-tagstart)*2:]
783                                                 for ph := 0; ph < 2; ph++ {
784                                                         v := variants[ph]
785                                                         if int(v) >= len(remap) {
786                                                                 v = 0
787                                                         } else {
788                                                                 v = remap[v]
789                                                         }
790                                                         if v == rt.variant {
791                                                                 // hgvsCol[*][ph][row] is already 0
792                                                         } else if len(variantDiffs[v]) == 0 {
793                                                                 // lacking coverage / couldn't be diffed
794                                                                 for _, col := range hgvsCol {
795                                                                         col[ph][row] = -1
796                                                                 }
797                                                         } else {
798                                                                 for _, diff := range variantDiffs[v] {
799                                                                         hgvsCol[diff][ph][row] = 1
800                                                                 }
801                                                         }
802                                                 }
803                                         }
804                                         for diff, colpair := range hgvsCol {
805                                                 allele2homhet(colpair)
806                                                 if !cmd.filterHGVScolpair(colpair) {
807                                                         delete(hgvsCol, diff)
808                                                 }
809                                         }
810                                         if len(hgvsCol) > 0 {
811                                                 encodeHGVSTodo[rt.seqname] <- hgvsCol
812                                         }
813                                 }
814                                 outcol++
815                         }
816                         err = annow.Flush()
817                         if err != nil {
818                                 return err
819                         }
820                         err = annof.Close()
821                         if err != nil {
822                                 return err
823                         }
824
825                         if *onehotChunked {
826                                 // transpose onehotChunk[col][row] to numpy[row*ncols+col]
827                                 rows := len(cmd.cgnames)
828                                 cols := len(onehotChunk)
829                                 log.Infof("%04d: preparing onehot numpy (rows=%d, cols=%d, mem=%d)", infileIdx, rows, cols, rows*cols)
830                                 throttleNumpyMem.Acquire()
831                                 out := onehotcols2int8(onehotChunk)
832                                 fnm := fmt.Sprintf("%s/onehot.%04d.npy", *outputDir, infileIdx)
833                                 err = writeNumpyInt8(fnm, out, rows, cols)
834                                 if err != nil {
835                                         return err
836                                 }
837                                 fnm = fmt.Sprintf("%s/onehot-columns.%04d.npy", *outputDir, infileIdx)
838                                 err = writeNumpyInt32(fnm, onehotXref2int32(onehotXref), 4, len(onehotXref))
839                                 if err != nil {
840                                         return err
841                                 }
842                                 debug.FreeOSMemory()
843                                 throttleNumpyMem.Release()
844                         }
845                         if *onehotSingle || *onlyPCA {
846                                 onehotIndirect[infileIdx] = onehotChunk2Indirect(onehotChunk)
847                                 onehotChunkSize[infileIdx] = uint32(len(onehotChunk))
848                                 onehotXrefs[infileIdx] = onehotXref
849                                 n := len(onehotIndirect[infileIdx][0])
850                                 log.Infof("%04d: keeping onehot coordinates in memory (n=%d, mem=%d)", infileIdx, n, n*8*2)
851                         }
852                         if !(*onehotSingle || *onehotChunked || *onlyPCA) || *mergeOutput || *hgvsSingle {
853                                 log.Infof("%04d: preparing numpy (rows=%d, cols=%d)", infileIdx, len(cmd.cgnames), 2*outcol)
854                                 throttleNumpyMem.Acquire()
855                                 rows := len(cmd.cgnames)
856                                 cols := 2 * outcol
857                                 out := make([]int16, rows*cols)
858                                 for row, name := range cmd.cgnames {
859                                         outidx := row * cols
860                                         for col, v := range cgs[name].Variants {
861                                                 tag := tagstart + tagID(col/2)
862                                                 if cmd.filter.MaxTag >= 0 && tag > tagID(cmd.filter.MaxTag) {
863                                                         break
864                                                 }
865                                                 if rt := reftile[tag]; rt == nil || rt.excluded {
866                                                         continue
867                                                 }
868                                                 if v == 0 {
869                                                         out[outidx] = 0 // tag not found / spanning tile
870                                                 } else if variants, ok := seq[tag]; ok && int(v) < len(variants) && len(variants[v].Sequence) > 0 {
871                                                         out[outidx] = int16(variantRemap[tag-tagstart][v])
872                                                 } else {
873                                                         out[outidx] = -1 // low quality tile variant
874                                                 }
875                                                 if tag == cmd.debugTag {
876                                                         log.Printf("tag %d row %d col %d outidx %d v %d out %d", tag, row, col, outidx, v, out[outidx])
877                                                 }
878                                                 outidx++
879                                         }
880                                 }
881                                 seq = nil
882                                 cgs = nil
883                                 debug.FreeOSMemory()
884                                 throttleNumpyMem.Release()
885                                 if *mergeOutput || *hgvsSingle {
886                                         log.Infof("%04d: matrix fragment %d rows x %d cols", infileIdx, rows, cols)
887                                         toMerge[infileIdx] = out
888                                 }
889                                 if !*mergeOutput && !*onehotChunked && !*onehotSingle {
890                                         fnm := fmt.Sprintf("%s/matrix.%04d.npy", *outputDir, infileIdx)
891                                         err = writeNumpyInt16(fnm, out, rows, cols)
892                                         if err != nil {
893                                                 return err
894                                         }
895                                 }
896                         }
897                         debug.FreeOSMemory()
898                         log.Infof("%s: done (%d/%d)", infile, int(atomic.AddInt64(&done, 1)), len(infiles))
899                         return nil
900                 })
901         }
902         if err = throttleMem.Wait(); err != nil {
903                 return err
904         }
905
906         if *hgvsChunked {
907                 log.Info("flushing hgvsCols temp files")
908                 for seqname := range refseq {
909                         close(encodeHGVSTodo[seqname])
910                 }
911                 err = encodeHGVS.Wait()
912                 if err != nil {
913                         return err
914                 }
915                 for seqname := range refseq {
916                         log.Infof("%s: reading hgvsCols from temp file", seqname)
917                         f := tmpHGVSCols[seqname]
918                         _, err = f.Seek(0, io.SeekStart)
919                         if err != nil {
920                                 return err
921                         }
922                         var hgvsCols hgvsColSet
923                         dec := gob.NewDecoder(bufio.NewReaderSize(f, 1<<24))
924                         for err == nil {
925                                 err = dec.Decode(&hgvsCols)
926                         }
927                         if err != io.EOF {
928                                 return err
929                         }
930                         log.Infof("%s: sorting %d hgvs variants", seqname, len(hgvsCols))
931                         variants := make([]hgvs.Variant, 0, len(hgvsCols))
932                         for v := range hgvsCols {
933                                 variants = append(variants, v)
934                         }
935                         sort.Slice(variants, func(i, j int) bool {
936                                 vi, vj := &variants[i], &variants[j]
937                                 if vi.Position != vj.Position {
938                                         return vi.Position < vj.Position
939                                 } else if vi.Ref != vj.Ref {
940                                         return vi.Ref < vj.Ref
941                                 } else {
942                                         return vi.New < vj.New
943                                 }
944                         })
945                         rows := len(cmd.cgnames)
946                         cols := len(variants) * 2
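                        // two columns per HGVS variant: the [2][]int8
                        // column pair stored for it in hgvsColSet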
947                         log.Infof("%s: building hgvs matrix (rows=%d, cols=%d, mem=%d)", seqname, rows, cols, rows*cols)
948                         out := make([]int8, rows*cols)
949                         for varIdx, variant := range variants {
950                                 hgvsCols := hgvsCols[variant]
951                                 for row := range cmd.cgnames {
952                                         for ph := 0; ph < 2; ph++ {
953                                                 out[row*cols+varIdx*2+ph] = hgvsCols[ph][row]
954                                         }
955                                 }
956                         }
957                         err = writeNumpyInt8(fmt.Sprintf("%s/hgvs.%s.npy", *outputDir, seqname), out, rows, cols)
958                         if err != nil {
959                                 return err
960                         }
961                         out = nil
962
963                         fnm := fmt.Sprintf("%s/hgvs.%s.annotations.csv", *outputDir, seqname)
964                         log.Infof("%s: writing hgvs column labels to %s", seqname, fnm)
965                         var hgvsLabels bytes.Buffer
966                         for varIdx, variant := range variants {
967                                 fmt.Fprintf(&hgvsLabels, "%d,%s:g.%s\n", varIdx, seqname, variant.String())
968                         }
969                         err = ioutil.WriteFile(fnm, hgvsLabels.Bytes(), 0666)
970                         if err != nil {
971                                 return err
972                         }
973                 }
974         }
975
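        // When merging (or when hgvsSingle is set), paste the per-chunk
        // int16 fragments side by side into one genomes x columns
        // matrix, rewrite the per-chunk annotation files into a single
        // matrix.annotations.csv with shifted column indices, and
        // accumulate per-HGVS-variant genotype columns for hgvs.npy.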
976         if *mergeOutput || *hgvsSingle {
977                 var annow *bufio.Writer
978                 var annof *os.File
979                 if *mergeOutput {
980                         annoFilename := fmt.Sprintf("%s/matrix.annotations.csv", *outputDir)
981                         annof, err = os.Create(annoFilename)
982                         if err != nil {
983                                 return err
984                         }
985                         annow = bufio.NewWriterSize(annof, 1<<20)
986                 }
987
988                 rows := len(cmd.cgnames)
989                 cols := 0
990                 for _, chunk := range toMerge {
991                         cols += len(chunk) / rows
992                 }
993                 log.Infof("merging output matrix (rows=%d, cols=%d, mem=%d) and annotations", rows, cols, rows*cols*2)
994                 var out []int16
995                 if *mergeOutput {
996                         out = make([]int16, rows*cols)
997                 }
998                 hgvsCols := map[string][2][]int16{} // hgvs -> [[g0,g1,g2,...], [g0,g1,g2,...]] (slice of genomes for each phase)
999                 startcol := 0
1000                 for outIdx, chunk := range toMerge {
1001                         chunkcols := len(chunk) / rows
1002                         if *mergeOutput {
1003                                 for row := 0; row < rows; row++ {
1004                                         copy(out[row*cols+startcol:], chunk[row*chunkcols:(row+1)*chunkcols])
1005                                 }
1006                         }
1007                         toMerge[outIdx] = nil
1008
1009                         annotationsFilename := fmt.Sprintf("%s/matrix.%04d.annotations.csv", *outputDir, outIdx)
1010                         log.Infof("reading %s", annotationsFilename)
1011                         buf, err := os.ReadFile(annotationsFilename)
1012                         if err != nil {
1013                                 return err
1014                         }
1015                         if *mergeOutput {
1016                                 err = os.Remove(annotationsFilename)
1017                                 if err != nil {
1018                                         return err
1019                                 }
1020                         }
1021                         for _, line := range bytes.Split(buf, []byte{'\n'}) {
1022                                 if len(line) == 0 {
1023                                         continue
1024                                 }
1025                                 fields := bytes.SplitN(line, []byte{','}, 9)
1026                                 tag, _ := strconv.Atoi(string(fields[0]))
1027                                 incol, _ := strconv.Atoi(string(fields[1]))
1028                                 tileVariant, _ := strconv.Atoi(string(fields[2]))
1029                                 hgvsID := string(fields[3])
1030                                 seqname := string(fields[4])
1031                                 pos, _ := strconv.Atoi(string(fields[5]))
1032                                 refseq := fields[6]
1033                                 if hgvsID == "" {
1034                                         // Null entry for un-diffable
1035                                         // tile variant
1036                                         continue
1037                                 }
1038                                 if hgvsID == "=" {
1039                                         // Null entry for ref tile
1040                                         continue
1041                                 }
1042                                 if mask != nil && !mask.Check(strings.TrimPrefix(seqname, "chr"), pos, pos+len(refseq)) {
1043                                         // The tile intersects one of
1044                                         // the selected regions, but
1045                                         // this particular HGVS
1046                                         // variant does not.
1047                                         continue
1048                                 }
1049                                 hgvsColPair := hgvsCols[hgvsID]
1050                                 if hgvsColPair[0] == nil {
1051                                         // values in new columns start
1052                                         // out as -1 ("no data yet")
1053                                         // or 0 ("=ref") here, may
1054                                         // change to 1 ("hgvs variant
1055                                         // present") below, either on
1056                                         // this line or a future line.
1057                                         hgvsColPair = [2][]int16{make([]int16, len(cmd.cgnames)), make([]int16, len(cmd.cgnames))}
1058                                         rt, ok := reftile[tagID(tag)]
1059                                         if !ok {
1060                                                 err = fmt.Errorf("bug: seeing annotations for tag %d, but it has no reftile entry", tag)
1061                                                 return err
1062                                         }
1063                                         for ph := 0; ph < 2; ph++ {
1064                                                 for row := 0; row < rows; row++ {
1065                                                         v := chunk[row*chunkcols+incol*2+ph]
1066                                                         if tileVariantID(v) == rt.variant {
1067                                                                 hgvsColPair[ph][row] = 0
1068                                                         } else {
1069                                                                 hgvsColPair[ph][row] = -1
1070                                                         }
1071                                                 }
1072                                         }
1073                                         hgvsCols[hgvsID] = hgvsColPair
1074                                         if annow != nil {
1075                                                 hgvsref := hgvs.Variant{
1076                                                         Position: pos,
1077                                                         Ref:      string(refseq),
1078                                                         New:      string(refseq),
1079                                                 }
1080                                                 fmt.Fprintf(annow, "%d,%d,%d,%s:g.%s,%s,%d,%s,%s,%s\n", tag, incol+startcol/2, rt.variant, seqname, hgvsref.String(), seqname, pos, refseq, refseq, fields[8])
1081                                         }
1082                                 }
1083                                 if annow != nil {
1084                                         fmt.Fprintf(annow, "%d,%d,%d,%s,%s,%d,%s,%s,%s\n", tag, incol+startcol/2, tileVariant, hgvsID, seqname, pos, refseq, fields[7], fields[8])
1085                                 }
1086                                 for ph := 0; ph < 2; ph++ {
1087                                         for row := 0; row < rows; row++ {
1088                                                 v := chunk[row*chunkcols+incol*2+ph]
1089                                                 if int(v) == tileVariant {
1090                                                         hgvsColPair[ph][row] = 1
1091                                                 }
1092                                         }
1093                                 }
1094                         }
1095
1096                         startcol += chunkcols
1097                 }
1098                 if *mergeOutput {
1099                         err = annow.Flush()
1100                         if err != nil {
1101                                 return err
1102                         }
1103                         err = annof.Close()
1104                         if err != nil {
1105                                 return err
1106                         }
1107                         err = writeNumpyInt16(fmt.Sprintf("%s/matrix.npy", *outputDir), out, rows, cols)
1108                         if err != nil {
1109                                 return err
1110                         }
1111                 }
1112                 out = nil
1113
1114                 if *hgvsSingle {
1115                         cols = len(hgvsCols) * 2
1116                         log.Printf("building hgvs-based matrix: %d rows x %d cols", rows, cols)
1117                         out = make([]int16, rows*cols)
1118                         hgvsIDs := make([]string, 0, cols/2)
1119                         for hgvsID := range hgvsCols {
1120                                 hgvsIDs = append(hgvsIDs, hgvsID)
1121                         }
1122                         sort.Strings(hgvsIDs)
1123                         var hgvsLabels bytes.Buffer
1124                         for idx, hgvsID := range hgvsIDs {
1125                                 fmt.Fprintf(&hgvsLabels, "%d,%s\n", idx, hgvsID)
1126                                 for ph := 0; ph < 2; ph++ {
1127                                         hgvscol := hgvsCols[hgvsID][ph]
1128                                         for row, val := range hgvscol {
1129                                                 out[row*cols+idx*2+ph] = val
1130                                         }
1131                                 }
1132                         }
1133                         err = writeNumpyInt16(fmt.Sprintf("%s/hgvs.npy", *outputDir), out, rows, cols)
1134                         if err != nil {
1135                                 return err
1136                         }
1137
1138                         fnm := fmt.Sprintf("%s/hgvs.annotations.csv", *outputDir)
1139                         log.Printf("writing hgvs labels: %s", fnm)
1140                         err = ioutil.WriteFile(fnm, hgvsLabels.Bytes(), 0777)
1141                         if err != nil {
1142                                 return err
1143                         }
1144                 }
1145         }
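        // Concatenate the per-chunk sparse one-hot fragments into one
        // pair of index arrays, shifting each chunk's column indices by
        // the running chunk offset. onehot.npy is written with shape
        // (2, nnz): row 0 holds genome (row) indices, row 1 holds the
        // corresponding one-hot column indices.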
1146         if *onehotSingle || *onlyPCA {
1147                 nzCount := 0
1148                 for _, part := range onehotIndirect {
1149                         nzCount += len(part[0])
1150                 }
1151                 onehot := make([]uint32, nzCount*2) // [r,r,r,...,c,c,c,...]
1152                 var xrefs []onehotXref
1153                 chunkOffset := uint32(0)
1154                 outcol := 0
1155                 for i, part := range onehotIndirect {
1156                         for j := range part[1] {
1157                                 part[1][j] += chunkOffset
1158                         }
1159                         copy(onehot[outcol:], part[0])
1160                         copy(onehot[outcol+nzCount:], part[1])
1161                         xrefs = append(xrefs, onehotXrefs[i]...)
1162
1163                         outcol += len(part[0])
1164                         chunkOffset += onehotChunkSize[i]
1165
1166                         part[0] = nil
1167                         part[1] = nil
1168                         onehotXrefs[i] = nil
1169                         debug.FreeOSMemory()
1170                 }
1171                 if *onehotSingle {
1172                         fnm := fmt.Sprintf("%s/onehot.npy", *outputDir)
1173                         err = writeNumpyUint32(fnm, onehot, 2, nzCount)
1174                         if err != nil {
1175                                 return err
1176                         }
1177                         fnm = fmt.Sprintf("%s/onehot-columns.npy", *outputDir)
1178                         err = writeNumpyInt32(fnm, onehotXref2int32(xrefs), 6, len(xrefs))
1179                         if err != nil {
1180                                 return err
1181                         }
1182                         fnm = fmt.Sprintf("%s/stats.json", *outputDir)
1183                         j, err := json.Marshal(map[string]interface{}{
1184                                 "pvalueCallCount": cmd.pvalueCallCount,
1185                         })
1186                         if err != nil {
1187                                 return err
1188                         }
1189                         err = os.WriteFile(fnm, j, 0777)
1190                         if err != nil {
1191                                 return err
1192                         }
1193                 }
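                // PCA: expand the sparse one-hot data into a dense
                // matrix (when maxPCATiles > 0, striding columns down
                // until there are at most 2*maxPCATiles), fit on the
                // training-set rows only, transform all rows, write
                // pca.npy, and copy the components into sampleInfo so
                // samples.csv can be rewritten with PCA columns.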
1194                 if *onlyPCA {
1195                         cols := 0
1196                         for _, c := range onehot[nzCount:] {
1197                                 if int(c) >= cols {
1198                                         cols = int(c) + 1
1199                                 }
1200                         }
1201                         if cols == 0 {
1202                                 return fmt.Errorf("cannot do PCA: one-hot matrix is empty")
1203                         }
1204                         log.Printf("have %d one-hot cols", cols)
1205                         stride := 1
1206                         for *maxPCATiles > 0 && cols > *maxPCATiles*2 {
1207                                 cols = (cols + 1) / 2
1208                                 stride = stride * 2
1209                         }
1210                         if cols%2 == 1 {
1211                                 // we work with pairs of columns
1212                                 cols++
1213                         }
1214                         log.Printf("creating full matrix (%d rows) and training matrix (%d rows) with %d cols, stride %d", len(cmd.cgnames), cmd.trainingSetSize, cols, stride)
1215                         mtxFull := mat.NewDense(len(cmd.cgnames), cols, nil)
1216                         mtxTrain := mat.NewDense(cmd.trainingSetSize, cols, nil)
1217                         for i, c := range onehot[nzCount:] {
1218                                 if int(c/2)%stride == 0 {
1219                                         outcol := int(c/2)/stride*2 + int(c)%2
1220                                         mtxFull.Set(int(onehot[i]), outcol, 1)
1221                                         if trainRow := cmd.trainingSet[int(onehot[i])]; trainRow >= 0 {
1222                                                 mtxTrain.Set(trainRow, outcol, 1)
1223                                         }
1224                                 }
1225                         }
1226                         log.Print("fitting")
1227                         transformer := nlp.NewPCA(cmd.pcaComponents)
1228                         transformer.Fit(mtxTrain.T())
1229                         log.Printf("transforming")
1230                         pca, err := transformer.Transform(mtxFull.T())
1231                         if err != nil {
1232                                 return err
1233                         }
1234                         pca = pca.T()
1235                         outrows, outcols := pca.Dims()
1236                         log.Printf("copying result to numpy output array: %d rows, %d cols", outrows, outcols)
1237                         out := make([]float64, outrows*outcols)
1238                         for i := 0; i < outrows; i++ {
1239                                 for j := 0; j < outcols; j++ {
1240                                         out[i*outcols+j] = pca.At(i, j)
1241                                 }
1242                         }
1243                         fnm := fmt.Sprintf("%s/pca.npy", *outputDir)
1244                         log.Printf("writing numpy: %s", fnm)
1245                         output, err := os.OpenFile(fnm, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0777)
1246                         if err != nil {
1247                                 return err
1248                         }
1249                         npw, err := gonpy.NewWriter(nopCloser{output})
1250                         if err != nil {
1251                                 return fmt.Errorf("gonpy.NewWriter: %w", err)
1252                         }
1253                         npw.Shape = []int{outrows, outcols}
1254                         err = npw.WriteFloat64(out)
1255                         if err != nil {
1256                                 return fmt.Errorf("WriteFloat64: %w", err)
1257                         }
1258                         err = output.Close()
1259                         if err != nil {
1260                                 return err
1261                         }
1262                         log.Print("done")
1263
1264                         log.Print("copying pca components to sampleInfo")
1265                         for i := range cmd.samples {
1266                                 cmd.samples[i].pcaComponents = make([]float64, outcols)
1267                                 for c := 0; c < outcols; c++ {
1268                                         cmd.samples[i].pcaComponents[c] = pca.At(i, c)
1269                                 }
1270                         }
1271                         log.Print("done")
1272
1273                         err = writeSampleInfo(cmd.samples, *outputDir)
1274                         if err != nil {
1275                                 return err
1276                         }
1277                 }
1278         }
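        // When per-chunk matrices are kept, record the first tag of
        // each chunk so downstream tools can map matrix.NNNN.npy
        // columns back to tag numbers, one CSV line per chunk, e.g.
        // "matrix.0000.npy",0 (illustrative).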
1279         if !*mergeOutput && !*onehotChunked && !*onehotSingle && !*onlyPCA {
1280                 tagoffsetFilename := *outputDir + "/chunk-tag-offset.csv"
1281                 log.Infof("writing tag offsets to %s", tagoffsetFilename)
1282                 var f *os.File
1283                 f, err = os.Create(tagoffsetFilename)
1284                 if err != nil {
1285                         return err
1286                 }
1287                 defer f.Close()
1288                 for idx, offset := range chunkStartTag {
1289                         _, err = fmt.Fprintf(f, "%q,%d\n", fmt.Sprintf("matrix.%04d.npy", idx), offset)
1290                         if err != nil {
1291                                 err = fmt.Errorf("write %s: %w", tagoffsetFilename, err)
1292                                 return err
1293                         }
1294                 }
1295                 err = f.Close()
1296                 if err != nil {
1297                         err = fmt.Errorf("close %s: %w", tagoffsetFilename, err)
1298                         return err
1299                 }
1300         }
1301
1302         return nil
1303 }
1304
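// sampleInfo is one row of samples.csv: the sample ID, case/control
// and training/validation flags, and any previously computed PCA
// components.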
1305 type sampleInfo struct {
1306         id            string
1307         isCase        bool
1308         isControl     bool
1309         isTraining    bool
1310         isValidation  bool
1311         pcaComponents []float64
1312 }
1313
1314 // Read samples.csv file with case/control and training/validation
1315 // flags.
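//
// CaseControl is 1 for case, 0 for control; TrainingValidation is 1
// for training, 0 for validation. The layout matches what
// writeSampleInfo produces, e.g. (illustrative values):
//
//	Index,SampleID,CaseControl,TrainingValidation,PCA0,PCA1
//	0,sample-a,1,1,-0.012345,0.067890
//	1,sample-b,0,0,0.001234,-0.045678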
1316 func loadSampleInfo(samplesFilename string) ([]sampleInfo, error) {
1317         var si []sampleInfo
1318         f, err := open(samplesFilename)
1319         if err != nil {
1320                 return nil, err
1321         }
1322         buf, err := io.ReadAll(f)
1323         f.Close()
1324         if err != nil {
1325                 return nil, err
1326         }
1327         lineNum := 0
1328         for _, csv := range bytes.Split(buf, []byte{'\n'}) {
1329                 lineNum++
1330                 if len(csv) == 0 {
1331                         continue
1332                 }
1333                 split := strings.Split(string(csv), ",")
1334                 if len(split) < 4 {
1335                         return nil, fmt.Errorf("%d fields < 4 in %s line %d: %q", len(split), samplesFilename, lineNum, csv)
1336                 }
1337                 if split[0] == "Index" && split[1] == "SampleID" && split[2] == "CaseControl" && split[3] == "TrainingValidation" {
1338                         continue
1339                 }
1340                 idx, err := strconv.Atoi(split[0])
1341                 if err != nil {
1342                         if lineNum == 1 {
1343                                 return nil, fmt.Errorf("header does not look right: %q", csv)
1344                         }
1345                         return nil, fmt.Errorf("%s line %d: index: %s", samplesFilename, lineNum, err)
1346                 }
1347                 if idx != len(si) {
1348                         return nil, fmt.Errorf("%s line %d: index %d out of order", samplesFilename, lineNum, idx)
1349                 }
1350                 var pcaComponents []float64
1351                 if len(split) > 4 {
1352                         for _, s := range split[4:] {
1353                                 f, err := strconv.ParseFloat(s, 64)
1354                                 if err != nil {
1355                                         return nil, fmt.Errorf("%s line %d: cannot parse float %q: %s", samplesFilename, lineNum, s, err)
1356                                 }
1357                                 pcaComponents = append(pcaComponents, f)
1358                         }
1359                 }
1360                 si = append(si, sampleInfo{
1361                         id:            split[1],
1362                         isCase:        split[2] == "1",
1363                         isControl:     split[2] == "0",
1364                         isTraining:    split[3] == "1",
1365                         isValidation:  split[3] == "0" && len(split[2]) > 0, // count 0 as validation only if CaseControl is present (guards against errant 0s in input)
1366                         pcaComponents: pcaComponents,
1367                 })
1368         }
1369         return si, nil
1370 }
1371
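// writeSampleInfo writes outputDir/samples.csv in the same column
// layout loadSampleInfo accepts, with one PCAn column per component.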
1372 func writeSampleInfo(samples []sampleInfo, outputDir string) error {
1373         fnm := outputDir + "/samples.csv"
1374         log.Infof("writing sample metadata to %s", fnm)
1375         f, err := os.Create(fnm)
1376         if err != nil {
1377                 return err
1378         }
1379         defer f.Close()
1380         pcaLabels := ""
1381         if len(samples) > 0 {
1382                 for i := range samples[0].pcaComponents {
1383                         pcaLabels += fmt.Sprintf(",PCA%d", i)
1384                 }
1385         }
1386         _, err = fmt.Fprintf(f, "Index,SampleID,CaseControl,TrainingValidation%s\n", pcaLabels)
1387         if err != nil {
1388                 return err
1389         }
1390         for i, si := range samples {
1391                 var cc, tv string
1392                 if si.isCase {
1393                         cc = "1"
1394                 } else if si.isControl {
1395                         cc = "0"
1396                 }
1397                 if si.isTraining {
1398                         tv = "1"
1399                 } else if si.isValidation {
1400                         tv = "0"
1401                 }
1402                 var pcavals string
1403                 for _, pcaval := range si.pcaComponents {
1404                         pcavals += fmt.Sprintf(",%f", pcaval)
1405                 }
1406                 _, err = fmt.Fprintf(f, "%d,%s,%s,%s%s\n", i, si.id, cc, tv, pcavals)
1407                 if err != nil {
1408                         return fmt.Errorf("write %s: %w", fnm, err)
1409                 }
1410         }
1411         err = f.Close()
1412         if err != nil {
1413                 return fmt.Errorf("close %s: %w", fnm, err)
1414         }
1415         log.Print("done")
1416         return nil
1417 }
1418
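// filterHGVScolpair reports whether an HGVS column pair (one column
// per phase) should be kept. No-call samples are skipped; the
// remaining samples must number at least minCoverage, and at least one
// phase must have a χ² p-value at or below chi2PValue against the
// case/control labels. A chi2PValue of 1 or more disables the filter.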
1419 func (cmd *sliceNumpy) filterHGVScolpair(colpair [2][]int8) bool {
1420         if cmd.chi2PValue >= 1 {
1421                 return true
1422         }
1423         col0 := make([]bool, 0, len(cmd.chi2Cases))
1424         col1 := make([]bool, 0, len(cmd.chi2Cases))
1425         cases := make([]bool, 0, len(cmd.chi2Cases))
1426         for i, c := range cmd.chi2Cases {
1427                 if colpair[0][i] < 0 {
1428                         continue
1429                 }
1430                 col0 = append(col0, colpair[0][i] != 0)
1431                 col1 = append(col1, colpair[1][i] != 0)
1432                 cases = append(cases, c)
1433         }
1434         return len(cases) >= cmd.minCoverage &&
1435                 (pvalue(col0, cases) <= cmd.chi2PValue || pvalue(col1, cases) <= cmd.chi2PValue)
1436 }
1437
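// The writeNumpy* helpers below share one pattern: create the file,
// wrap it in a large buffered writer, emit a gonpy header with shape
// [rows, cols], write the flattened row-major data, then flush and
// close.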
1438 func writeNumpyUint32(fnm string, out []uint32, rows, cols int) error {
1439         output, err := os.Create(fnm)
1440         if err != nil {
1441                 return err
1442         }
1443         defer output.Close()
1444         bufw := bufio.NewWriterSize(output, 1<<26)
1445         npw, err := gonpy.NewWriter(nopCloser{bufw})
1446         if err != nil {
1447                 return err
1448         }
1449         log.WithFields(log.Fields{
1450                 "filename": fnm,
1451                 "rows":     rows,
1452                 "cols":     cols,
1453                 "bytes":    rows * cols * 4,
1454         }).Infof("writing numpy: %s", fnm)
1455         npw.Shape = []int{rows, cols}
1456         npw.WriteUint32(out)
1457         err = bufw.Flush()
1458         if err != nil {
1459                 return err
1460         }
1461         return output.Close()
1462 }
1463
1464 func writeNumpyInt32(fnm string, out []int32, rows, cols int) error {
1465         output, err := os.Create(fnm)
1466         if err != nil {
1467                 return err
1468         }
1469         defer output.Close()
1470         bufw := bufio.NewWriterSize(output, 1<<26)
1471         npw, err := gonpy.NewWriter(nopCloser{bufw})
1472         if err != nil {
1473                 return err
1474         }
1475         log.WithFields(log.Fields{
1476                 "filename": fnm,
1477                 "rows":     rows,
1478                 "cols":     cols,
1479                 "bytes":    rows * cols * 4,
1480         }).Infof("writing numpy: %s", fnm)
1481         npw.Shape = []int{rows, cols}
1482         npw.WriteInt32(out)
1483         err = bufw.Flush()
1484         if err != nil {
1485                 return err
1486         }
1487         return output.Close()
1488 }
1489
1490 func writeNumpyInt16(fnm string, out []int16, rows, cols int) error {
1491         output, err := os.Create(fnm)
1492         if err != nil {
1493                 return err
1494         }
1495         defer output.Close()
1496         bufw := bufio.NewWriterSize(output, 1<<26)
1497         npw, err := gonpy.NewWriter(nopCloser{bufw})
1498         if err != nil {
1499                 return err
1500         }
1501         log.WithFields(log.Fields{
1502                 "filename": fnm,
1503                 "rows":     rows,
1504                 "cols":     cols,
1505                 "bytes":    rows * cols * 2,
1506         }).Infof("writing numpy: %s", fnm)
1507         npw.Shape = []int{rows, cols}
1508         npw.WriteInt16(out)
1509         err = bufw.Flush()
1510         if err != nil {
1511                 return err
1512         }
1513         return output.Close()
1514 }
1515
1516 func writeNumpyInt8(fnm string, out []int8, rows, cols int) error {
1517         output, err := os.Create(fnm)
1518         if err != nil {
1519                 return err
1520         }
1521         defer output.Close()
1522         bufw := bufio.NewWriterSize(output, 1<<26)
1523         npw, err := gonpy.NewWriter(nopCloser{bufw})
1524         if err != nil {
1525                 return err
1526         }
1527         log.WithFields(log.Fields{
1528                 "filename": fnm,
1529                 "rows":     rows,
1530                 "cols":     cols,
1531                 "bytes":    rows * cols,
1532         }).Infof("writing numpy: %s", fnm)
1533         npw.Shape = []int{rows, cols}
1534         npw.WriteInt8(out)
1535         err = bufw.Flush()
1536         if err != nil {
1537                 return err
1538         }
1539         return output.Close()
1540 }
1541
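// allele2homhet rewrites a pair of per-phase variant-call columns in
// place as hom/het indicator columns:
//
//	both alleles variant        -> (1, 0)  hom
//	exactly one allele variant  -> (0, 1)  het
//	both alleles ref            -> (0, 0)
//	either allele no-call (<0)  -> (-1, -1)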
1542 func allele2homhet(colpair [2][]int8) {
1543         a, b := colpair[0], colpair[1]
1544         for i, av := range a {
1545                 bv := b[i]
1546                 if av < 0 || bv < 0 {
1547                         // no-call
1548                         a[i], b[i] = -1, -1
1549                 } else if av > 0 && bv > 0 {
1550                         // hom
1551                         a[i], b[i] = 1, 0
1552                 } else if av > 0 || bv > 0 {
1553                         // het
1554                         a[i], b[i] = 0, 1
1555                 } else {
1556                         // ref (or a different variant in same position)
1557                         // (this is a no-op) a[i], b[i] = 0, 0
1558                 }
1559         }
1560 }
1561
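// onehotXref is the metadata for one one-hot output column: the tag
// and tile variant it encodes, whether it is the hom or het indicator,
// and the p-value and allele frequency computed from the training set.
// onehotXref2int32 flattens a slice of these into the rows of
// onehot-columns.npy.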
1562 type onehotXref struct {
1563         tag     tagID
1564         variant tileVariantID
1565         hom     bool
1566         pvalue  float64
1567         maf     float64
1568 }
1569
1570 const onehotXrefSize = unsafe.Sizeof(onehotXref{})
1571
1572 // Build onehot matrix (m[tileVariantIndex][genome] == 0 or 1) for all
1573 // variants of a single tile/tag#.
1574 //
1575 // Return nil if no tile variant passes Χ² filter.
1576 func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantID, remap []tileVariantID, tag, chunkstarttag tagID, seq map[tagID][]TileVariant) ([][]int8, []onehotXref) {
1577         if tag == cmd.debugTag {
1578                 tv := make([]tileVariantID, len(cmd.cgnames)*2)
1579                 for i, name := range cmd.cgnames {
1580                         copy(tv[i*2:(i+1)*2], cgs[name].Variants[(tag-chunkstarttag)*2:])
1581                 }
1582                 log.WithFields(logrus.Fields{
1583                         "cgs[i].Variants[tag*2+j]": tv,
1584                         "maxv":                     maxv,
1585                         "remap":                    remap,
1586                         "tag":                      tag,
1587                         "chunkstarttag":            chunkstarttag,
1588                 }).Info("tv2homhet()")
1589         }
1590         if maxv < 1 || (maxv < 2 && !cmd.includeVariant1) {
1591                 // everyone has the most common variant (of the variants we don't drop)
1592                 return nil, nil
1593         }
1594         tagoffset := tag - chunkstarttag
1595         coverage := 0
1596         for _, cg := range cgs {
1597                 alleles := 0
1598                 for _, v := range cg.Variants[tagoffset*2 : tagoffset*2+2] {
1599                         if v > 0 && int(v) < len(seq[tag]) && len(seq[tag][v].Sequence) > 0 {
1600                                 alleles++
1601                         }
1602                 }
1603                 if alleles == 2 {
1604                         coverage++
1605                 }
1606         }
1607         if coverage < cmd.minCoverage {
1608                 return nil, nil
1609         }
1610         // "observed" array for p-value calculation (training set
1611         // only)
1612         obs := make([][]bool, (maxv+1)*2) // 2 slices (hom + het) for each variant#
1613         // one-hot output (all samples)
1614         outcols := make([][]int8, (maxv+1)*2)
1615         for i := range obs {
1616                 obs[i] = make([]bool, cmd.trainingSetSize)
1617                 outcols[i] = make([]int8, len(cmd.cgnames))
1618         }
1619         for cgid, name := range cmd.cgnames {
1620                 tsid := cmd.trainingSet[cgid]
1621                 cgvars := cgs[name].Variants[tagoffset*2:]
1622                 tv0, tv1 := remap[cgvars[0]], remap[cgvars[1]]
1623                 for v := tileVariantID(1); v <= maxv; v++ {
1624                         if tv0 == v && tv1 == v {
1625                                 if tsid >= 0 {
1626                                         obs[v*2][tsid] = true
1627                                 }
1628                                 outcols[v*2][cgid] = 1
1629                         } else if tv0 == v || tv1 == v {
1630                                 if tsid >= 0 {
1631                                         obs[v*2+1][tsid] = true
1632                                 }
1633                                 outcols[v*2+1][cgid] = 1
1634                         }
1635                 }
1636         }
1637         var onehot [][]int8
1638         var xref []onehotXref
1639         var maf float64
1640         for col := 2; col < len(obs); col++ {
1641                 // col 0,1 correspond to tile variant 0, i.e.,
1642                 // no-call; col 2,3 correspond to the most common
1643                 // variant; so we (normally) start at col 4.
1644                 if col < 4 && !cmd.includeVariant1 {
1645                         continue
1646                 }
1647                 if col&1 == 0 {
1648                         maf = homhet2maf(obs[col : col+2])
1649                         if cmd.pvalueMinFrequency < 1 && maf < cmd.pvalueMinFrequency {
1650                                 // Skip both columns (hom and het) if
1651                                 // allele frequency is below threshold
1652                                 col++
1653                                 continue
1654                         }
1655                 }
1656                 atomic.AddInt64(&cmd.pvalueCallCount, 1)
1657                 p := cmd.pvalue(obs[col])
1658                 if cmd.chi2PValue < 1 && !(p < cmd.chi2PValue) {
1659                         continue
1660                 }
1661                 onehot = append(onehot, outcols[col])
1662                 xref = append(xref, onehotXref{
1663                         tag:     tag,
1664                         variant: tileVariantID(col >> 1),
1665                         hom:     col&1 == 0,
1666                         pvalue:  p,
1667                         maf:     maf,
1668                 })
1669         }
1670         return onehot, xref
1671 }
1672
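// homhet2maf returns the allele frequency implied by a hom/het
// indicator pair: each hom sample contributes two allele copies and
// each het sample one, out of two copies per sample.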
1673 func homhet2maf(onehot [][]bool) float64 {
1674         if len(onehot[0]) == 0 {
1675                 return 0
1676         }
1677         n := 0
1678         for i := range onehot[0] {
1679                 if onehot[0][i] {
1680                         // hom
1681                         n += 2
1682                 } else if onehot[1][i] {
1683                         // het
1684                         n += 1
1685                 }
1686         }
1687         return float64(n) / float64(len(onehot[0])*2)
1688 }
1689
1690 // convert a []onehotXref with length N to a numpy-style []int32
1691 // matrix with N columns and 6 rows: tag, tile variant, hom/het
1692 // flag, p-value, -log10(p-value), and allele frequency (maf).
1693 //
1694 // Hom/het row contains hom=1, het=0.  P-value, -log10(p-value),
1695 // and maf rows contain 1000000x the actual values.
1696 func onehotXref2int32(xrefs []onehotXref) []int32 {
1697         xcols := len(xrefs)
1698         xdata := make([]int32, 6*xcols)
1699         for i, xref := range xrefs {
1700                 xdata[i] = int32(xref.tag)
1701                 xdata[xcols+i] = int32(xref.variant)
1702                 if xref.hom {
1703                         xdata[xcols*2+i] = 1
1704                 }
1705                 xdata[xcols*3+i] = int32(xref.pvalue * 1000000)
1706                 xdata[xcols*4+i] = int32(-math.Log10(xref.pvalue) * 1000000)
1707                 xdata[xcols*5+i] = int32(xref.maf * 1000000)
1708         }
1709         return xdata
1710 }
1711
1712 // transpose onehot data from in[col][row] to numpy-style
1713 // out[row*cols+col].
1714 func onehotcols2int8(in [][]int8) []int8 {
1715         if len(in) == 0 {
1716                 return nil
1717         }
1718         cols := len(in)
1719         rows := len(in[0])
1720         out := make([]int8, rows*cols)
1721         for row := 0; row < rows; row++ {
1722                 outrow := out[row*cols:]
1723                 for col, incol := range in {
1724                         outrow[col] = incol[row]
1725                 }
1726         }
1727         return out
1728 }
1729
1730 // Return [2][]uint32{rowIndices, colIndices} indicating which
1731 // elements of matrixT[c][r] have non-zero values.
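// For example (illustrative), matrixT = [[0, 1], [1, 0]] yields
// rowIndices = [1, 0] and colIndices = [0, 1].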
1732 func onehotChunk2Indirect(matrixT [][]int8) [2][]uint32 {
1733         var nz [2][]uint32
1734         for c, col := range matrixT {
1735                 for r, val := range col {
1736                         if val != 0 {
1737                                 nz[0] = append(nz[0], uint32(r))
1738                                 nz[1] = append(nz[1], uint32(c))
1739                         }
1740                 }
1741         }
1742         return nz
1743 }