19566: glm one column at a time.
diff --git a/slicenumpy.go b/slicenumpy.go
index 2e63bc19519c33e96df4b30218968bc19c712f31..25fc466c116c4b9cc0a9508378ff33db91247c0b 100644
--- a/slicenumpy.go
+++ b/slicenumpy.go
@@ -82,12 +82,13 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
        onehotSingle := flags.Bool("single-onehot", false, "generate one-hot tile-based matrix")
        onehotChunked := flags.Bool("chunked-onehot", false, "generate one-hot tile-based matrix per input chunk")
        samplesFilename := flags.String("samples", "", "`samples.csv` file with training/validation and case/control groups (see 'lightning choose-samples')")
-       onlyPCA := flags.Bool("pca", false, "generate pca matrix")
+       caseControlOnly := flags.Bool("case-control-only", false, "drop samples that are not in case/control groups")
+       onlyPCA := flags.Bool("pca", false, "run principal component analysis, write components to pca.npy and samples.csv")
        pcaComponents := flags.Int("pca-components", 4, "number of PCA components")
        maxPCATiles := flags.Int("max-pca-tiles", 0, "maximum tiles to use as PCA input (filter, then drop every 2nd column pair until below max)")
        debugTag := flags.Int("debug-tag", -1, "log debugging details about specified tag")
        flags.IntVar(&cmd.threads, "threads", 16, "number of memory-hungry assembly threads, and number of VCPUs to request for arvados container")
-       flags.Float64Var(&cmd.chi2PValue, "chi2-p-value", 1, "do Χ² test and omit columns with p-value above this threshold")
+       flags.Float64Var(&cmd.chi2PValue, "chi2-p-value", 1, "do Χ² test (or logistic regression if -samples file has PCA components) and omit columns with p-value above this threshold")
        flags.BoolVar(&cmd.includeVariant1, "include-variant-1", false, "include most common variant when building one-hot matrix")
        cmd.filter.Flags(flags)
        err := flags.Parse(args)
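
For orientation, the flag help above suggests a two-pass workflow: a -pca run that writes PCA components to pca.npy and samples.csv, followed by a run whose -samples file carries those components so the p-value filter switches to logistic regression. A sketch of the second run (the "lightning slice-numpy" subcommand name, file paths, and the omitted input/output and tile-filter flags are assumptions; the flags themselves are the ones defined above):

        lightning slice-numpy \
            -samples samples.csv \
            -case-control-only=true \
            -single-onehot=true \
            -chi2-p-value=1e-5 \
            ...   # input/output and tile-filter flags omitted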
@@ -95,6 +96,8 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                return nil
        } else if err != nil {
                return err
+       } else if flags.NArg() > 0 {
+               return fmt.Errorf("errant command line arguments after parsed flags: %v", flags.Args())
        }
 
        if *pprof != "" {
@@ -137,6 +140,7 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                        "-single-onehot=" + fmt.Sprintf("%v", *onehotSingle),
                        "-chunked-onehot=" + fmt.Sprintf("%v", *onehotChunked),
                        "-samples=" + *samplesFilename,
+                       "-case-control-only=" + fmt.Sprintf("%v", *caseControlOnly),
                        "-pca=" + fmt.Sprintf("%v", *onlyPCA),
                        "-pca-components=" + fmt.Sprintf("%d", *pcaComponents),
                        "-max-pca-tiles=" + fmt.Sprintf("%d", *maxPCATiles),
@@ -182,6 +186,8 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                if err != nil {
                        return err
                }
+       } else if *caseControlOnly {
+               return fmt.Errorf("-case-control-only does not make sense without -samples")
        }
 
        cmd.cgnames = nil
@@ -244,14 +250,31 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
        } else if len(cmd.cgnames) != len(cmd.samples) {
                return fmt.Errorf("mismatched sample list: %d samples in library, %d in %s", len(cmd.cgnames), len(cmd.samples), *samplesFilename)
        } else {
-               cmd.trainingSetSize = 0
                for i, name := range cmd.cgnames {
                        if s := trimFilenameForLabel(name); s != cmd.samples[i].id {
                                return fmt.Errorf("mismatched sample list: sample %d is %q in library, %q in %s", i, s, cmd.samples[i].id, *samplesFilename)
                        }
+               }
+               if *caseControlOnly {
+                       for i := 0; i < len(cmd.samples); i++ {
+                               if !cmd.samples[i].isTraining && !cmd.samples[i].isValidation {
+                                       if i+1 < len(cmd.samples) {
+                                               copy(cmd.samples[i:], cmd.samples[i+1:])
+                                               copy(cmd.cgnames[i:], cmd.cgnames[i+1:])
+                                       }
+                                       cmd.samples = cmd.samples[:len(cmd.samples)-1]
+                                       cmd.cgnames = cmd.cgnames[:len(cmd.cgnames)-1]
+                                       i--
+                               }
+                       }
+               }
+               cmd.chi2Cases = nil
+               cmd.trainingSetSize = 0
+               for i := range cmd.cgnames {
                        if cmd.samples[i].isTraining {
                                cmd.trainingSet[i] = cmd.trainingSetSize
                                cmd.trainingSetSize++
+                               cmd.chi2Cases = append(cmd.chi2Cases, cmd.samples[i].isCase)
                        } else {
                                cmd.trainingSet[i] = -1
                        }
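
The loop above deletes samples that are in neither the case/control groups from cmd.samples and cmd.cgnames in lock step by shifting the tails down. Purely as an illustration of what that loop does (identifiers taken from the diff; not a proposed change), the same filter written with a write-index:

        if *caseControlOnly {
            keep := 0
            for i, si := range cmd.samples {
                if si.isTraining || si.isValidation {
                    cmd.samples[keep] = si
                    cmd.cgnames[keep] = cmd.cgnames[i]
                    keep++
                }
            }
            cmd.samples = cmd.samples[:keep]
            cmd.cgnames = cmd.cgnames[:keep]
        }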
@@ -267,6 +290,41 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                cmd.minCoverage = int(math.Ceil(cmd.filter.MinCoverage * float64(len(cmd.cgnames))))
        }
 
+       {
+               samplesOutFilename := *outputDir + "/samples.csv"
+               log.Infof("writing sample metadata to %s", samplesOutFilename)
+               var f *os.File
+               f, err = os.Create(samplesOutFilename)
+               if err != nil {
+                       return err
+               }
+               defer f.Close()
+               for i, si := range cmd.samples {
+                       var cc, tv string
+                       if si.isCase {
+                               cc = "1"
+                       } else if si.isControl {
+                               cc = "0"
+                       }
+                       if si.isTraining {
+                               tv = "1"
+                       } else {
+                               tv = "0"
+                       }
+                       _, err = fmt.Fprintf(f, "%d,%s,%s,%s\n", i, si.id, cc, tv)
+                       if err != nil {
+                               err = fmt.Errorf("write %s: %w", samplesOutFilename, err)
+                               return err
+                       }
+               }
+               err = f.Close()
+               if err != nil {
+                       err = fmt.Errorf("close %s: %w", samplesOutFilename, err)
+                       return err
+               }
+               log.Print("done")
+       }
+
        log.Info("indexing reference tiles")
        type reftileinfo struct {
                variant  tileVariantID
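
Each row emitted by the samples.csv writer above has four fields: index, sample ID, case/control (1, 0, or empty when the sample is in neither group), and training flag (1 for training, 0 otherwise). For example (sample IDs invented for illustration):

        0,HG00096,1,1
        1,HG00097,0,1
        2,HG00099,,0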
@@ -604,6 +662,12 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                        break
                                }
                                remap := variantRemap[tag-tagstart]
+                               if remap == nil {
+                                       // remap was not assigned above because
+                                       // this tag did not meet minCoverage;
+                                       // advance outcol and skip it
+                                       outcol++
+                                       continue
+                               }
                                maxv := tileVariantID(0)
                                for _, v := range remap {
                                        if maxv < v {
@@ -1126,6 +1190,10 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                cols = (cols + 1) / 2
                                stride = stride * 2
                        }
+                       if cols%2 == 1 {
+                               // pad: the PCA input is handled in column pairs
+                               cols++
+                       }
                        log.Printf("creating full matrix (%d rows) and training matrix (%d rows) with %d cols, stride %d", len(cmd.cgnames), cmd.trainingSetSize, cols, stride)
                        mtxFull := mat.NewDense(len(cmd.cgnames), cols, nil)
                        mtxTrain := mat.NewDense(cmd.trainingSetSize, cols, nil)
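
To make the column arithmetic concrete: each pass of the halving loop above replaces cols with (cols+1)/2 and doubles the stride, and the new check pads an odd count up to an even one so the matrix can keep being treated as column pairs. For example, 10,001 columns halve to 5,001, then to 2,501, and once the count is under the -max-pca-tiles limit it is padded to 2,502.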
@@ -1265,9 +1333,12 @@ func (cmd *sliceNumpy) loadSampleInfo(samplesFilename string) ([]sampleInfo, err
        lineNum := 0
        for _, csv := range bytes.Split(buf, []byte{'\n'}) {
                lineNum++
+               if len(csv) == 0 {
+                       continue
+               }
                split := strings.Split(string(csv), ",")
-               if len(split) != 4 {
-                       return nil, fmt.Errorf("%d fields != 4 in %s line %d: %q", len(split), samplesFilename, lineNum, csv)
+               if len(split) < 4 {
+                       return nil, fmt.Errorf("%d fields < 4 in %s line %d: %q", len(split), samplesFilename, lineNum, csv)
                }
                if split[0] == "Index" && split[1] == "SampleID" && split[2] == "CaseControl" && split[3] == "TrainingValidation" {
                        continue
@@ -1282,12 +1353,23 @@ func (cmd *sliceNumpy) loadSampleInfo(samplesFilename string) ([]sampleInfo, err
                if idx != len(si) {
                        return nil, fmt.Errorf("%s line %d: index %d out of order", samplesFilename, lineNum, idx)
                }
+               var pcaComponents []float64
+               if len(split) > 4 {
+                       for _, s := range split[4:] {
+                               f, err := strconv.ParseFloat(s, 64)
+                               if err != nil {
+                                       return nil, fmt.Errorf("%s line %d: cannot parse float %q: %s", samplesFilename, lineNum, s, err)
+                               }
+                               pcaComponents = append(pcaComponents, f)
+                       }
+               }
                si = append(si, sampleInfo{
-                       id:           split[1],
-                       isCase:       split[2] == "1",
-                       isControl:    split[2] == "0",
-                       isTraining:   split[3] == "1",
-                       isValidation: split[3] == "0",
+                       id:            split[1],
+                       isCase:        split[2] == "1",
+                       isControl:     split[2] == "0",
+                       isTraining:    split[3] == "1",
+                       isValidation:  split[3] == "0",
+                       pcaComponents: pcaComponents,
                })
        }
        return si, nil
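
With the relaxed field count, a samples.csv accepted by this parser can carry extra numeric columns, which become pcaComponents. For example (sample IDs and component values invented for illustration):

        Index,SampleID,CaseControl,TrainingValidation,PCA1,PCA2
        0,HG00096,1,1,0.0123,-0.0456
        1,HG00097,0,0,-0.0078,0.0911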
@@ -1483,22 +1565,30 @@ func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantI
        if coverage < cmd.minCoverage {
                return nil, nil
        }
+       // "observed" array for p-value calculation (training set
+       // only)
        obs := make([][]bool, (maxv+1)*2) // 2 slices (hom + het) for each variant#
+       // one-hot output (all samples)
+       outcols := make([][]int8, (maxv+1)*2)
        for i := range obs {
                obs[i] = make([]bool, cmd.trainingSetSize)
+               outcols[i] = make([]int8, len(cmd.cgnames))
        }
        for cgid, name := range cmd.cgnames {
                tsid := cmd.trainingSet[cgid]
-               if tsid < 0 {
-                       continue
-               }
                cgvars := cgs[name].Variants[tagoffset*2:]
                tv0, tv1 := remap[cgvars[0]], remap[cgvars[1]]
                for v := tileVariantID(1); v <= maxv; v++ {
                        if tv0 == v && tv1 == v {
-                               obs[v*2][tsid] = true
+                               if tsid >= 0 {
+                                       obs[v*2][tsid] = true
+                               }
+                               outcols[v*2][cgid] = 1
                        } else if tv0 == v || tv1 == v {
-                               obs[v*2+1][tsid] = true
+                               if tsid >= 0 {
+                                       obs[v*2+1][tsid] = true
+                               }
+                               outcols[v*2+1][cgid] = 1
                        }
                }
        }
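
A concrete reading of the encoding above: columns come in pairs per variant, with column v*2 set for samples homozygous for variant v and column v*2+1 set for heterozygous carriers. For instance, a sample whose two remapped alleles are (2, 1) gets outcols[3] (het for variant 1) and outcols[5] (het for variant 2) set to 1, while a sample with alleles (2, 2) sets only outcols[4] (hom for variant 2); the parallel obs columns are filled the same way, but only for training-set samples.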
@@ -1511,11 +1601,16 @@ func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantI
                if col < 4 && !cmd.includeVariant1 {
                        continue
                }
-               p := pvalue(obs[col], cmd.chi2Cases)
+               var p float64
+               if len(cmd.samples[0].pcaComponents) > 0 {
+                       p = pvalueGLM(cmd.samples, obs[col])
+               } else {
+                       p = pvalue(obs[col], cmd.chi2Cases)
+               }
                if cmd.chi2PValue < 1 && !(p < cmd.chi2PValue) {
                        continue
                }
-               onehot = append(onehot, bool2int8(obs[col]))
+               onehot = append(onehot, outcols[col])
                xref = append(xref, onehotXref{
                        tag:     tag,
                        variant: tileVariantID(col >> 1),
@@ -1526,16 +1621,6 @@ func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantI
        return onehot, xref
 }
 
-func bool2int8(in []bool) []int8 {
-       out := make([]int8, len(in))
-       for i, v := range in {
-               if v {
-                       out[i] = 1
-               }
-       }
-       return out
-}
-
 // convert a []onehotXref with length N to a numpy-style []int32
 // matrix with N columns, one row per field of onehotXref struct.
 //
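
The change above swaps in pvalueGLM(cmd.samples, obs[col]) whenever the samples file carries PCA components, evaluating one one-hot column at a time as the commit title says; its internals are not part of this diff. As a rough sketch of the per-column inputs such a fit could use, assembled only from fields visible here (sampleInfo.pcaComponents, isCase, isTraining; the helper name, the use of gonum's mat package, and the intercept/covariate layout are assumptions):

        // import "gonum.org/v1/gonum/mat"  (assumed; not part of this diff)

        // designForColumn is a hypothetical helper: it builds a logistic-regression
        // design matrix for one one-hot column, restricted to training samples,
        // with an intercept, the per-sample PCA covariates, and the genotype column.
        func designForColumn(samples []sampleInfo, obs []bool) (x *mat.Dense, y []float64) {
            nPCA := len(samples[0].pcaComponents)
            rows := 0
            for _, si := range samples {
                if si.isTraining {
                    rows++
                }
            }
            x = mat.NewDense(rows, nPCA+2, nil)
            y = make([]float64, rows)
            row := 0
            for _, si := range samples {
                if !si.isTraining {
                    continue
                }
                x.Set(row, 0, 1) // intercept
                for j, c := range si.pcaComponents {
                    x.Set(row, j+1, c)
                }
                if obs[row] { // obs is indexed in training-set order, matching this walk
                    x.Set(row, nPCA+1, 1) // genotype indicator for this column
                }
                if si.isCase {
                    y[row] = 1
                }
                row++
            }
            return x, y
        }

A logistic fit of y on x would then yield the p-value for the genotype coefficient; that part is deliberately left out here.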