Fix some tests.
[lightning.git] / slicenumpy.go
index b078bc124d9148055d7c77a3b49394981b197fc9..34cd777458ab93d60505b5dfa19b3aaa0280dd32 100644
--- a/slicenumpy.go
+++ b/slicenumpy.go
@@ -8,6 +8,7 @@ import (
        "bufio"
        "bytes"
        "encoding/gob"
+       "encoding/json"
        "errors"
        "flag"
        "fmt"
@@ -39,18 +40,24 @@ import (
 const annotationMaxTileSpan = 100
 
 type sliceNumpy struct {
-       filter                filter
-       threads               int
-       chi2CaseControlColumn string
-       chi2CaseControlFile   string
-       chi2Cases             []bool
-       chi2PValue            float64
-       trainingSet           []int // see loadTrainingSet
-       trainingSetSize       int
-       minCoverage           int
-       cgnames               []string
-       includeVariant1       bool
-       debugTag              tagID
+       filter             filter
+       threads            int
+       chi2Cases          []bool
+       chi2PValue         float64
+       pvalueMinFrequency float64
+       maxFrequency       float64
+       pcaComponents      int
+       minCoverage        int
+       minCoverageAll     bool
+       includeVariant1    bool
+       debugTag           tagID
+
+       cgnames         []string
+       samples         []sampleInfo
+       trainingSet     []int // samples index => training set index, or -1 if not in training set
+       trainingSetSize int
+       pvalue          func(onehot []bool) float64
+       pvalueCallCount int64
 }
 
 func (cmd *sliceNumpy) RunCommand(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) int {
@@ -61,6 +68,7 @@ func (cmd *sliceNumpy) RunCommand(prog string, args []string, stdin io.Reader, s
        }
        return 0
 }
+
 func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout, stderr io.Writer) error {
        flags := flag.NewFlagSet("", flag.ContinueOnError)
        flags.SetOutput(stderr)
@@ -70,6 +78,7 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
        arvadosVCPUs := flags.Int("arvados-vcpus", 96, "number of VCPUs to request for arvados container")
        projectUUID := flags.String("project", "", "project `UUID` for output data")
        priority := flags.Int("priority", 500, "container request priority")
+       preemptible := flags.Bool("preemptible", true, "request preemptible instance")
        inputDir := flags.String("input-dir", "./in", "input `directory`")
        outputDir := flags.String("output-dir", "./out", "output `directory`")
        ref := flags.String("ref", "", "reference name (if blank, choose last one that appears in input)")
@@ -80,15 +89,17 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
        hgvsChunked := flags.Bool("chunked-hgvs-matrix", false, "also generate hgvs-based matrix per chromosome")
        onehotSingle := flags.Bool("single-onehot", false, "generate one-hot tile-based matrix")
        onehotChunked := flags.Bool("chunked-onehot", false, "generate one-hot tile-based matrix per input chunk")
-       trainingSetFilename := flags.String("training-set", "", "`tsv` file with sample IDs to be used for PCA fitting and Χ² test (if not provided, use all samples)")
-       onlyPCA := flags.Bool("pca", false, "generate pca matrix")
-       pcaComponents := flags.Int("pca-components", 4, "number of PCA components")
+       samplesFilename := flags.String("samples", "", "`samples.csv` file with training/validation and case/control groups (see 'lightning choose-samples')")
+       caseControlOnly := flags.Bool("case-control-only", false, "drop samples that are not in case/control groups")
+       onlyPCA := flags.Bool("pca", false, "run principal component analysis, write components to pca.npy and samples.csv")
+       flags.IntVar(&cmd.pcaComponents, "pca-components", 4, "number of PCA components to compute / use in logistic regression")
        maxPCATiles := flags.Int("max-pca-tiles", 0, "maximum tiles to use as PCA input (filter, then drop every 2nd column pair until below max)")
        debugTag := flags.Int("debug-tag", -1, "log debugging details about specified tag")
+       flags.BoolVar(&cmd.minCoverageAll, "min-coverage-all", false, "apply -min-coverage filter based on all samples, not just training set")
        flags.IntVar(&cmd.threads, "threads", 16, "number of memory-hungry assembly threads, and number of VCPUs to request for arvados container")
-       flags.StringVar(&cmd.chi2CaseControlFile, "chi2-case-control-file", "", "tsv file or directory indicating cases and controls for Χ² test (if directory, all .tsv files will be read)")
-       flags.StringVar(&cmd.chi2CaseControlColumn, "chi2-case-control-column", "", "name of case/control column in case-control files for Χ² test (value must be 0 for control, 1 for case)")
-       flags.Float64Var(&cmd.chi2PValue, "chi2-p-value", 1, "do Χ² test and omit columns with p-value above this threshold")
+       flags.Float64Var(&cmd.chi2PValue, "chi2-p-value", 1, "do Χ² test (or logistic regression if -samples file has PCA components) and omit columns with p-value above this threshold")
+       flags.Float64Var(&cmd.pvalueMinFrequency, "pvalue-min-frequency", 0.01, "skip p-value calculation on tile variants below this frequency in the training set")
+       flags.Float64Var(&cmd.maxFrequency, "max-frequency", 1, "do not output variants above this frequency in the training set")
        flags.BoolVar(&cmd.includeVariant1, "include-variant-1", false, "include most common variant when building one-hot matrix")
        cmd.filter.Flags(flags)
        err := flags.Parse(args)
@@ -96,6 +107,8 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                return nil
        } else if err != nil {
                return err
+       } else if flags.NArg() > 0 {
+               return fmt.Errorf("errant command line arguments after parsed flags: %v", flags.Args())
        }
 
        if *pprof != "" {
@@ -104,8 +117,8 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                }()
        }
 
-       if cmd.chi2PValue != 1 && (cmd.chi2CaseControlFile == "" || cmd.chi2CaseControlColumn == "") {
-               return fmt.Errorf("cannot use provided -chi2-p-value=%f because -chi2-case-control-file= or -chi2-case-control-column= value is empty", cmd.chi2PValue)
+       if cmd.chi2PValue != 1 && *samplesFilename == "" {
+               return fmt.Errorf("cannot use provided -chi2-p-value=%f because -samples= value is empty", cmd.chi2PValue)
        }
 
        cmd.debugTag = tagID(*debugTag)
@@ -120,8 +133,9 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                        Priority:    *priority,
                        KeepCache:   2,
                        APIAccess:   true,
+                       Preemptible: *preemptible,
                }
-               err = runner.TranslatePaths(inputDir, regionsFilename, trainingSetFilename, &cmd.chi2CaseControlFile)
+               err = runner.TranslatePaths(inputDir, regionsFilename, samplesFilename)
                if err != nil {
                        return err
                }
@@ -137,13 +151,15 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                        "-chunked-hgvs-matrix=" + fmt.Sprintf("%v", *hgvsChunked),
                        "-single-onehot=" + fmt.Sprintf("%v", *onehotSingle),
                        "-chunked-onehot=" + fmt.Sprintf("%v", *onehotChunked),
-                       "-training-set=" + *trainingSetFilename,
+                       "-samples=" + *samplesFilename,
+                       "-case-control-only=" + fmt.Sprintf("%v", *caseControlOnly),
+                       "-min-coverage-all=" + fmt.Sprintf("%v", cmd.minCoverageAll),
                        "-pca=" + fmt.Sprintf("%v", *onlyPCA),
-                       "-pca-components=" + fmt.Sprintf("%d", *pcaComponents),
+                       "-pca-components=" + fmt.Sprintf("%d", cmd.pcaComponents),
                        "-max-pca-tiles=" + fmt.Sprintf("%d", *maxPCATiles),
-                       "-chi2-case-control-file=" + cmd.chi2CaseControlFile,
-                       "-chi2-case-control-column=" + cmd.chi2CaseControlColumn,
                        "-chi2-p-value=" + fmt.Sprintf("%f", cmd.chi2PValue),
+                       "-pvalue-min-frequency=" + fmt.Sprintf("%f", cmd.pvalueMinFrequency),
+                       "-max-frequency=" + fmt.Sprintf("%f", cmd.maxFrequency),
                        "-include-variant-1=" + fmt.Sprintf("%v", cmd.includeVariant1),
                        "-debug-tag=" + fmt.Sprintf("%d", cmd.debugTag),
                }
@@ -180,6 +196,15 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                return err
        }
 
+       if *samplesFilename != "" {
+               cmd.samples, err = loadSampleInfo(*samplesFilename)
+               if err != nil {
+                       return err
+               }
+       } else if *caseControlOnly {
+               return fmt.Errorf("-case-control-only does not make sense without -samples")
+       }
+
        cmd.cgnames = nil
        var tagset [][]byte
        err = DecodeLibrary(in0, strings.HasSuffix(infiles[0], ".gz"), func(ent *LibraryEntry) error {
@@ -222,61 +247,96 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                return err
        }
        taglen := taglib.TagLen()
-
-       if len(cmd.cgnames) == 0 {
-               err = fmt.Errorf("no genomes found matching regexp %q", cmd.filter.MatchGenome)
-               return err
-       }
        sort.Strings(cmd.cgnames)
-       err = cmd.useCaseControlFiles()
-       if err != nil {
-               return err
-       }
+
        if len(cmd.cgnames) == 0 {
-               err = fmt.Errorf("fatal: 0 cases, 0 controls, nothing to do")
-               return err
+               return fmt.Errorf("fatal: 0 matching samples in library, nothing to do")
        }
-       err = cmd.loadTrainingSet(*trainingSetFilename)
-       if err != nil {
-               return err
-       }
-       if cmd.filter.MinCoverage == 1 {
-               // In the generic formula below, floating point
-               // arithmetic can effectively push the coverage
-               // threshold above 1.0, which is impossible/useless.
-               // 1.0 needs to mean exactly 100% coverage.
-               cmd.minCoverage = len(cmd.cgnames)
-       } else {
-               cmd.minCoverage = int(math.Ceil(cmd.filter.MinCoverage * float64(len(cmd.cgnames))))
-       }
-
-       {
-               labelsFilename := *outputDir + "/samples.csv"
-               log.Infof("writing labels to %s", labelsFilename)
-               var f *os.File
-               f, err = os.Create(labelsFilename)
-               if err != nil {
-                       return err
+       cmd.trainingSet = make([]int, len(cmd.cgnames))
+       if *samplesFilename == "" {
+               cmd.trainingSetSize = len(cmd.cgnames)
+               for i, name := range cmd.cgnames {
+                       cmd.samples = append(cmd.samples, sampleInfo{
+                               id:         trimFilenameForLabel(name),
+                               isTraining: true,
+                       })
+                       cmd.trainingSet[i] = i
                }
-               defer f.Close()
+       } else if len(cmd.cgnames) != len(cmd.samples) {
+               return fmt.Errorf("mismatched sample list: %d samples in library, %d in %s", len(cmd.cgnames), len(cmd.samples), *samplesFilename)
+       } else {
                for i, name := range cmd.cgnames {
-                       cc := 0
-                       if cmd.chi2Cases != nil && cmd.chi2Cases[i] {
-                               cc = 1
+                       if s := trimFilenameForLabel(name); s != cmd.samples[i].id {
+                               return fmt.Errorf("mismatched sample list: sample %d is %q in library, %q in %s", i, s, cmd.samples[i].id, *samplesFilename)
                        }
-                       _, err = fmt.Fprintf(f, "%d,%q,%d\n", i, trimFilenameForLabel(name), cc)
-                       if err != nil {
-                               err = fmt.Errorf("write %s: %w", labelsFilename, err)
-                               return err
+               }
+               if *caseControlOnly {
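+                       // Drop samples that belong to neither the
+                       // training nor the validation set, keeping
+                       // cmd.samples and cmd.cgnames in sync.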
+                       for i := 0; i < len(cmd.samples); i++ {
+                               if !cmd.samples[i].isTraining && !cmd.samples[i].isValidation {
+                                       if i+1 < len(cmd.samples) {
+                                               copy(cmd.samples[i:], cmd.samples[i+1:])
+                                               copy(cmd.cgnames[i:], cmd.cgnames[i+1:])
+                                       }
+                                       cmd.samples = cmd.samples[:len(cmd.samples)-1]
+                                       cmd.cgnames = cmd.cgnames[:len(cmd.cgnames)-1]
+                                       i--
+                               }
                        }
                }
-               err = f.Close()
+               cmd.chi2Cases = nil
+               cmd.trainingSetSize = 0
+               for i := range cmd.cgnames {
+                       if cmd.samples[i].isTraining {
+                               cmd.trainingSet[i] = cmd.trainingSetSize
+                               cmd.trainingSetSize++
+                               cmd.chi2Cases = append(cmd.chi2Cases, cmd.samples[i].isCase)
+                       } else {
+                               cmd.trainingSet[i] = -1
+                       }
+               }
+               if cmd.pvalue == nil {
+                       cmd.pvalue = func(onehot []bool) float64 {
+                               return pvalue(onehot, cmd.chi2Cases)
+                       }
+               }
+       }
+
+       if cmd.minCoverageAll {
+               cmd.minCoverage = len(cmd.cgnames)
+       } else {
+               cmd.minCoverage = cmd.trainingSetSize
+       }
+       if cmd.filter.MinCoverage < 1 {
+               cmd.minCoverage = int(math.Ceil(cmd.filter.MinCoverage * float64(cmd.minCoverage)))
+       }
+
+       if len(cmd.samples[0].pcaComponents) > 0 {
+               cmd.pvalue = glmPvalueFunc(cmd.samples, cmd.pcaComponents)
+               // Unfortunately, statsmodel/glm lib logs stuff to
+               // os.Stdout when it panics on an unsolvable
+               // problem. We recover() from the panic in glm.go, but
+               // we also need to commandeer os.Stdout to avoid
+               // producing large quantities of logs.
+               stdoutWas := os.Stdout
+               defer func() { os.Stdout = stdoutWas }()
+               os.Stdout, err = os.Open(os.DevNull)
                if err != nil {
-                       err = fmt.Errorf("close %s: %w", labelsFilename, err)
                        return err
                }
        }
 
+       // cgnamemap[name]==true for samples that we are including in
+       // output
+       cgnamemap := map[string]bool{}
+       for _, name := range cmd.cgnames {
+               cgnamemap[name] = true
+       }
+
+       err = writeSampleInfo(cmd.samples, *outputDir)
+       if err != nil {
+               return err
+       }
+
        log.Info("indexing reference tiles")
        type reftileinfo struct {
                variant  tileVariantID
@@ -301,7 +361,7 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                return err
                        }
                        foundthistag := false
-                       taglib.FindAll(tiledata[:len(tiledata)-1], func(tagid tagID, offset, _ int) {
+                       taglib.FindAll(bufio.NewReader(bytes.NewReader(tiledata[:len(tiledata)-1])), nil, func(tagid tagID, offset, _ int) {
                                if !foundthistag && tagid == libref.Tag {
                                        foundthistag = true
                                        return
@@ -458,7 +518,7 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                        if cmd.filter.MaxTag >= 0 && cg.StartTag > tagID(cmd.filter.MaxTag) {
                                                return errSkip
                                        }
-                                       if !matchGenome.MatchString(cg.Name) {
+                                       if !cgnamemap[cg.Name] {
                                                continue
                                        }
                                        // pad to full slice size
@@ -474,7 +534,7 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                        if err == errSkip {
                                return nil
                        } else if err != nil {
-                               return fmt.Errorf("%04d: DecodeLibrary(%s): err", infileIdx, infile)
+                               return fmt.Errorf("%04d: DecodeLibrary(%s): %w", infileIdx, infile, err)
                        }
                        tagstart := cgs[cmd.cgnames[0]].StartTag
                        tagend := cgs[cmd.cgnames[0]].EndTag
@@ -496,7 +556,11 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                                count[blake2b.Sum256(rt.tiledata)] = 0
                                        }
 
-                                       for cgname, cg := range cgs {
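+                                       // Tally tile variants over
+                                       // training samples only, unless
+                                       // -min-coverage-all is set.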
+                                       for cgidx, cgname := range cmd.cgnames {
+                                               if !cmd.minCoverageAll && !cmd.samples[cgidx].isTraining {
+                                                       continue
+                                               }
+                                               cg := cgs[cgname]
                                                idx := int(tag-tagstart) * 2
                                                for allele := 0; allele < 2; allele++ {
                                                        v := cg.Variants[idx+allele]
@@ -614,6 +678,12 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                        break
                                }
                                remap := variantRemap[tag-tagstart]
+                               if remap == nil {
+                                       // not assigned above, because
+                                       // this tag's coverage was below
+                                       // the minCoverage threshold
+                                       outcol++
+                                       continue
+                               }
                                maxv := tileVariantID(0)
                                for _, v := range remap {
                                        if maxv < v {
@@ -802,7 +872,7 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                                if cmd.filter.MaxTag >= 0 && tag > tagID(cmd.filter.MaxTag) {
                                                        break
                                                }
-                                               if rt := reftile[tag]; rt == nil || rt.excluded {
+                                               if rt := reftile[tag]; mask != nil && (rt == nil || rt.excluded) {
                                                        continue
                                                }
                                                if v == 0 {
@@ -1119,6 +1189,17 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                        if err != nil {
                                return err
                        }
+                       fnm = fmt.Sprintf("%s/stats.json", *outputDir)
+                       j, err := json.Marshal(map[string]interface{}{
+                               "pvalueCallCount": cmd.pvalueCallCount,
+                       })
+                       if err != nil {
+                               return err
+                       }
+                       err = os.WriteFile(fnm, j, 0777)
+                       if err != nil {
+                               return err
+                       }
                }
                if *onlyPCA {
                        cols := 0
@@ -1136,6 +1217,10 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                cols = (cols + 1) / 2
                                stride = stride * 2
                        }
+                       if cols%2 == 1 {
+                               // we work with pairs of columns
+                               cols++
+                       }
                        log.Printf("creating full matrix (%d rows) and training matrix (%d rows) with %d cols, stride %d", len(cmd.cgnames), cmd.trainingSetSize, cols, stride)
                        mtxFull := mat.NewDense(len(cmd.cgnames), cols, nil)
                        mtxTrain := mat.NewDense(cmd.trainingSetSize, cols, nil)
@@ -1149,7 +1234,7 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                }
                        }
                        log.Print("fitting")
-                       transformer := nlp.NewPCA(*pcaComponents)
+                       transformer := nlp.NewPCA(cmd.pcaComponents)
                        transformer.Fit(mtxTrain.T())
                        log.Printf("transforming")
                        pca, err := transformer.Transform(mtxFull.T())
@@ -1185,6 +1270,20 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                                return err
                        }
                        log.Print("done")
+
+                       log.Print("copying pca components to sampleInfo")
+                       for i := range cmd.samples {
+                               cmd.samples[i].pcaComponents = make([]float64, outcols)
+                               for c := 0; c < outcols; c++ {
+                                       cmd.samples[i].pcaComponents[c] = pca.At(i, c)
+                               }
+                       }
+                       log.Print("done")
+
+                       err = writeSampleInfo(cmd.samples, *outputDir)
+                       if err != nil {
+                               return err
+                       }
                }
        }
        if !*mergeOutput && !*onehotChunked && !*onehotSingle && !*onlyPCA {
@@ -1209,157 +1308,121 @@ func (cmd *sliceNumpy) run(prog string, args []string, stdin io.Reader, stdout,
                        return err
                }
        }
+
        return nil
 }
 
-// Read training set file(s) from path (may be dir or file) and set up
-// cmd.trainingSet.
-//
-// cmd.trainingSet[i] == n >= 0 if cmd.cgnames[i] is the nth training
-// set sample.
-//
-// cmd.trainingSet[i] == -1 if cmd.cgnames[i] is not in the training
-// set.
-func (cmd *sliceNumpy) loadTrainingSet(path string) error {
-       cmd.trainingSet = make([]int, len(cmd.cgnames))
-       if path == "" {
-               cmd.trainingSetSize = len(cmd.cgnames)
-               for i := range cmd.trainingSet {
-                       cmd.trainingSet[i] = i
-               }
-               return nil
-       }
-       for i := range cmd.trainingSet {
-               cmd.trainingSet[i] = -1
+type sampleInfo struct {
+       id            string
+       isCase        bool
+       isControl     bool
+       isTraining    bool
+       isValidation  bool
+       pcaComponents []float64
+}
+
+// Read samples.csv file with case/control and training/validation
+// flags.
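+//
+// The expected layout is roughly (values illustrative; the trailing PCA
+// columns are optional and are produced by a -pca run):
+//
+//   Index,SampleID,CaseControl,TrainingValidation,PCA0,PCA1
+//   0,sampleA,1,1,0.012,-0.034
+//   1,sampleB,0,0,0.056,0.078
+//
+// CaseControl is 1 for cases, 0 for controls, and empty for samples in
+// neither group; TrainingValidation is 1 for training samples and 0 for
+// validation samples.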
+func loadSampleInfo(samplesFilename string) ([]sampleInfo, error) {
+       var si []sampleInfo
+       f, err := open(samplesFilename)
+       if err != nil {
+               return nil, err
        }
-       infiles, err := allFiles(path, nil)
+       buf, err := io.ReadAll(f)
+       f.Close()
        if err != nil {
-               return err
+               return nil, err
        }
-       for _, infile := range infiles {
-               f, err := open(infile)
-               if err != nil {
-                       return err
+       lineNum := 0
+       for _, csv := range bytes.Split(buf, []byte{'\n'}) {
+               lineNum++
+               if len(csv) == 0 {
+                       continue
                }
-               buf, err := io.ReadAll(f)
-               f.Close()
-               if err != nil {
-                       return err
+               split := strings.Split(string(csv), ",")
+               if len(split) < 4 {
+                       return nil, fmt.Errorf("%d fields < 4 in %s line %d: %q", len(split), samplesFilename, lineNum, csv)
                }
-               for _, tsv := range bytes.Split(buf, []byte{'\n'}) {
-                       if len(tsv) == 0 {
-                               continue
+               if split[0] == "Index" && split[1] == "SampleID" && split[2] == "CaseControl" && split[3] == "TrainingValidation" {
+                       continue
+               }
+               idx, err := strconv.Atoi(split[0])
+               if err != nil {
+                       if lineNum == 1 {
+                               return nil, fmt.Errorf("header does not look right: %q", csv)
                        }
-                       split := strings.Split(string(tsv), "\t")
-                       pattern := split[0]
-                       found := -1
-                       for i, name := range cmd.cgnames {
-                               if strings.Contains(name, pattern) {
-                                       if found >= 0 {
-                                               log.Warnf("pattern %q in %s already matched sample ID %q -- not using %q", pattern, infile, cmd.cgnames[found], name)
-                                       } else {
-                                               found = i
-                                               cmd.trainingSet[found] = 1
-                                       }
+                       return nil, fmt.Errorf("%s line %d: index: %s", samplesFilename, lineNum, err)
+               }
+               if idx != len(si) {
+                       return nil, fmt.Errorf("%s line %d: index %d out of order", samplesFilename, lineNum, idx)
+               }
+               var pcaComponents []float64
+               if len(split) > 4 {
+                       for _, s := range split[4:] {
+                               f, err := strconv.ParseFloat(s, 64)
+                               if err != nil {
+                                       return nil, fmt.Errorf("%s line %d: cannot parse float %q: %s", samplesFilename, lineNum, s, err)
                                }
-                       }
-                       if found < 0 {
-                               log.Warnf("pattern %q in %s does not match any genome IDs", pattern, infile)
-                               continue
+                               pcaComponents = append(pcaComponents, f)
                        }
                }
+               si = append(si, sampleInfo{
+                       id:            split[1],
+                       isCase:        split[2] == "1",
+                       isControl:     split[2] == "0",
+                       isTraining:    split[3] == "1",
+                       isValidation:  split[3] == "0" && len(split[2]) > 0, // only samples with a case/control label count as validation (guards against stray 0s in input)
+                       pcaComponents: pcaComponents,
+               })
        }
-       tsi := 0
-       for i, x := range cmd.trainingSet {
-               if x == 1 {
-                       cmd.trainingSet[i] = tsi
-                       tsi++
-               }
-       }
-       cmd.trainingSetSize = tsi + 1
-       return nil
+       return si, nil
 }
 
-// Read case/control files, remove non-case/control entries from
-// cmd.cgnames, and build cmd.chi2Cases.
-func (cmd *sliceNumpy) useCaseControlFiles() error {
-       if cmd.chi2CaseControlFile == "" {
-               return nil
+func writeSampleInfo(samples []sampleInfo, outputDir string) error {
+       fnm := outputDir + "/samples.csv"
+       log.Infof("writing sample metadata to %s", fnm)
+       f, err := os.Create(fnm)
+       if err != nil {
+               return err
+       }
+       defer f.Close()
+       pcaLabels := ""
+       if len(samples) > 0 {
+               for i := range samples[0].pcaComponents {
+                       pcaLabels += fmt.Sprintf(",PCA%d", i)
+               }
        }
-       infiles, err := allFiles(cmd.chi2CaseControlFile, nil)
+       _, err = fmt.Fprintf(f, "Index,SampleID,CaseControl,TrainingValidation%s\n", pcaLabels)
        if err != nil {
                return err
        }
-       // index in cmd.cgnames => case(true) / control(false)
-       cc := map[int]bool{}
-       for _, infile := range infiles {
-               f, err := open(infile)
-               if err != nil {
-                       return err
+       for i, si := range samples {
+               var cc, tv string
+               if si.isCase {
+                       cc = "1"
+               } else if si.isControl {
+                       cc = "0"
                }
-               buf, err := io.ReadAll(f)
-               f.Close()
-               if err != nil {
-                       return err
+               if si.isTraining {
+                       tv = "1"
+               } else if si.isValidation {
+                       tv = "0"
                }
-               ccCol := -1
-               for _, tsv := range bytes.Split(buf, []byte{'\n'}) {
-                       if len(tsv) == 0 {
-                               continue
-                       }
-                       split := strings.Split(string(tsv), "\t")
-                       if ccCol < 0 {
-                               // header row
-                               for col, name := range split {
-                                       if name == cmd.chi2CaseControlColumn {
-                                               ccCol = col
-                                               break
-                                       }
-                               }
-                               if ccCol < 0 {
-                                       return fmt.Errorf("%s: no column named %q in header row %q", infile, cmd.chi2CaseControlColumn, tsv)
-                               }
-                               continue
-                       }
-                       if len(split) <= ccCol {
-                               continue
-                       }
-                       pattern := split[0]
-                       found := -1
-                       for i, name := range cmd.cgnames {
-                               if strings.Contains(name, pattern) {
-                                       if found >= 0 {
-                                               log.Warnf("pattern %q in %s matches multiple genome IDs (%q, %q)", pattern, infile, cmd.cgnames[found], name)
-                                       }
-                                       found = i
-                                       if split[ccCol] == "0" {
-                                               cc[found] = false
-                                       }
-                                       if split[ccCol] == "1" {
-                                               cc[found] = true
-                                       }
-                               }
-                       }
-                       if found < 0 {
-                               log.Warnf("pattern %q in %s does not match any genome IDs", pattern, infile)
-                               continue
-                       }
+               var pcavals string
+               for _, pcaval := range si.pcaComponents {
+                       pcavals += fmt.Sprintf(",%f", pcaval)
                }
-       }
-       allnames := cmd.cgnames
-       cmd.cgnames = nil
-       cmd.chi2Cases = nil
-       ncases := 0
-       for i, name := range allnames {
-               if cc, ok := cc[i]; ok {
-                       cmd.cgnames = append(cmd.cgnames, name)
-                       cmd.chi2Cases = append(cmd.chi2Cases, cc)
-                       if cc {
-                               ncases++
-                       }
+               _, err = fmt.Fprintf(f, "%d,%s,%s,%s%s\n", i, si.id, cc, tv, pcavals)
+               if err != nil {
+                       return fmt.Errorf("write %s: %w", fnm, err)
                }
        }
-       log.Printf("%d cases, %d controls, %d neither (dropped)", ncases, len(cmd.cgnames)-ncases, len(allnames)-len(cmd.cgnames))
+       err = f.Close()
+       if err != nil {
+               return fmt.Errorf("close %s: %w", fnm, err)
+       }
+       log.Print("done")
        return nil
 }
 
@@ -1511,6 +1574,7 @@ type onehotXref struct {
        variant tileVariantID
        hom     bool
        pvalue  float64
+       maf     float64
 }
 
 const onehotXrefSize = unsafe.Sizeof(onehotXref{})
@@ -1539,7 +1603,11 @@ func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantI
        }
        tagoffset := tag - chunkstarttag
        coverage := 0
-       for _, cg := range cgs {
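+       // Coverage for the -min-coverage threshold is counted over the
+       // training set only, unless -min-coverage-all is set.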
+       for cgidx, cgname := range cmd.cgnames {
+               if !cmd.minCoverageAll && !cmd.samples[cgidx].isTraining {
+                       continue
+               }
+               cg := cgs[cgname]
                alleles := 0
                for _, v := range cg.Variants[tagoffset*2 : tagoffset*2+2] {
                        if v > 0 && int(v) < len(seq[tag]) && len(seq[tag][v].Sequence) > 0 {
@@ -1553,23 +1621,36 @@ func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantI
        if coverage < cmd.minCoverage {
                return nil, nil
        }
+       // "observed" array for p-value calculation (training set
+       // only)
        obs := make([][]bool, (maxv+1)*2) // 2 slices (hom + het) for each variant#
+       // one-hot output (all samples)
+       outcols := make([][]int8, (maxv+1)*2)
        for i := range obs {
-               obs[i] = make([]bool, len(cmd.cgnames))
+               obs[i] = make([]bool, cmd.trainingSetSize)
+               outcols[i] = make([]int8, len(cmd.cgnames))
        }
        for cgid, name := range cmd.cgnames {
+               tsid := cmd.trainingSet[cgid]
                cgvars := cgs[name].Variants[tagoffset*2:]
                tv0, tv1 := remap[cgvars[0]], remap[cgvars[1]]
                for v := tileVariantID(1); v <= maxv; v++ {
                        if tv0 == v && tv1 == v {
-                               obs[v*2][cgid] = true
+                               if tsid >= 0 {
+                                       obs[v*2][tsid] = true
+                               }
+                               outcols[v*2][cgid] = 1
                        } else if tv0 == v || tv1 == v {
-                               obs[v*2+1][cgid] = true
+                               if tsid >= 0 {
+                                       obs[v*2+1][tsid] = true
+                               }
+                               outcols[v*2+1][cgid] = 1
                        }
                }
        }
        var onehot [][]int8
        var xref []onehotXref
+       var maf float64
        for col := 2; col < len(obs); col++ {
                // col 0,1 correspond to tile variant 0, i.e.,
                // no-call; col 2,3 correspond to the most common
@@ -1577,29 +1658,53 @@ func (cmd *sliceNumpy) tv2homhet(cgs map[string]CompactGenome, maxv tileVariantI
                if col < 4 && !cmd.includeVariant1 {
                        continue
                }
-               p := pvalue(obs[col], cmd.chi2Cases)
+               if col&1 == 0 {
+                       maf = homhet2maf(obs[col : col+2])
+                       if maf < cmd.pvalueMinFrequency {
+                               // Skip both columns (hom and het) if
+                               // allele frequency is below threshold
+                               col++
+                               continue
+                       }
+                       if maf > cmd.maxFrequency {
+                               // Skip both columns if allele
+                               // frequency is above threshold
+                               col++
+                               continue
+                       }
+               }
+               atomic.AddInt64(&cmd.pvalueCallCount, 1)
+               p := cmd.pvalue(obs[col])
                if cmd.chi2PValue < 1 && !(p < cmd.chi2PValue) {
                        continue
                }
-               onehot = append(onehot, bool2int8(obs[col]))
+               onehot = append(onehot, outcols[col])
                xref = append(xref, onehotXref{
                        tag:     tag,
                        variant: tileVariantID(col >> 1),
                        hom:     col&1 == 0,
                        pvalue:  p,
+                       maf:     maf,
                })
        }
        return onehot, xref
 }
 
-func bool2int8(in []bool) []int8 {
-       out := make([]int8, len(in))
-       for i, v := range in {
-               if v {
-                       out[i] = 1
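+// homhet2maf computes the allele frequency implied by a hom/het pair of
+// observation columns: each hom carrier contributes 2 alleles and each
+// het carrier 1, out of 2 alleles per sample. For example, 3 hom + 4 het
+// carriers among 100 samples gives (2*3+4)/(2*100) = 0.05.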
+func homhet2maf(onehot [][]bool) float64 {
+       if len(onehot[0]) == 0 {
+               return 0
+       }
+       n := 0
+       for i := range onehot[0] {
+               if onehot[0][i] {
+                       // hom
+                       n += 2
+               } else if onehot[1][i] {
+                       // het
+                       n += 1
                }
        }
-       return out
+       return float64(n) / float64(len(onehot[0])*2)
 }
 
 // convert a []onehotXref with length N to a numpy-style []int32
@@ -1610,7 +1715,7 @@ func bool2int8(in []bool) []int8 {
 // P-value row contains 1000000x actual p-value.
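+// MAF row contains 1000000x the tile variant's allele frequency in the
+// training set (see homhet2maf).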
 func onehotXref2int32(xrefs []onehotXref) []int32 {
        xcols := len(xrefs)
-       xdata := make([]int32, 5*xcols)
+       xdata := make([]int32, 6*xcols)
        for i, xref := range xrefs {
                xdata[i] = int32(xref.tag)
                xdata[xcols+i] = int32(xref.variant)
@@ -1619,6 +1724,7 @@ func onehotXref2int32(xrefs []onehotXref) []int32 {
                }
                xdata[xcols*3+i] = int32(xref.pvalue * 1000000)
                xdata[xcols*4+i] = int32(-math.Log10(xref.pvalue) * 1000000)
+               xdata[xcols*5+i] = int32(xref.maf * 1000000)
        }
        return xdata
 }