+ if *onehotSingle || *onlyPCA {
+ nzCount := 0
+ for _, part := range onehotIndirect {
+ nzCount += len(part[0])
+ }
+ onehot := make([]uint32, nzCount*2) // [r,r,r,...,c,c,c,...]
+ var xrefs []onehotXref
+ chunkOffset := uint32(0)
+ outcol := 0
+ for i, part := range onehotIndirect {
+ for i := range part[1] {
+ part[1][i] += chunkOffset
+ }
+ copy(onehot[outcol:], part[0])
+ copy(onehot[outcol+nzCount:], part[1])
+ xrefs = append(xrefs, onehotXrefs[i]...)
+
+ outcol += len(part[0])
+ chunkOffset += onehotChunkSize[i]
+
+ part[0] = nil
+ part[1] = nil
+ onehotXrefs[i] = nil
+ debug.FreeOSMemory()
+ }
+ if *onehotSingle {
+ fnm := fmt.Sprintf("%s/onehot.npy", *outputDir)
+ err = writeNumpyUint32(fnm, onehot, 2, nzCount)
+ if err != nil {
+ return err
+ }
+ fnm = fmt.Sprintf("%s/onehot-columns.npy", *outputDir)
+ err = writeNumpyInt32(fnm, onehotXref2int32(xrefs), 5, len(xrefs))
+ if err != nil {
+ return err
+ }
+ fnm = fmt.Sprintf("%s/stats.json", *outputDir)
+ j, err := json.Marshal(map[string]interface{}{
+ "pvalueCallCount": cmd.pvalueCallCount,
+ })
+ if err != nil {
+ return err
+ }
+ err = os.WriteFile(fnm, j, 0777)
+ if err != nil {
+ return err
+ }
+ }
+ if *onlyPCA {
+ cols := 0
+ for _, c := range onehot[nzCount:] {
+ if int(c) >= cols {
+ cols = int(c) + 1
+ }
+ }
+ if cols == 0 {
+ return fmt.Errorf("cannot do PCA: one-hot matrix is empty")
+ }
+ log.Printf("have %d one-hot cols", cols)
+ stride := 1
+ for *maxPCATiles > 0 && cols > *maxPCATiles*2 {
+ cols = (cols + 1) / 2
+ stride = stride * 2
+ }
+ if cols%2 == 1 {
+ // we work with pairs of columns
+ cols++
+ }
+ log.Printf("creating full matrix (%d rows) and training matrix (%d rows) with %d cols, stride %d", len(cmd.cgnames), cmd.trainingSetSize, cols, stride)
+ mtxFull := mat.NewDense(len(cmd.cgnames), cols, nil)
+ mtxTrain := mat.NewDense(cmd.trainingSetSize, cols, nil)
+ for i, c := range onehot[nzCount:] {
+ if int(c/2)%stride == 0 {
+ outcol := int(c/2)/stride*2 + int(c)%2
+ mtxFull.Set(int(onehot[i]), outcol, 1)
+ if trainRow := cmd.trainingSet[int(onehot[i])]; trainRow >= 0 {
+ mtxTrain.Set(trainRow, outcol, 1)
+ }
+ }
+ }
+ log.Print("fitting")
+ transformer := nlp.NewPCA(cmd.pcaComponents)
+ transformer.Fit(mtxTrain.T())
+ log.Printf("transforming")
+ pca, err := transformer.Transform(mtxFull.T())
+ if err != nil {
+ return err
+ }
+ pca = pca.T()
+ outrows, outcols := pca.Dims()
+ log.Printf("copying result to numpy output array: %d rows, %d cols", outrows, outcols)
+ out := make([]float64, outrows*outcols)
+ for i := 0; i < outrows; i++ {
+ for j := 0; j < outcols; j++ {
+ out[i*outcols+j] = pca.At(i, j)
+ }
+ }
+ fnm := fmt.Sprintf("%s/pca.npy", *outputDir)
+ log.Printf("writing numpy: %s", fnm)
+ output, err := os.OpenFile(fnm, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0777)
+ if err != nil {
+ return err
+ }
+ npw, err := gonpy.NewWriter(nopCloser{output})
+ if err != nil {
+ return fmt.Errorf("gonpy.NewWriter: %w", err)
+ }
+ npw.Shape = []int{outrows, outcols}
+ err = npw.WriteFloat64(out)
+ if err != nil {
+ return fmt.Errorf("WriteFloat64: %w", err)
+ }
+ err = output.Close()
+ if err != nil {
+ return err
+ }
+ log.Print("done")
+
+ log.Print("copying pca components to sampleInfo")
+ for i := range cmd.samples {
+ cmd.samples[i].pcaComponents = make([]float64, outcols)
+ for c := 0; c < outcols; c++ {
+ cmd.samples[i].pcaComponents[c] = pca.At(i, c)
+ }
+ }
+ log.Print("done")
+
+ err = writeSampleInfo(cmd.samples, *outputDir)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ if !*mergeOutput && !*onehotChunked && !*onehotSingle && !*onlyPCA {
+ tagoffsetFilename := *outputDir + "/chunk-tag-offset.csv"
+ log.Infof("writing tag offsets to %s", tagoffsetFilename)
+ var f *os.File
+ f, err = os.Create(tagoffsetFilename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ for idx, offset := range chunkStartTag {
+ _, err = fmt.Fprintf(f, "%q,%d\n", fmt.Sprintf("matrix.%04d.npy", idx), offset)
+ if err != nil {
+ err = fmt.Errorf("write %s: %w", tagoffsetFilename, err)
+ return err
+ }
+ }
+ err = f.Close()
+ if err != nil {
+ err = fmt.Errorf("close %s: %w", tagoffsetFilename, err)
+ return err
+ }
+ }
+
+ return nil
+}
+
// sampleInfo holds one row of a samples.csv file: the sample's ID,
// its case/control status, its training/validation assignment, and
// any precomputed PCA components.
type sampleInfo struct {
	id            string    // SampleID column
	isCase        bool      // CaseControl column == "1"
	isControl     bool      // CaseControl column == "0"
	isTraining    bool      // TrainingValidation column == "1"
	isValidation  bool      // TrainingValidation == "0" and CaseControl non-empty (works around errant 0s in input)
	pcaComponents []float64 // optional PCA0, PCA1, ... columns
}
+
+// Read samples.csv file with case/control and training/validation
+// flags.
+func loadSampleInfo(samplesFilename string) ([]sampleInfo, error) {
+ var si []sampleInfo
+ f, err := open(samplesFilename)
+ if err != nil {
+ return nil, err
+ }
+ buf, err := io.ReadAll(f)
+ f.Close()
+ if err != nil {
+ return nil, err
+ }
+ lineNum := 0
+ for _, csv := range bytes.Split(buf, []byte{'\n'}) {
+ lineNum++
+ if len(csv) == 0 {
+ continue
+ }
+ split := strings.Split(string(csv), ",")
+ if len(split) < 4 {
+ return nil, fmt.Errorf("%d fields < 4 in %s line %d: %q", len(split), samplesFilename, lineNum, csv)
+ }
+ if split[0] == "Index" && split[1] == "SampleID" && split[2] == "CaseControl" && split[3] == "TrainingValidation" {
+ continue
+ }
+ idx, err := strconv.Atoi(split[0])
+ if err != nil {
+ if lineNum == 1 {
+ return nil, fmt.Errorf("header does not look right: %q", csv)
+ }
+ return nil, fmt.Errorf("%s line %d: index: %s", samplesFilename, lineNum, err)
+ }
+ if idx != len(si) {
+ return nil, fmt.Errorf("%s line %d: index %d out of order", samplesFilename, lineNum, idx)
+ }
+ var pcaComponents []float64
+ if len(split) > 4 {
+ for _, s := range split[4:] {
+ f, err := strconv.ParseFloat(s, 64)
+ if err != nil {
+ return nil, fmt.Errorf("%s line %d: cannot parse float %q: %s", samplesFilename, lineNum, s, err)
+ }
+ pcaComponents = append(pcaComponents, f)
+ }
+ }
+ si = append(si, sampleInfo{
+ id: split[1],
+ isCase: split[2] == "1",
+ isControl: split[2] == "0",
+ isTraining: split[3] == "1",
+ isValidation: split[3] == "0" && len(split[2]) > 0, // fix errant 0s in input
+ pcaComponents: pcaComponents,
+ })
+ }
+ return si, nil
+}
+
+func writeSampleInfo(samples []sampleInfo, outputDir string) error {
+ fnm := outputDir + "/samples.csv"
+ log.Infof("writing sample metadata to %s", fnm)
+ f, err := os.Create(fnm)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ pcaLabels := ""
+ if len(samples) > 0 {
+ for i := range samples[0].pcaComponents {
+ pcaLabels += fmt.Sprintf(",PCA%d", i)
+ }
+ }
+ _, err = fmt.Fprintf(f, "Index,SampleID,CaseControl,TrainingValidation%s\n", pcaLabels)
+ if err != nil {
+ return err
+ }
+ for i, si := range samples {
+ var cc, tv string
+ if si.isCase {
+ cc = "1"
+ } else if si.isControl {
+ cc = "0"
+ }
+ if si.isTraining {
+ tv = "1"
+ } else if si.isValidation {
+ tv = "0"
+ }
+ var pcavals string
+ for _, pcaval := range si.pcaComponents {
+ pcavals += fmt.Sprintf(",%f", pcaval)
+ }
+ _, err = fmt.Fprintf(f, "%d,%s,%s,%s%s\n", i, si.id, cc, tv, pcavals)
+ if err != nil {
+ return fmt.Errorf("write %s: %w", fnm, err)
+ }
+ }
+ err = f.Close()
+ if err != nil {
+ return fmt.Errorf("close %s: %w", fnm, err)
+ }
+ log.Print("done")
+ return nil
+}
+
+func (cmd *sliceNumpy) filterHGVScolpair(colpair [2][]int8) bool {
+ if cmd.chi2PValue >= 1 {
+ return true
+ }
+ col0 := make([]bool, 0, len(cmd.chi2Cases))
+ col1 := make([]bool, 0, len(cmd.chi2Cases))
+ cases := make([]bool, 0, len(cmd.chi2Cases))
+ for i, c := range cmd.chi2Cases {
+ if colpair[0][i] < 0 {
+ continue
+ }
+ col0 = append(col0, colpair[0][i] != 0)
+ col1 = append(col1, colpair[1][i] != 0)
+ cases = append(cases, c)
+ }
+ return len(cases) >= cmd.minCoverage &&
+ (pvalue(col0, cases) <= cmd.chi2PValue || pvalue(col1, cases) <= cmd.chi2PValue)
+}
+
+func writeNumpyUint32(fnm string, out []uint32, rows, cols int) error {
+ output, err := os.Create(fnm)
+ if err != nil {
+ return err
+ }
+ defer output.Close()
+ bufw := bufio.NewWriterSize(output, 1<<26)
+ npw, err := gonpy.NewWriter(nopCloser{bufw})
+ if err != nil {
+ return err
+ }
+ log.WithFields(log.Fields{
+ "filename": fnm,
+ "rows": rows,
+ "cols": cols,
+ "bytes": rows * cols * 4,
+ }).Infof("writing numpy: %s", fnm)
+ npw.Shape = []int{rows, cols}
+ npw.WriteUint32(out)
+ err = bufw.Flush()
+ if err != nil {
+ return err
+ }
+ return output.Close()
+}
+
+func writeNumpyInt32(fnm string, out []int32, rows, cols int) error {
+ output, err := os.Create(fnm)
+ if err != nil {
+ return err
+ }
+ defer output.Close()
+ bufw := bufio.NewWriterSize(output, 1<<26)
+ npw, err := gonpy.NewWriter(nopCloser{bufw})
+ if err != nil {
+ return err
+ }
+ log.WithFields(log.Fields{
+ "filename": fnm,
+ "rows": rows,
+ "cols": cols,
+ "bytes": rows * cols * 4,
+ }).Infof("writing numpy: %s", fnm)
+ npw.Shape = []int{rows, cols}
+ npw.WriteInt32(out)
+ err = bufw.Flush()
+ if err != nil {
+ return err
+ }
+ return output.Close()