File: ReadExperiment.hpp

#ifndef EXPERIMENT_HPP
#define EXPERIMENT_HPP

extern "C" {
#include "bwa.h"
#include "bwamem.h"
#include "kvec.h"
#include "utils.h"
}

// Our includes
#include "ClusterForest.hpp"
#include "DistributionUtils.hpp"
#include "GCFragModel.hpp"
#include "Transcript.hpp"
#include "ReadLibrary.hpp"
#include "FragmentLengthDistribution.hpp"
#include "FragmentStartPositionDistribution.hpp"
#include "SequenceBiasModel.hpp"
#include "SalmonOpts.hpp"
#include "SalmonIndex.hpp"
#include "SalmonUtils.hpp"
#include "EquivalenceClassBuilder.hpp"
#include "SpinLock.hpp" // RapMap's with try_lock
#include "UtilityFunctions.hpp"
#include "ReadKmerDist.hpp"
#include "SBModel.hpp"
#include "SimplePosBias.hpp"

// Logger includes
#include "spdlog/spdlog.h"

// Boost includes
#include <boost/filesystem.hpp>
#include <boost/range/irange.hpp>

// Cereal includes
#include "cereal/archives/json.hpp"

// Standard includes
#include <vector>
#include <memory>
#include <fstream>


/**
  *  This class represents an experiment consisting of one or more read
  *  libraries used to quantify a set of target transcripts.  The
  *  ReadExperiment groups the read libraries together with the salmon
  *  index and the target sequences (transcripts), and tracks information
  *  about them during the quantification procedure.
  */
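/**
  *  Illustrative usage sketch (not part of the original source).  It shows
  *  how a ReadExperiment is typically constructed and queried; the names
  *  `readLibs`, `indexDir`, and `sopt` are assumed to have been populated
  *  elsewhere (e.g. from the command line):
  *
  *      std::vector<ReadLibrary> readLibs = ...;   // parsed read libraries
  *      boost::filesystem::path indexDir = ...;    // directory of a salmon index
  *      SalmonOpts sopt;                           // quantification options
  *
  *      ReadExperiment experiment(readLibs, indexDir, sopt);
  *      auto& txps = experiment.transcripts();     // targets loaded from the index
  *      double rate = experiment.mappingRate();    // fraction of observed fragments assigned
  */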
class ReadExperiment {

    public:

    ReadExperiment(std::vector<ReadLibrary>& readLibraries,
                   //const boost::filesystem::path& transcriptFile,
                   const boost::filesystem::path& indexDirectory,
		           SalmonOpts& sopt) :
        readLibraries_(readLibraries),
        //transcriptFile_(transcriptFile),
        transcripts_(std::vector<Transcript>()),
        totalAssignedFragments_(0),
        fragStartDists_(5),
        posBiasFW_(5),
        posBiasRC_(5),
        seqBiasModel_(1.0),
	eqBuilder_(sopt.jointLog),
        expectedBias_(constExprPow(4, readBias_[0].getK()), 1.0),
	expectedGC_( sopt.numConditionalGCBins,
		    sopt.numFragGCBins, distribution_utils::DistributionSpace::LOG),
        observedGC_( sopt.numConditionalGCBins,
		    sopt.numFragGCBins, distribution_utils::DistributionSpace::LOG)
    {
            namespace bfs = boost::filesystem;

            // Make sure the read libraries are valid.
            for (auto& rl : readLibraries_) { rl.checkValid(); }

            size_t maxFragLen = sopt.fragLenDistMax;
            size_t meanFragLen = sopt.fragLenDistPriorMean;
            size_t fragLenStd = sopt.fragLenDistPriorSD;
            size_t fragLenKernelN = 4;
            double fragLenKernelP = 0.5;
            fragLengthDist_.reset(new FragmentLengthDistribution(1.0, maxFragLen,
                    meanFragLen, fragLenStd,
                    fragLenKernelN,
                    fragLenKernelP, 1));
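            // Added note (the interpretation is an assumption about how
            // FragmentLengthDistribution uses its kernel parameters): with
            // fragLenKernelN = 4 and fragLenKernelP = 0.5, a Binomial(4, 0.5)
            // kernel has weights C(4,k) * 0.5^4 = {1, 4, 6, 4, 1} / 16 for
            // k = 0..4, i.e. each observed length would be smoothed over the
            // two neighboring bins on either side.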

            // Make sure the transcript file exists.
            /*
            if (!bfs::exists(transcriptFile_)) {
                std::stringstream ss;
                ss << "The provided transcript file: " << transcriptFile_ <<
                    " does not exist!\n";
                throw std::invalid_argument(ss.str());
            }
            */

            // ==== Figure out the index type
            boost::filesystem::path versionPath = indexDirectory / "versionInfo.json";
            SalmonIndexVersionInfo versionInfo;
            versionInfo.load(versionPath);
            if (versionInfo.indexVersion() == 0) {
                fmt::MemoryWriter infostr;
                infostr << "Error: The index version file " << versionPath.string()
                    << " doesn't seem to exist.  Please try re-building the salmon "
                    "index.";
                throw std::invalid_argument(infostr.str());
            }
            // Check index version compatibility here
            auto indexType = versionInfo.indexType();
            // ==== Figure out the index type

            salmonIndex_.reset(new SalmonIndex(sopt.jointLog, indexType));
            salmonIndex_->load(indexDirectory);

	    // Now we'll have either an FMD-based index or a QUASI index
	    // dispatch on the correct type.

	    switch (salmonIndex_->indexType()) {
            case SalmonIndexType::QUASI:
                if (salmonIndex_->is64BitQuasi()) {
                    if (salmonIndex_->isPerfectHashQuasi()) {
                        loadTranscriptsFromQuasi(salmonIndex_->quasiIndexPerfectHash64(), sopt);
                    } else {
                        loadTranscriptsFromQuasi(salmonIndex_->quasiIndex64(), sopt);
                    }
                } else {
                    if (salmonIndex_->isPerfectHashQuasi()) {
                        loadTranscriptsFromQuasi(salmonIndex_->quasiIndexPerfectHash32(), sopt);
                    } else {
                        loadTranscriptsFromQuasi(salmonIndex_->quasiIndex32(), sopt);
                    }
                }
                break;
            case SalmonIndexType::FMD:
                loadTranscriptsFromFMD();
                break;
	    }


            // Create the cluster forest for this set of transcripts
            clusters_.reset(new ClusterForest(transcripts_.size(), transcripts_));
        }

    EquivalenceClassBuilder& equivalenceClassBuilder() {
        return eqBuilder_;
    }

    std::vector<Transcript>& transcripts() { return transcripts_; }
    const std::vector<Transcript>& transcripts() const { return transcripts_; }

    void updateTranscriptLengthsAtomic(std::atomic<bool>& done) {
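        // Added note: sl_.try_lock() lets only one thread perform this
        // update at a time; a thread that fails to acquire the lock simply
        // returns, and once `done` has been set, later lock holders skip
        // the recomputation.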
        if (sl_.try_lock()) {
            if (!done) {
                auto fld = fragLengthDist_.get();
                // Convert the PMF to non-log scale
                std::vector<double> logPMF;
                size_t minVal;
                size_t maxVal;
                fld->dumpPMF(logPMF, minVal, maxVal);
                double sum = salmon::math::LOG_0;
                for (auto v : logPMF) {
                    sum = salmon::math::logAdd(sum, v);
                }
                for (auto& v : logPMF) {
                    v -= sum;
                }

                // Create the non-logged distribution.
                // Here, we multiply by 100 to discourage small
                // numbers in the correctionFactorsFromMass call
                // below.
                std::vector<double> pmf(maxVal + 1, 0.0);
                for (size_t i = minVal; i < maxVal; ++i) {
                    pmf[i] = 100.0 * std::exp(logPMF[i - minVal]);
                }
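                // Added note: after the normalization above,
                // logPMF[j] = log(p_j / sum_k p_k), so
                // pmf[minVal + j] = 100 * p_j / sum_k p_k; per the comment
                // above, the constant 100 only rescales every bin equally
                // and is there to avoid very small values.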

		using distribution_utils::DistributionSpace;
		// We compute the factors in linear space (since we've de-logged the pmf)
                auto correctionFactors = distribution_utils::correctionFactorsFromMass(pmf, DistributionSpace::LINEAR);
		// Since we'll continue treating effective lengths in log space, populate them as such
		distribution_utils::computeSmoothedEffectiveLengths(pmf.size(), transcripts_, correctionFactors, DistributionSpace::LOG);

                /*
                // Update the effective length of *every* transcript
                for( auto& t : transcripts_ ) {
                    t.updateEffectiveLength(logPMF, logFLDMean, minVal, maxVal);
                }
                */
                // then declare that we are done
                done = true;
            }
            // Release the lock whether or not the update was performed, so
            // that a call made after `done` has been set cannot leave the
            // spin lock held.
            sl_.unlock();
        }
    }

    uint64_t numAssignedFragments() { return numAssignedFragments_; }
    uint64_t numMappedFragments() const { return numAssignedFragments_; }

    uint64_t upperBoundHits() { return upperBoundHits_; }
    void setUpperBoundHits(uint64_t ubh) { upperBoundHits_ = ubh; }

    std::atomic<uint64_t>& numAssignedFragmentsAtomic() { return numAssignedFragments_; }

    void setNumObservedFragments(uint64_t numObserved) { numObservedFragments_ = numObserved; }
    
    void updateShortFrags(salmon::utils::ShortFragStats& fs) { 
        sl_.lock();
        shortFragStats_.numTooShort += fs.numTooShort; 
        shortFragStats_.shortest = (fs.shortest < shortFragStats_.shortest) ? fs.shortest : shortFragStats_.shortest; 
        sl_.unlock();
    }

    salmon::utils::ShortFragStats getShortFragStats() const { return shortFragStats_; }

    uint64_t numObservedFragments() const {
        return numObservedFragments_;
    }

    double mappingRate() {
        if (quantificationPasses_ > 0) {
            return static_cast<double>(numAssignedFragsInFirstPass_) / numObservedFragsInFirstPass_;
        } else {
            return static_cast<double>(numAssignedFragments_) / numObservedFragments_;
        }
    }

    SalmonIndex* getIndex() { return salmonIndex_.get(); }

    template <typename QuasiIndexT>
    void loadTranscriptsFromQuasi(QuasiIndexT* idx_, const SalmonOpts& sopt) {
	    size_t numRecords = idx_->txpNames.size();
        auto log = spdlog::get("jointLog");

	    log->info("Index contained {} targets", numRecords);
	    //transcripts_.resize(numRecords);
	    double alpha = 0.005;
	    for (auto i : boost::irange(size_t(0), numRecords)) {
		    uint32_t id = i;
		    const char* name = idx_->txpNames[i].c_str();
		    uint32_t len = idx_->txpLens[i];
		    // copy over the length, then we're done.
		    transcripts_.emplace_back(id, name, len, alpha);
		    auto& txp = transcripts_.back();
		    // The transcript sequence
		    //auto txpSeq = idx_->seq.substr(idx_->txpOffsets[i], len);

		    // Set the transcript sequence
		    txp.setSequenceBorrowed(idx_->seq.c_str() + idx_->txpOffsets[i],
                                    sopt.gcBiasCorrect, sopt.gcSampFactor);
		    // Length classes taken from
            // https://github.com/cole-trapnell-lab/cufflinks/blob/master/src/biascorrection.cpp
		    // ======
		    // Roberts, Adam, et al.
		    // "Improving RNA-Seq expression estimates by correcting for fragment bias."
		    // Genome Biol 12.3 (2011): R22.
		    // ======
		    // perhaps, define these in a more data-driven way
            if (txp.RefLength <= 791) {
                txp.lengthClassIndex(0);
            } else if (txp.RefLength <= 1265) {
                txp.lengthClassIndex(1);
            } else if (txp.RefLength <= 1707) {
                txp.lengthClassIndex(2);
            } else if (txp.RefLength <= 2433) {
                txp.lengthClassIndex(3);
            } else {
                txp.lengthClassIndex(4);
            }
      }
	    // ====== Done loading the transcripts from file
    }

    void loadTranscriptsFromFMD() {
	    bwaidx_t* idx_ = salmonIndex_->bwaIndex();
	    size_t numRecords = idx_->bns->n_seqs;
	    std::vector<Transcript> transcripts_tmp;
        auto log = spdlog::get("jointLog");
        //transcripts_tmp.reserve(numRecords);
        //transcripts_.reserve(numRecords);

	    log->info("Index contained {} targets", numRecords);
	    //transcripts_.resize(numRecords);
	    for (auto i : boost::irange(size_t(0), numRecords)) {
		    uint32_t id = i;
		    char* name = idx_->bns->anns[i].name;
		    uint32_t len = idx_->bns->anns[i].len;
		    // copy over the length, then we're done.
		    transcripts_tmp.emplace_back(id, name, len);
	    }

	    std::sort(transcripts_tmp.begin(), transcripts_tmp.end(),
			    [](const Transcript& t1, const Transcript& t2) -> bool {
			    return t1.id < t2.id;
			    });


	    double alpha = 0.005;
	    char nucTab[256];
	    nucTab[0] = 'A'; nucTab[1] = 'C'; nucTab[2] = 'G'; nucTab[3] = 'T';
	    for (size_t i = 4; i < 256; ++i) { nucTab[i] = 'N'; }

        size_t tnum = 0;
	    // Load the transcript sequence from file
	    for (auto& t : transcripts_tmp) {
		    transcripts_.emplace_back(t.id, t.RefName.c_str(), t.RefLength, alpha);
		    /* from BWA */
		    uint8_t* rseq = nullptr;
		    int64_t tstart, tend, compLen, l_pac = idx_->bns->l_pac;
		    tstart  = idx_->bns->anns[t.id].offset;
		    tend = tstart + t.RefLength;
		    rseq = bns_get_seq(l_pac, idx_->pac, tstart, tend, &compLen);
		    if (compLen != t.RefLength) {
			    fmt::print(stderr,
					    "For transcript {}, stored length ({}) != computed length ({}) --- index may be corrupt. exiting\n",
					    t.RefName, compLen, t.RefLength);
			    std::exit(1);
		    }
		    std::string seq(t.RefLength, ' ');
		    if (rseq != 0) {
			    for (int64_t i = 0; i < compLen; ++i) { seq[i] = nucTab[rseq[i]]; }
		    }

            auto& txp = transcripts_.back();

            // allocate space for the new copy
            char* seqCopy = new char[seq.length()+1];
            std::strcpy(seqCopy, seq.c_str());
            txp.setSequenceOwned(seqCopy);
		    txp.setSAMSequenceOwned(salmon::stringtools::encodeSequenceInSAM(seq.c_str(), t.RefLength));

            // Length classes taken from
            // https://github.com/cole-trapnell-lab/cufflinks/blob/master/src/biascorrection.cpp
		    // ======
		    // Roberts, Adam, et al.
		    // "Improving RNA-Seq expression estimates by correcting for fragment bias."
		    // Genome Biol 12.3 (2011): R22.
		    // ======
		    // perhaps, define these in a more data-driven way
            if (txp.RefLength <= 791) {
                txp.lengthClassIndex(0);
            } else if (txp.RefLength <= 1265) {
                txp.lengthClassIndex(1);
            } else if (txp.RefLength <= 1707) {
                txp.lengthClassIndex(2);
            } else if (txp.RefLength <= 2433) {
                txp.lengthClassIndex(3);
            } else {
                txp.lengthClassIndex(4);
            }
		    free(rseq);
		    /* end BWA code */
            ++tnum;
	    }

	    // Since we have the de-coded reference sequences, we no longer need
	    // the encoded sequences, so free them.
	    /** TEST OPT **/
	    // free(idx_->pac); idx_->pac = nullptr;
	    /** END TEST OPT **/
	    transcripts_tmp.clear();
	    // ====== Done loading the transcripts from file
    }
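    // Added note: loadTranscriptsFromQuasi() lets each Transcript *borrow*
    // its sequence directly from the index's concatenated sequence, whereas
    // loadTranscriptsFromFMD() above decodes each sequence out of BWA's
    // packed representation into a freshly allocated buffer and hands it to
    // the Transcript as an *owned* copy via setSequenceOwned().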


    template <typename CallbackT>
    bool processReads(const uint32_t& numThreads, const SalmonOpts& sopt, CallbackT& processReadLibrary) {
        std::atomic<bool> burnedIn{totalAssignedFragments_ + numAssignedFragments_ > sopt.numBurninFrags};
        for (auto& rl : readLibraries_) {
            processReadLibrary(rl, salmonIndex_.get(), transcripts_, clusterForest(),
                               *(fragLengthDist_.get()), numAssignedFragments_,
                               numThreads, burnedIn);
        }
        return true;
    }
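    // Illustrative sketch (not from the original source) of a callable that
    // would satisfy the CallbackT parameter of processReads(); the argument
    // types are inferred from the call above:
    //
    //     auto processReadLibrary = [](ReadLibrary& rl,
    //                                  SalmonIndex* sidx,
    //                                  std::vector<Transcript>& transcripts,
    //                                  ClusterForest& clusterForest,
    //                                  FragmentLengthDistribution& fld,
    //                                  std::atomic<uint64_t>& numAssignedFragments,
    //                                  uint32_t numThreads,
    //                                  std::atomic<bool>& burnedIn) {
    //         // map the reads of `rl` against the index and update the
    //         // shared statistics / distributions accordingly
    //     };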

    ~ReadExperiment() {
        // ---- Get rid of things we no longer need --------
        // bwa_idx_destroy(idx_);
    }

    ClusterForest& clusterForest() { return *clusters_.get(); }

    std::string readFilesAsString() {
        std::stringstream sstr;
        size_t ln{0};
        size_t numReadLibraries{readLibraries_.size()};

        for (auto &rl : readLibraries_) {
            sstr << rl.readFilesAsString();
            if (++ln < numReadLibraries) { sstr << "; "; }
        }
        return sstr.str();
    }

    uint64_t numAssignedFragsInFirstPass() {
        return numAssignedFragsInFirstPass_;
    }

    uint64_t numObservedFragsInFirstPass() {
        return numObservedFragsInFirstPass_;
    }

    double effectiveMappingRate() const {
        return effectiveMappingRate_;
    }

    void setEffectiveMappingRate(double emr) {
        effectiveMappingRate_ = emr;
    }

    std::vector<FragmentStartPositionDistribution>& fragmentStartPositionDistributions() {
        return fragStartDists_;
    }

    SequenceBiasModel& sequenceBiasModel() {
        return seqBiasModel_;
    }

    bool softReset() {
        if (quantificationPasses_ == 0) {
            numAssignedFragsInFirstPass_ = numAssignedFragments_;
            numObservedFragsInFirstPass_ = numObservedFragments_;
        }
        numObservedFragments_ = 0;
        totalAssignedFragments_ += numAssignedFragments_;
        numAssignedFragments_ = 0;
        quantificationPasses_++;
        return true;
    }

    bool reset() {
        namespace bfs = boost::filesystem;
        for (auto& rl : readLibraries_) {
            if (!rl.isRegularFile()) { return false; }
        }

        if (quantificationPasses_ == 0) {
            numAssignedFragsInFirstPass_ = numAssignedFragments_;
            numObservedFragsInFirstPass_ = numObservedFragments_;
        }

        numObservedFragments_ = 0;
        totalAssignedFragments_ += numAssignedFragments_;
        numAssignedFragments_ = 0;
        quantificationPasses_++;
        return true;
    }

    void summarizeLibraryTypeCounts(boost::filesystem::path& opath){
        LibraryFormat fmt1(ReadType::SINGLE_END, ReadOrientation::NONE, ReadStrandedness::U);
        LibraryFormat fmt2(ReadType::SINGLE_END, ReadOrientation::NONE, ReadStrandedness::U);

        std::ofstream os(opath.string());
        cereal::JSONOutputArchive oa(os);

        //std::ofstream ofile(opath.string());

        fmt::MemoryWriter errstr;

        auto log = spdlog::get("jointLog");

        uint64_t numFmt1{0};
        uint64_t numFmt2{0};
        uint64_t numAgree{0};
        uint64_t numDisagree{0};

        for (auto& rl : readLibraries_) {
            auto fmt = rl.format();
            auto& counts = rl.libTypeCounts();

            // If the format is un-stranded, check that
            // we have a similar number of mappings in both
            // directions and then aggregate the forward and
            // reverse counts.
            if (fmt.strandedness == ReadStrandedness::U) {
                std::vector<ReadStrandedness> strands;
                switch (fmt.orientation) {
                    case ReadOrientation::SAME:
                    case ReadOrientation::NONE:
                        strands.push_back(ReadStrandedness::S);
                        strands.push_back(ReadStrandedness::A);
                        break;
                    case ReadOrientation::AWAY:
                    case ReadOrientation::TOWARD:
                        strands.push_back(ReadStrandedness::AS);
                        strands.push_back(ReadStrandedness::SA);
                        break;
                }

                fmt1.type = fmt.type; fmt1.orientation = fmt.orientation;
                fmt1.strandedness = strands[0];
                fmt2.type = fmt.type; fmt2.orientation = fmt.orientation;
                fmt2.strandedness = strands[1];

                numFmt1 = 0;
                numFmt2 = 0;
                numAgree = 0;
                numDisagree = 0;

                for (size_t i = 0; i < counts.size(); ++i) {
                    if (i == fmt1.formatID()) {
                        numFmt1 = counts[i];
                    } else if (i == fmt2.formatID()) {
                        numFmt2 = counts[i];
                    } else {
                        numDisagree += counts[i];
                    }
                }
                numAgree = numFmt1 + numFmt2;
                double ratio = static_cast<double>(numFmt1) / (numFmt1 + numFmt2);

                if ( std::abs(ratio - 0.5) > 0.01) {
                    errstr << "NOTE: Read Lib [" << rl.readFilesAsString() << "] :\n";
                    errstr << "\nDetected a *potential* strand bias > 1\% in an unstranded protocol "
                           << "check the file: " << opath.string() << " for details\n";

                    log->warn(errstr.str());
                    errstr.clear();
                }
                

                oa(cereal::make_nvp("read_files", rl.readFilesAsString()));
                std::string expectedFormat = rl.format().toString();
                oa(cereal::make_nvp("expected_format", expectedFormat));

                double compatFragmentRatio = rl.numCompat() / static_cast<double>(numAssignedFragments_);
                oa(cereal::make_nvp("compatible_fragment_ratio", compatFragmentRatio));
                oa(cereal::make_nvp("num_compatible_fragments", rl.numCompat()));
                oa(cereal::make_nvp("num_assigned_fragments", numAssignedFragments_.load()));

                oa(cereal::make_nvp("num_consistent_mappings", numAgree));
                oa(cereal::make_nvp("num_inconsistent_mappings", numDisagree));
                oa(cereal::make_nvp("strand_mapping_bias", ratio));
            } else {
                numAgree = 0;
                numDisagree = 0;

                for (size_t i = 0; i < counts.size(); ++i) {
                    if (i == fmt.formatID()) {
                        numAgree = counts[i];
                    } else {
                        numDisagree += counts[i];
                    }
                } // end for

                oa(cereal::make_nvp("read_files", rl.readFilesAsString()));
                std::string expectedFormat = rl.format().toString();
                oa(cereal::make_nvp("expected_format", expectedFormat));

                double compatFragmentRatio = rl.numCompat() / static_cast<double>(numAssignedFragments_);
                oa(cereal::make_nvp("compatible_fragment_ratio", compatFragmentRatio));
                oa(cereal::make_nvp("num_compatible_fragments", rl.numCompat()));
                oa(cereal::make_nvp("num_assigned_fragments", numAssignedFragments_.load()));

                oa(cereal::make_nvp("num_consistent_mappings", numAgree));
                oa(cereal::make_nvp("num_inconsistent_mappings", numDisagree));
            } //end else


            double compatFragmentRatio = rl.numCompat() / static_cast<double>(numAssignedFragments_);
            double disagreeRatio = 1.0 - compatFragmentRatio;
            if (disagreeRatio > 0.05) {
                errstr << "NOTE: Read Lib [" << rl.readFilesAsString() << "] :\n";
                errstr << "\nGreater than 5\% of the fragments "
                       << "disagreed with the provided library type; "
                       << "check the file: " << opath.string() << " for details\n";

                log->warn(errstr.str());
                errstr.clear();
            }

            for (size_t i = 0; i < counts.size(); ++i) {
                std::string desc = LibraryFormat::formatFromID(i).toString();
                if (!desc.empty()) {
                    oa(cereal::make_nvp(desc, counts[i].load()));
                }
            }
        }
    }
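    // Illustrative sketch (not from the original source) of the kind of JSON
    // record summarizeLibraryTypeCounts() writes via cereal for a single
    // unstranded read library; all values below are fabricated placeholders,
    // and only the key names come from the make_nvp calls above (the
    // "strand_mapping_bias" entry appears only for unstranded formats):
    //
    //     {
    //         "read_files": "<files of this library>",
    //         "expected_format": "<library format string>",
    //         "compatible_fragment_ratio": 0.97,
    //         "num_compatible_fragments": 970000,
    //         "num_assigned_fragments": 1000000,
    //         "num_consistent_mappings": 950000,
    //         "num_inconsistent_mappings": 20000,
    //         "strand_mapping_bias": 0.51,
    //         ...
    //     }
    //
    // The trailing entries (one per library format, keyed by the format's
    // string description) report the raw per-format mapping counts.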

    std::vector<ReadLibrary>& readLibraries() { return readLibraries_; }
    const std::vector<ReadLibrary>& readLibraries() const { return readLibraries_; }
    FragmentLengthDistribution* fragmentLengthDistribution() const { return fragLengthDist_.get(); }

    void setGCFracForward(double fracForward) { gcFracFwd_ = fracForward; }

    double gcFracFwd() const { return gcFracFwd_; }
    double gcFracRC() const { return 1.0 - gcFracFwd_; }


    std::vector<double>& expectedSeqBias() {
        return expectedBias_;
    }

    const std::vector<double>& expectedSeqBias() const {
        return expectedBias_;
    }

    void setExpectedGCBias(const GCFragModel& expectedBiasIn) {
        expectedGC_ = expectedBiasIn;
    }

    GCFragModel& expectedGCBias() {
        return expectedGC_;
    }

    const GCFragModel& expectedGCBias() const {
        return expectedGC_;
    }

    const GCFragModel& observedGC() const {
        return observedGC_;
    }

    GCFragModel& observedGC() {
        return observedGC_;
    }

    std::vector<SimplePosBias>& posBias(salmon::utils::Direction dir) { 
        return (dir == salmon::utils::Direction::FORWARD) ? posBiasFW_ : posBiasRC_; 
    }
    const std::vector<SimplePosBias>& posBias(salmon::utils::Direction dir) const { 
        return (dir == salmon::utils::Direction::FORWARD) ? posBiasFW_ : posBiasRC_; 
    }

    ReadKmerDist<6, std::atomic<uint32_t>>& readBias(salmon::utils::Direction dir) { 
        return (dir == salmon::utils::Direction::FORWARD) ? readBias_[0] : readBias_[1]; 
    }
    const ReadKmerDist<6, std::atomic<uint32_t>>& readBias(salmon::utils::Direction dir) const { 
        return (dir == salmon::utils::Direction::FORWARD) ? readBias_[0] : readBias_[1]; 
    }

    SBModel& readBiasModelObserved(salmon::utils::Direction dir) { 
        return (dir == salmon::utils::Direction::FORWARD) ? readBiasModelObserved_[0] : readBiasModelObserved_[1]; 
    }
    const SBModel& readBiasModelObserved(salmon::utils::Direction dir) const { 
        return (dir == salmon::utils::Direction::FORWARD) ? readBiasModelObserved_[0] : readBiasModelObserved_[1]; 
    }

    SBModel& readBiasModelExpected(salmon::utils::Direction dir) { 
	return (dir == salmon::utils::Direction::FORWARD) ? readBiasModelExpected_[0] : readBiasModelExpected_[1]; 
    }
    const SBModel& readBiasModelExpected(salmon::utils::Direction dir) const { 
	return (dir == salmon::utils::Direction::FORWARD) ? readBiasModelExpected_[0] : readBiasModelExpected_[1]; 
   }
    void setReadBiasModelExpected(SBModel&& model, salmon::utils::Direction dir) {
        size_t idx = (dir == salmon::utils::Direction::FORWARD) ? 0 : 1;
	readBiasModelExpected_[idx] = std::move(model);
    }
  
    private:
    /**
     * The read libraries (sets of read files and their format)
     * from which the fragments will be read.
     */
    std::vector<ReadLibrary> readLibraries_;
    /**
     * The file from which the transcripts are read.
     * This is expected to be a FASTA format file.
     */
    //boost::filesystem::path transcriptFile_;
    /**
     * The targets (transcripts) to be quantified.
     */
    std::vector<Transcript> transcripts_;
    /**
     * The index we've built on the set of transcripts.
     */
    std::unique_ptr<SalmonIndex> salmonIndex_{nullptr};
    //bwaidx_t *idx_{nullptr};
    /**
     * The cluster forest maintains the dynamic relationship
     * defined by transcripts and reads --- if two transcripts
     * share an ambiguously mapped read, then they are placed
     * in the same cluster.
     */
    std::unique_ptr<ClusterForest> clusters_;
    /**
      * The fragment start position distributions.
      */
    std::vector<FragmentStartPositionDistribution> fragStartDists_;

    SequenceBiasModel seqBiasModel_;

    /** Statistics about fragments that were too short to be mapped. */
    salmon::utils::ShortFragStats shortFragStats_;
    std::atomic<uint64_t> numObservedFragments_{0};
    std::atomic<uint64_t> numAssignedFragments_{0};
    uint64_t totalAssignedFragments_{0};
    /** Keeps track of the number of quantification passes that have been
     *  made over the input reads.
     */
    size_t quantificationPasses_{0};
    uint64_t numAssignedFragsInFirstPass_{0};
    uint64_t numObservedFragsInFirstPass_{0};
    uint64_t upperBoundHits_{0};
    double effectiveMappingRate_{0.0};
    SpinLock sl_;
    std::unique_ptr<FragmentLengthDistribution> fragLengthDist_;
    EquivalenceClassBuilder eqBuilder_;

    /** Positional bias things**/
    std::vector<SimplePosBias> posBiasFW_;
    std::vector<SimplePosBias> posBiasRC_;
 
    /** GC-fragment bias things **/
    // One bin for each percentage GC content
    double gcFracFwd_{-1.0};
    GCFragModel observedGC_;
    GCFragModel expectedGC_;

    /** Sequence specific bias things **/
    // Since multiple threads can touch this dist, we
    // need atomic counters.
    std::array<ReadKmerDist<6, std::atomic<uint32_t>>, 2> readBias_;
    std::array<SBModel, 2> readBiasModelObserved_;
    std::array<SBModel, 2> readBiasModelExpected_;
    //std::array<std::vector<double>, 2> expectedBias_;
    std::vector<double> expectedBias_;
};

#endif // EXPERIMENT_HPP