Skip to content

Commit

Permalink
Change vector<double> llk{0,1}_ to vector<log_double_t> siteLikelihoods{0,1}_
Browse files Browse the repository at this point in the history
  • Loading branch information
bredelings committed Mar 29, 2021
1 parent 7ad8cac commit 200eddb
Show file tree
Hide file tree
Showing 3 changed files with 44 additions and 43 deletions.
45 changes: 23 additions & 22 deletions src/updateHap.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -206,30 +206,29 @@ void UpdateSingleHap::calcExpectedWsaf( vector <double> & expectedWsaf, vector <


// Build the two-column emission matrix for the HMM over panel haplotypes.
// For each locus i, emission_[i][a] is proportional to the probability of
// the observed read data given that the copied panel allele is a (0 or 1),
// allowing a miss-copy of the opposite allele with probability missCopyProb.
void UpdateSingleHap::buildEmission( double missCopyProb ) {
    // "omu" = one minus u: likelihood contribution of a faithful copy.
    auto t1omu = vecProd(siteLikelihoods0_, log_double_t(1.0 - missCopyProb)); // t1 one minus u
    auto t2omu = vecProd(siteLikelihoods1_, log_double_t(1.0 - missCopyProb)); // t2 one minus u

    // "u": likelihood contribution of a miss-copy.
    auto t1u = vecProd(siteLikelihoods0_, log_double_t(missCopyProb));
    auto t2u = vecProd(siteLikelihoods1_, log_double_t(missCopyProb));

    emission_.clear();
    for ( size_t i = 0; i < this->nLoci_; i++) {
        // Rescale by the per-locus maximum term so the log_double_t ratios
        // convert to double without underflow; scaling each row by a common
        // constant leaves the forward/backward recursions unchanged.
        vector <log_double_t> tmp ({t1omu[i], t2omu[i], t1u[i], t2u[i]});
        auto tmaxTmp = max_value(tmp);
        vector <double> emissRow ({(t1omu[i] / tmaxTmp) + (t2u[i] / tmaxTmp),
                                   (t2omu[i] / tmaxTmp) + (t1u[i] / tmaxTmp)});

        this->emission_.push_back(emissRow);
    }
}


void UpdateSingleHap::buildEmissionBasicVersion( double missCopyProb ) {
emission_.clear();
for ( size_t i = 0; i < this->nLoci_; i++) {
vector <double> emissRow ({exp(llk0_[i])*(1.0-missCopyProb) + exp(llk1_[i])*missCopyProb,
exp(llk1_[i])*(1.0-missCopyProb) + exp(llk0_[i])*missCopyProb});
vector <double> emissRow ({siteLikelihoods0_[i]*(1.0-missCopyProb) + siteLikelihoods1_[i]*missCopyProb,
siteLikelihoods1_[i]*(1.0-missCopyProb) + siteLikelihoods0_[i]*missCopyProb});

this->emission_.push_back(emissRow);
}
Expand All @@ -242,6 +241,7 @@ void UpdateSingleHap::calcFwdProbs() {
vector <double> fwd1st (this->nPanel_, 0.0);
for ( size_t i = 0 ; i < this->nPanel_; i++) {
fwd1st[i] = this->emission_[0][this->panel_->content_[hapIndex][i]];
assert(fwd1st[i] >= 0);
}
(void)normalizeBySum(fwd1st);
this->fwdProbs_.push_back(fwd1st);
Expand All @@ -257,6 +257,7 @@ void UpdateSingleHap::calcFwdProbs() {
vector <double> fwdTmp (this->nPanel_, 0.0);
for ( size_t i = 0 ; i < this->nPanel_; i++) {
fwdTmp[i] = this->emission_[j][this->panel_->content_[hapIndex][i]] * (fwdProbs_.back()[i] * pNoRec + massFromRec);
assert(fwdTmp[i] >= 0);
//if ( i >= this->panel_->truePanelSize() ) {
//fwdTmp[i] = this->emission_[j][this->panel_->content_[hapIndex][i]] * (fwdProbs_.back()[i] * pNoRec + massFromRec) * inbreedProb;
//} else {
Expand All @@ -272,10 +273,10 @@ void UpdateSingleHap::calcFwdProbs() {

// Compute the per-site likelihoods of the read data for this segment under
// the two candidate alleles (0 and 1) of the strain being updated.
// refCount/altCount: per-locus reference/alternative read counts, indexed
// from segmentStartIndex_ for nLoci_ loci.
void UpdateSingleHap::calcHapLLKs( vector <double> &refCount,
                                   vector <double> &altCount) {
    this->siteLikelihoods0_ = calcSiteLikelihoods( refCount, altCount, expectedWsaf0_, this->segmentStartIndex_, this->nLoci_, this->scalingFactor() );
    this->siteLikelihoods1_ = calcSiteLikelihoods( refCount, altCount, expectedWsaf1_, this->segmentStartIndex_, this->nLoci_, this->scalingFactor() );
    assert( this->siteLikelihoods0_.size() == this->nLoci_ );
    assert( this->siteLikelihoods1_.size() == this->nLoci_ );
}


Expand Down Expand Up @@ -315,8 +316,8 @@ void UpdateSingleHap::samplePaths() {
void UpdateSingleHap::addMissCopying( double missCopyProb ) {
this->hap_.clear();
for ( size_t i = 0; i < this->nLoci_; i++) {
double tmpMax = max_value ( vector <double>({this->llk0_[i], this->llk1_[i]}));
vector <double> emissionTmp ({exp(this->llk0_[i]-tmpMax), exp(this->llk1_[i]-tmpMax)});
auto tmpMax = std::max(siteLikelihoods0_[i], siteLikelihoods1_[i]);
vector <log_double_t> emissionTmp ({siteLikelihoods0_[i]/tmpMax, siteLikelihoods1_[i]/tmpMax});
vector <double> sameDiffDist ({emissionTmp[path_[i]]*(1.0 - missCopyProb), // probability of the same
emissionTmp[(size_t)(1 -path_[i])] * missCopyProb }); // probability of differ

Expand All @@ -336,9 +337,9 @@ void UpdateSingleHap::sampleHapIndependently( vector <double> &plaf ) {
this->hap_.clear();
size_t plafIndex = this->segmentStartIndex_;
for ( size_t i = 0; i < this->nLoci_; i++) {
double tmpMax = max_value ( vector <double> ( {llk0_[i], llk1_[i]} ) ) ;
vector <double> tmpDist ( {exp(llk0_[i] - tmpMax) * (1.0-plaf[plafIndex]),
exp(llk1_[i] - tmpMax) * plaf[plafIndex] } );
auto tmpMax = std::max( siteLikelihoods0_[i], siteLikelihoods1_[i] );
vector <double> tmpDist ( {siteLikelihoods0_[i]/tmpMax * (1.0-plaf[plafIndex]),
siteLikelihoods1_[i]/tmpMax * plaf[plafIndex] } );
(void)normalizeBySum(tmpDist);
this->hap_.push_back ( sampleIndexGivenProp(this->recombRg_, tmpDist) );
plafIndex++;
Expand All @@ -351,9 +352,9 @@ void UpdateSingleHap::updateLLK() {
newLLK = vector <double> (this->nLoci_, 0.0);
for ( size_t i = 0; i < this->nLoci_; i++) {
if ( this->hap_[i] == 0) {
newLLK[i] = llk0_[i];
newLLK[i] = log(siteLikelihoods0_[i]);
} else if (this->hap_[i] == 1) {
newLLK[i] = llk1_[i];
newLLK[i] = log(siteLikelihoods1_[i]);
} else {
throw ShouldNotBeCalled();
}
Expand Down
4 changes: 2 additions & 2 deletions src/updateHap.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -134,8 +134,8 @@ class UpdateSingleHap : public UpdateHap{
size_t strainIndex_;
vector <double> expectedWsaf0_;
vector <double> expectedWsaf1_;
vector <double> llk0_;
vector <double> llk1_;
vector <log_double_t> siteLikelihoods0_;
vector <log_double_t> siteLikelihoods1_;

vector <int> path_;
vector <int> hap_;
Expand Down
38 changes: 19 additions & 19 deletions tests/unittest/test_updateSingleHap.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -174,8 +174,8 @@ class TestUpdateSingleHap : public CppUnit::TestCase {
//CPPUNIT_ASSERT_NO_THROW( this->updateSingleHapPanel1_->calcExpectedWsaf( this->expectedWsaf_, this->proportion_, this->haplotypes_) );
//CPPUNIT_ASSERT_NO_THROW( this->updateSingleHapPanel1_->calcHapLLKs (this->refCount_, this->altCount_) );

this->updateSingleHapPanel1_->llk0_ = vector <double> (7, log(0.1));
this->updateSingleHapPanel1_->llk1_ = vector <double> (7, log(0.9));
this->updateSingleHapPanel1_->siteLikelihoods0_ = vector <log_double_t> (7, log_double_t(0.1));
this->updateSingleHapPanel1_->siteLikelihoods1_ = vector <log_double_t> (7, log_double_t(0.9));

CPPUNIT_ASSERT_NO_THROW( this->updateSingleHapPanel1_->buildEmission ( 0.0 ) );
CPPUNIT_ASSERT_NO_THROW( this->updateSingleHapPanel1_->calcFwdBwdProbs() );
Expand All @@ -191,45 +191,45 @@ class TestUpdateSingleHap : public CppUnit::TestCase {
}

void testEmissionProb0 (){
double llk0_1 = log(0.05), llk0_2 = log(0.03), llk0_3 = log(0);
double llk1_1 = log(0.5), llk1_2 = log(0.0003), llk1_3 = log(1);
this->updateSingleHapPanel1_->llk0_ = vector <double> ({llk0_1, llk0_2, llk0_3});
this->updateSingleHapPanel1_->llk1_ = vector <double> ({llk1_1, llk1_2, llk1_3});
log_double_t lk0_1 = 0.05, lk0_2 = 0.03, lk0_3 = 0;
log_double_t lk1_1 = 0.5, lk1_2 = 0.0003, lk1_3 = 1;
this->updateSingleHapPanel1_->siteLikelihoods0_ = {lk0_1, lk0_2, lk0_3};
this->updateSingleHapPanel1_->siteLikelihoods1_ = {lk1_1, lk1_2, lk1_3};
this->updateSingleHapPanel1_->nLoci_ = 3;
CPPUNIT_ASSERT_NO_THROW ( this->updateSingleHapPanel1_->buildEmission ( 0.0 ) );
CPPUNIT_ASSERT_EQUAL ( (size_t)2, this->updateSingleHapPanel1_->emission_[0].size() );
CPPUNIT_ASSERT_EQUAL ( (size_t)2, this->updateSingleHapPanel1_->emission_[1].size() );
CPPUNIT_ASSERT_EQUAL ( (size_t)2, this->updateSingleHapPanel1_->emission_[2].size() );
CPPUNIT_ASSERT_EQUAL ( (size_t)2, this->updateSingleHapPanel1_->emission_.back().size() );
CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk0_1-llk1_1), updateSingleHapPanel1_->emission_[0][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk0_1/lk1_1, updateSingleHapPanel1_->emission_[0][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(0.05/0.5, updateSingleHapPanel1_->emission_[0][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(1.0, updateSingleHapPanel1_->emission_[0][1], epsilon3);

CPPUNIT_ASSERT_DOUBLES_EQUAL(1.0, updateSingleHapPanel1_->emission_[1][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(0.0003/0.03, updateSingleHapPanel1_->emission_[1][1], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk1_2-llk0_2), updateSingleHapPanel1_->emission_[1][1], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk1_2/lk0_2, updateSingleHapPanel1_->emission_[1][1], epsilon3);

CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk0_3-llk1_3), updateSingleHapPanel1_->emission_[2][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk0_3/lk1_3, updateSingleHapPanel1_->emission_[2][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(0.0, updateSingleHapPanel1_->emission_[2][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(1.0, updateSingleHapPanel1_->emission_[2][1], epsilon3);
}


void testEmissionBasicVersion0 (){
double llk0_1 = log(0.05), llk0_2 = log(0.03), llk0_3 = log(0);
double llk1_1 = log(0.5), llk1_2 = log(0.0003), llk1_3 = log(1);
this->updateSingleHapPanel1_->llk0_ = vector <double> ({llk0_1, llk0_2, llk0_3});
this->updateSingleHapPanel1_->llk1_ = vector <double> ({llk1_1, llk1_2, llk1_3});
log_double_t lk0_1 = 0.05, lk0_2 = 0.03, lk0_3 = 0;
log_double_t lk1_1 = 0.5, lk1_2 = 0.0003, lk1_3 = 1;
this->updateSingleHapPanel1_->siteLikelihoods0_ = {lk0_1, lk0_2, lk0_3};
this->updateSingleHapPanel1_->siteLikelihoods1_ = {lk1_1, lk1_2, lk1_3};
this->updateSingleHapPanel1_->nLoci_ = 3;
CPPUNIT_ASSERT_NO_THROW ( this->updateSingleHapPanel1_->buildEmissionBasicVersion ( 0.0 ) );
CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk0_1), updateSingleHapPanel1_->emission_[0][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk1_1), updateSingleHapPanel1_->emission_[0][1], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk0_1, updateSingleHapPanel1_->emission_[0][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk1_1, updateSingleHapPanel1_->emission_[0][1], epsilon3);

CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk0_2), updateSingleHapPanel1_->emission_[1][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk1_2), updateSingleHapPanel1_->emission_[1][1], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk0_2, updateSingleHapPanel1_->emission_[1][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk1_2, updateSingleHapPanel1_->emission_[1][1], epsilon3);

CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk0_3), updateSingleHapPanel1_->emission_[2][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(exp(llk1_3), updateSingleHapPanel1_->emission_[2][1], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk0_3, updateSingleHapPanel1_->emission_[2][0], epsilon3);
CPPUNIT_ASSERT_DOUBLES_EQUAL(lk1_3, updateSingleHapPanel1_->emission_[2][1], epsilon3);
}


Expand Down

0 comments on commit 200eddb

Please sign in to comment.