Fix bug in TableFactor when converting to DecisionTreeFactor
parent 0820fcb7b2
commit 094b76df2d
@@ -252,6 +252,11 @@ DecisionTreeFactor TableFactor::operator*(const DecisionTreeFactor& f) const {
 DecisionTreeFactor TableFactor::toDecisionTreeFactor() const {
   DiscreteKeys dkeys = discreteKeys();
 
+  // If no keys, then return empty DecisionTreeFactor
+  if (dkeys.size() == 0) {
+    return DecisionTreeFactor(dkeys, AlgebraicDecisionTree<Key>());
+  }
+
   std::vector<double> table;
   for (auto i = 0; i < sparse_table_.size(); i++) {
     table.push_back(sparse_table_.coeff(i));
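For context, a minimal sketch of exercising the fixed path is below. It assumes GTSAM's discrete module headers, that TableFactor has a default constructor, and that its DiscreteKeys-plus-string constructor mirrors DecisionTreeFactor's; none of these details are taken from the commit itself.

// Sketch only: assumes GTSAM's discrete API as described above.
#include <gtsam/discrete/DecisionTreeFactor.h>
#include <gtsam/discrete/TableFactor.h>

using namespace gtsam;

int main() {
  // A key-less TableFactor: before this fix, toDecisionTreeFactor() had no
  // empty-key case; with the fix it returns an empty DecisionTreeFactor
  // instead of walking an empty sparse table.
  TableFactor empty;
  DecisionTreeFactor converted = empty.toDecisionTreeFactor();
  converted.print("empty factor converts to:");

  // The ordinary path is unchanged: a factor with keys is still converted
  // entry by entry from sparse_table_.
  DiscreteKey X(0, 2);  // key 0 with cardinality 2 (illustrative)
  TableFactor f(DiscreteKeys{X}, "0.4 0.6");
  DecisionTreeFactor g = f.toDecisionTreeFactor();
  g.print("f converts to:");
  return 0;
}

Returning DecisionTreeFactor(dkeys, AlgebraicDecisionTree<Key>()) keeps the zero-key case type-consistent with the normal return value rather than special-casing the caller.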