aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
BNLearner.h
Go to the documentation of this file.
1/****************************************************************************
2 * This file is part of the aGrUM/pyAgrum library. *
3 * *
4 * Copyright (c) 2005-2025 by *
5 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
6 * - Christophe GONZALES(_at_AMU) *
7 * *
8 * The aGrUM/pyAgrum library is free software; you can redistribute it *
9 * and/or modify it under the terms of either : *
10 * *
11 * - the GNU Lesser General Public License as published by *
12 * the Free Software Foundation, either version 3 of the License, *
13 * or (at your option) any later version, *
14 * - the MIT license (MIT), *
15 * - or both in dual license, as here. *
16 * *
17 * (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) *
18 * *
19 * This aGrUM/pyAgrum library is distributed in the hope that it will be *
20 * useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
21 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS *
22 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
23 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, *
25 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR *
26 * OTHER DEALINGS IN THE SOFTWARE. *
27 * *
28 * See LICENCES for more details. *
29 * *
30 * SPDX-FileCopyrightText: Copyright 2005-2025 *
31 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
32 * - Christophe GONZALES(_at_AMU) *
33 * SPDX-License-Identifier: LGPL-3.0-or-later OR MIT *
34 * *
35 * Contact : info_at_agrum_dot_org *
36 * homepage : http://agrum.gitlab.io *
37 * gitlab : https://gitlab.com/agrumery/agrum *
38 * *
39 ****************************************************************************/
40
41
50#ifndef GUM_LEARNING_BN_LEARNER_H
51#define GUM_LEARNING_BN_LEARNER_H
52
53#include <algorithm>
54#include <sstream>
55#include <vector>
56
57#include <agrum/agrum.h>
58
61
62namespace gum {
63 namespace learning {
65
73 template < typename GUM_SCALAR >
74 class BNLearner final: public IBNLearner {
75 public:
76 // ##########################################################################
78 // ##########################################################################
80
82
95 BNLearner(const std::string& filename,
96 const std::vector< std::string >& missingSymbols = {"?"},
97 const bool induceTypes = true);
98
100
107
112 BNLearner(const std::string& filename,
114 const std::vector< std::string >& missing_symbols = {"?"});
115
118
121
123 virtual ~BNLearner();
124
126
127 // ##########################################################################
129 // ##########################################################################
131
134
137
139
141 BayesNet< GUM_SCALAR > learnBN();
142
164 BayesNet< GUM_SCALAR > learnParameters(const DAG& dag, bool takeIntoAccountScore = true);
165
196 BayesNet< GUM_SCALAR > learnParameters(const BayesNet< GUM_SCALAR >& bn,
197 bool takeIntoAccountScore = true);
198
221 BayesNet< GUM_SCALAR > learnParameters(bool take_into_account_score = true);
222
224 std::string toString() const;
225
227 std::vector< std::tuple< std::string, std::string, std::string > > state() const;
228
233 void copyState(const BNLearner< GUM_SCALAR >& learner);
234
235 //=== === add return to certain methods in order to chain command
236 BNLearner< GUM_SCALAR >& setInitialDAG(const DAG& dag) {
238 return *this;
239 }
240
261 BNLearner< GUM_SCALAR >& useEM(const double epsilon, const double noise = default_EM_noise) {
263 return *this;
264 }
265
284 const double noise = default_EM_noise) {
286 return *this;
287 }
288
304 const double noise = default_EM_noise) {
306 return *this;
307 }
308
312 return *this;
313 }
314
325 return *this;
326 }
327
333
343
354 return *this;
355 }
356
362
372
380 return *this;
381 }
382
388
394
400 BNLearner< GUM_SCALAR >& EMsetMaxTime(const double timeout) {
402 return *this;
403 }
404
410
416
423 return *this;
424 }
425
426 // for pyagrum support
428
432 return *this;
433 }
434
439
442 return *this;
443 }
444
449
454
457 return *this;
458 }
459
464
467 return *this;
468 }
469
470 BNLearner< GUM_SCALAR >& useBDeuPrior(double weight = 1.0) {
472 return *this;
473 }
474
477 return *this;
478 }
479
// Use a Dirichlet prior whose pseudo-counts are read from the database stored
// in `filename`; `weight` scales the prior's strength relative to the data.
// Delegates to IBNLearner and returns *this so calls can be chained.
480 BNLearner< GUM_SCALAR >& useDirichletPrior(const std::string& filename, double weight = 1) {
481 IBNLearner::useDirichletPrior(filename, weight);
482 return *this;
483 }
484
// Tail of useDirichletPrior(const gum::BayesNet<GUM_SCALAR>& bn, double weight = 1):
// the first line of the signature is collapsed in this rendering.
486 double weight = 1) {
// NOTE(review): the guard rejects only weight < 0, so weight == 0 is accepted
// even though the message says "positive" — confirm whether 0 is intended.
487 if (weight < 0) { GUM_ERROR(OutOfBounds, "the weight of the prior must be positive") }
488
// record the prior weight used by the score / parameter estimation
491 _setPriorWeight_(weight);
492
// return *this so calls can be chained
494 return *this;
495 }
496
501
// Tail of useLocalSearchWithTabuList(Size tabu_size = 100, Size nb_decrease = 2):
// the first line of the signature is collapsed in this rendering. Selects the
// local-search-with-tabu-list algorithm; returns *this for chaining.
503 Size nb_decrease = 2) {
504 IBNLearner::useLocalSearchWithTabuList(tabu_size, nb_decrease);
505 return *this;
506 }
507
509 IBNLearner::useK2(order);
510 return *this;
511 }
512
// Select the K2 structure-learning algorithm with the given topological order
// of node ids; returns *this for chaining.
513 BNLearner< GUM_SCALAR >& useK2(const std::vector< NodeId >& order) {
514 IBNLearner::useK2(order);
515 return *this;
516 }
517
520 return *this;
521 }
522
527
532
537
539 IBNLearner::setMaxIndegree(max_indegree);
540 return *this;
541 }
542
544 IBNLearner::setSliceOrder(slice_order);
545 return *this;
546 }
547
549 setSliceOrder(const std::vector< std::vector< std::string > >& slices) {
551 return *this;
552 }
553
556 return *this;
557 }
558
561 return *this;
562 }
563
565 IBNLearner::addForbiddenArc(tail, head);
566 return *this;
567 }
568
// Forbid the arc tail -> head (nodes designated by name) during structure
// learning; returns *this for chaining.
569 BNLearner< GUM_SCALAR >& addForbiddenArc(const std::string& tail, const std::string& head) {
570 IBNLearner::addForbiddenArc(tail, head);
571 return *this;
572 }
573
576 return *this;
577 }
578
581 return *this;
582 }
583
584 BNLearner< GUM_SCALAR >& eraseForbiddenArc(const std::string& tail, const std::string& head) {
586 return *this;
587 }
588
591 return *this;
592 }
593
595 IBNLearner::addMandatoryArc(tail, head);
596 return *this;
597 }
598
// Require the arc tail -> head (nodes designated by name) to be present in the
// learned structure; returns *this for chaining.
599 BNLearner< GUM_SCALAR >& addMandatoryArc(const std::string& tail, const std::string& head) {
600 IBNLearner::addMandatoryArc(tail, head);
601 return *this;
602 }
603
606 return *this;
607 }
608
611 return *this;
612 }
613
614 BNLearner< GUM_SCALAR >& eraseMandatoryArc(const std::string& tail, const std::string& head) {
616 return *this;
617 }
618
621 return *this;
622 }
623
625 IBNLearner::addPossibleEdge(tail, head);
626 return *this;
627 }
628
// Declare the edge {tail, head} (nodes designated by name) as a possible edge
// for structure learning; returns *this for chaining.
629 BNLearner< GUM_SCALAR >& addPossibleEdge(const std::string& tail, const std::string& head) {
630 IBNLearner::addPossibleEdge(tail, head);
631 return *this;
632 }
633
636 return *this;
637 }
638
641 return *this;
642 }
643
644 BNLearner< GUM_SCALAR >& erasePossibleEdge(const std::string& tail, const std::string& head) {
646 return *this;
647 }
648
651 return *this;
652 }
653
656 return *this;
657 }
658
661 return *this;
662 }
663
668
669 BNLearner< GUM_SCALAR >& addNoParentNode(const std::string& name) {
671 return *this;
672 }
673
678
679 BNLearner< GUM_SCALAR >& eraseNoParentNode(const std::string& name) {
681 return *this;
682 }
683
688
689 BNLearner< GUM_SCALAR >& addNoChildrenNode(const std::string& name) {
691 return *this;
692 }
693
698
701 return *this;
702 }
703
705
// Returns true iff the currently selected learning algorithm is score-based.
706 bool isScoreBased() const { return IBNLearner::isScoreBased(); }
707
708 protected:
711
712 private:
713 BayesNet< GUM_SCALAR > _prior_bn_;
714
717 const BayesNet< GUM_SCALAR >& src);
718
725 void _checkDAGCompatibility_(const DAG& dag);
726
745 BayesNet< GUM_SCALAR > _learnParameters_(const DAG& dag, bool takeIntoAccountScore);
746
766 std::pair< std::shared_ptr< ParamEstimator >, std::shared_ptr< ParamEstimator > >
767 _initializeEMParameterLearning_(const DAG& dag, bool takeIntoAccountScore);
768
789 BayesNet< GUM_SCALAR > _learnParametersWithEM_(const DAG& dag, bool takeIntoAccountScore);
790
817 BayesNet< GUM_SCALAR > _learnParametersWithEM_(const BayesNet< GUM_SCALAR >& bn,
818 bool takeIntoAccountScore);
819 };
820
822 template < typename GUM_SCALAR >
823 std::ostream& operator<<(std::ostream& output, const BNLearner< GUM_SCALAR >& learner);
824 } /* namespace learning */
825} /* namespace gum */
826
829
830#endif /* GUM_LEARNING_BN_LEARNER_H */
A pack of learning algorithms that can easily be used.
A Dirichlet prior: computes its N'_ijk from a Bayesian network.
A class for generic framework of learning algorithms that can easily be used.
The base class for all directed edges.
Class representing a Bayesian network.
Definition BayesNet.h:93
Base class for dag.
Definition DAG.h:121
The base class for all undirected edges.
Exception : out of bound.
Base class for undirected graphs.
Definition undiGraph.h:128
A class that redirects gum_signal from algorithms to the listeners of BNLearn.
A pack of learning algorithms that can easily be used.
Definition BNLearner.h:74
BNLearner< GUM_SCALAR > & useNoPrior()
Definition BNLearner.h:465
BNLearner< GUM_SCALAR > & EMdisableMaxIter()
Disable stopping criterion on max iterations.
Definition BNLearner.h:384
BNLearner< GUM_SCALAR > & EMenableMinEpsilonRate()
Enable the log-likelihood evolution rate stopping criterion.
Definition BNLearner.h:368
BNLearner< GUM_SCALAR > & addForbiddenArc(const Arc &arc)
Definition BNLearner.h:559
BNLearner< GUM_SCALAR > & useScoreAIC()
Definition BNLearner.h:435
BNLearner< GUM_SCALAR > & eraseMandatoryArc(const Arc &arc)
Definition BNLearner.h:604
BNLearner< GUM_SCALAR > & EMdisableEpsilon()
Disable the min log-likelihood diff stopping criterion.
Definition BNLearner.h:329
BNLearner< GUM_SCALAR > & addPossibleEdge(const Edge &edge)
Definition BNLearner.h:619
BNLearner< GUM_SCALAR > & useScoreBD()
Definition BNLearner.h:440
std::vector< std::tuple< std::string, std::string, std::string > > state() const
NodeProperty< Sequence< std::string > > _labelsFromBN_(const std::string &filename, const BayesNet< GUM_SCALAR > &src)
read the first line of a file to find column names
BNLearner< GUM_SCALAR > & addPossibleEdge(NodeId tail, NodeId head)
Definition BNLearner.h:624
BNLearner< GUM_SCALAR > & addMandatoryArc(const Arc &arc)
Definition BNLearner.h:589
BNLearner< GUM_SCALAR > & useBDeuPrior(double weight=1.0)
Definition BNLearner.h:470
BNLearner< GUM_SCALAR > & useScoreBIC()
Definition BNLearner.h:450
BayesNet< GUM_SCALAR > _learnParametersWithEM_(const BayesNet< GUM_SCALAR > &bn, bool takeIntoAccountScore)
learns a BN (its parameters) with the structure passed in argument using the EM algorithm initialized...
BNLearner< GUM_SCALAR > & addForbiddenArc(const std::string &tail, const std::string &head)
Definition BNLearner.h:569
BNLearner< GUM_SCALAR > & useNoCorrection()
Definition BNLearner.h:533
std::pair< std::shared_ptr< ParamEstimator >, std::shared_ptr< ParamEstimator > > _initializeEMParameterLearning_(const DAG &dag, bool takeIntoAccountScore)
initializes EM and returns a pair containing, first, a bootstrap estimator and, second,...
BNLearner< GUM_SCALAR > & useSmoothingPrior(double weight=1)
Definition BNLearner.h:475
BNLearner< GUM_SCALAR > & EMdisableMinEpsilonRate()
Disable the log-likelihood evolution rate stopping criterion.
Definition BNLearner.h:358
BNLearner< GUM_SCALAR > & erasePossibleEdge(const std::string &tail, const std::string &head)
Definition BNLearner.h:644
BNLearner(const BNLearner &)
copy constructor
BNLearner< GUM_SCALAR > & erasePossibleEdge(const Edge &edge)
Definition BNLearner.h:634
BNLearner< GUM_SCALAR > & addNoChildrenNode(const std::string &name)
Definition BNLearner.h:689
BNLearner< GUM_SCALAR > & useK2(const std::vector< NodeId > &order)
Definition BNLearner.h:513
BNLearner & operator=(BNLearner &&) noexcept
move operator
BNLearner< GUM_SCALAR > & setPossibleSkeleton(const UndiGraph &skeleton)
Definition BNLearner.h:659
BayesNet< GUM_SCALAR > _learnParameters_(const DAG &dag, bool takeIntoAccountScore)
learns a BN (its parameters) with the structure passed in argument using a single pass estimation (no...
BNLearner(BNLearner &&)
move constructor
BNLearner< GUM_SCALAR > & EMenableEpsilon()
Enable the log-likelihood min diff stopping criterion in EM.
Definition BNLearner.h:339
virtual ~BNLearner()
destructor
BNLearner< GUM_SCALAR > & eraseNoChildrenNode(const std::string &name)
Definition BNLearner.h:699
BNLearner< GUM_SCALAR > & setPossibleEdges(const EdgeSet &set)
Definition BNLearner.h:654
BNLearner< GUM_SCALAR > & addMandatoryArc(NodeId tail, NodeId head)
Definition BNLearner.h:594
BNLearner< GUM_SCALAR > & eraseForbiddenArc(const std::string &tail, const std::string &head)
Definition BNLearner.h:584
BNLearner< GUM_SCALAR > & EMenableMaxTime()
enable EM's timeout stopping criterion
Definition BNLearner.h:412
BayesNet< GUM_SCALAR > _learnParametersWithEM_(const DAG &dag, bool takeIntoAccountScore)
learns a BN (its parameters) with the structure passed in argument using the EM algorithm initialized...
BNLearner< GUM_SCALAR > & addNoChildrenNode(NodeId node)
Definition BNLearner.h:684
BNLearner< GUM_SCALAR > & EMsetEpsilon(const double eps)
sets the stopping criterion of EM as being the minimal difference between two consecutive log-likelih...
Definition BNLearner.h:323
BNLearner< GUM_SCALAR > & EMdisableMaxTime()
Disable EM's timeout stopping criterion.
Definition BNLearner.h:406
BNLearner< GUM_SCALAR > & addPossibleEdge(const std::string &tail, const std::string &head)
Definition BNLearner.h:629
BNLearner & operator=(const BNLearner &)
copy operator
BNLearner< GUM_SCALAR > & erasePossibleEdge(NodeId tail, NodeId head)
Definition BNLearner.h:639
BNLearner< GUM_SCALAR > & useGreedyHillClimbing()
Definition BNLearner.h:497
BNLearner< GUM_SCALAR > & forbidEM()
prevent using the EM algorithm for parameter learning
Definition BNLearner.h:310
BNLearner< GUM_SCALAR > & useK2(const Sequence< NodeId > &order)
Definition BNLearner.h:508
BNLearner< GUM_SCALAR > & useScoreK2()
Definition BNLearner.h:455
BNLearner< GUM_SCALAR > & setMandatoryArcs(const ArcSet &set)
Definition BNLearner.h:649
BNLearner< GUM_SCALAR > & eraseNoChildrenNode(NodeId node)
Definition BNLearner.h:694
BNLearner< GUM_SCALAR > & useMIIC()
Definition BNLearner.h:518
BNLearner< GUM_SCALAR > & EMsetPeriodSize(const Size p)
sets the number of samples between two checks of EM's stopping criteria
Definition BNLearner.h:421
BayesNet< GUM_SCALAR > learnBN()
learn a Bayes Net from a file (must have read the db before)
BNLearner< GUM_SCALAR > & setForbiddenArcs(const ArcSet &set)
Definition BNLearner.h:554
bool isConstraintBased() const
Definition BNLearner.h:704
BNLearner< GUM_SCALAR > & useEMWithRateCriterion(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters with the rate stopping criterion
Definition BNLearner.h:283
BNLearner< GUM_SCALAR > & setInitialDAG(const DAG &dag)
Definition BNLearner.h:236
BNLearner< GUM_SCALAR > & addNoParentNode(NodeId node)
Definition BNLearner.h:664
BNLearner< GUM_SCALAR > & addForbiddenArc(NodeId tail, NodeId head)
Definition BNLearner.h:564
BNLearner< GUM_SCALAR > & useEMWithDiffCriterion(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters with the diff stopping criterion
Definition BNLearner.h:303
BNLearner< GUM_SCALAR > & useScoreLog2Likelihood()
Definition BNLearner.h:460
BNLearner< GUM_SCALAR > & addMandatoryArc(const std::string &tail, const std::string &head)
Definition BNLearner.h:599
BayesNet< GUM_SCALAR > _prior_bn_
Definition BNLearner.h:713
BNLearner< GUM_SCALAR > & EMsetMaxIter(const Size max)
add a max iteration stopping criterion
Definition BNLearner.h:378
BNLearner< GUM_SCALAR > & useEM(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters
Definition BNLearner.h:261
BNLearner< GUM_SCALAR > & setSliceOrder(const std::vector< std::vector< std::string > > &slices)
Definition BNLearner.h:549
bool isScoreBased() const
Definition BNLearner.h:706
BNLearner< GUM_SCALAR > & useLocalSearchWithTabuList(Size tabu_size=100, Size nb_decrease=2)
Definition BNLearner.h:502
BNLearner(const DatabaseTable &db)
default constructor
BNLearner< GUM_SCALAR > & EMsetMinEpsilonRate(const double rate)
sets the stopping criterion of EM as being the minimal log-likelihood's evolution rate
Definition BNLearner.h:352
BNLearner< GUM_SCALAR > & useDirichletPrior(const std::string &filename, double weight=1)
Definition BNLearner.h:480
BNLearner< GUM_SCALAR > & useMDLCorrection()
Definition BNLearner.h:528
BNLearner< GUM_SCALAR > & eraseNoParentNode(NodeId node)
Definition BNLearner.h:674
BNLearner< GUM_SCALAR > & EMsetVerbosity(const bool v)
sets or unsets EM's verbosity
Definition BNLearner.h:430
BNLearner< GUM_SCALAR > & eraseForbiddenArc(const Arc &arc)
Definition BNLearner.h:574
BayesNet< GUM_SCALAR > learnParameters(const DAG &dag, bool takeIntoAccountScore=true)
learns a BN (its parameters) with the structure passed in argument
void _checkDAGCompatibility_(const DAG &dag)
check that the database contains the nodes of the dag, else raise an exception
BNLearner< GUM_SCALAR > & useNMLCorrection()
Definition BNLearner.h:523
BNLearner< GUM_SCALAR > & eraseMandatoryArc(NodeId tail, NodeId head)
Definition BNLearner.h:609
BNLearner< GUM_SCALAR > & setSliceOrder(const NodeProperty< NodeId > &slice_order)
Definition BNLearner.h:543
BNLearner< GUM_SCALAR > & useDirichletPrior(const gum::BayesNet< GUM_SCALAR > &bn, double weight=1)
Definition BNLearner.h:485
std::string toString() const
BNLearner(const std::string &filename, const gum::BayesNet< GUM_SCALAR > &src, const std::vector< std::string > &missing_symbols={"?"})
Wrapper for BNLearner (filename,modalities,parse_database) using a bn to find those modalities and no...
BNLearner< GUM_SCALAR > & EMsetMaxTime(const double timeout)
add a stopping criterion on timeout
Definition BNLearner.h:400
void copyState(const BNLearner< GUM_SCALAR > &learner)
copy the states of the BNLearner
BNLearner< GUM_SCALAR > & eraseMandatoryArc(const std::string &tail, const std::string &head)
Definition BNLearner.h:614
BNLearner< GUM_SCALAR > & eraseForbiddenArc(NodeId tail, NodeId head)
Definition BNLearner.h:579
BNLearner< GUM_SCALAR > & eraseNoParentNode(const std::string &name)
Definition BNLearner.h:679
BNLearner< GUM_SCALAR > & EMenableMaxIter()
Enable stopping criterion on max iterations.
Definition BNLearner.h:390
BNLearner(const std::string &filename, const std::vector< std::string > &missingSymbols={"?"}, const bool induceTypes=true)
default constructor
BNLearner< GUM_SCALAR > & addNoParentNode(const std::string &name)
Definition BNLearner.h:669
BNLearner< GUM_SCALAR > & useScoreBDeu()
Definition BNLearner.h:445
void createPrior_()
create the prior used for learning
BNLearner< GUM_SCALAR > & setMaxIndegree(Size max_indegree)
Definition BNLearner.h:538
The class representing a tabular database as used by learning tasks.
void eraseNoChildrenNode(NodeId node)
BNLearnerPriorType priorType_
the prior selected for the score and parameters
Definition IBNLearner.h:956
void EMenableEpsilon()
Enable the log-likelihood min diff stopping criterion in EM.
void useGreedyHillClimbing()
indicate that we wish to use a greedy hill climbing algorithm
void useScoreBDeu()
indicate that we wish to use a BDeu score
void addNoParentNode(NodeId node)
void setSliceOrder(const NodeProperty< NodeId > &slice_order)
sets a partial order on the nodes
IBNLearner(const std::string &filename, const std::vector< std::string > &missingSymbols, bool induceTypes=true)
read the database file for the score / parameter estimation and var names
void setForbiddenArcs(const ArcSet &set)
assign a set of forbidden arcs
Size EMPeriodSize() const
returns the number of samples between two checks of EM's stopping criteria
void EMdisableMaxTime()
Disable EM's timeout stopping criterion.
void EMsetMaxIter(Size max)
add a max iteration stopping criterion
std::string checkScorePriorCompatibility() const
checks whether the current score and prior are compatible
void useBDeuPrior(double weight=1.0)
use the BDeu prior
void setMandatoryArcs(const ArcSet &set)
assign a set of mandatory arcs
void EMdisableMaxIter()
Disable stopping criterion on max iterations.
void EMsetVerbosity(bool v)
sets or unsets EM's verbosity
void EMdisableEpsilon()
Disable the min log-likelihood diff stopping criterion for EM.
void addMandatoryArc(const Arc &arc)
void setMaxIndegree(Size max_indegree)
sets the max indegree
void addPossibleEdge(const Edge &edge)
void setInitialDAG(const DAG &)
sets an initial DAG structure
void useK2(const Sequence< NodeId > &order)
indicate that we wish to use K2
bool isScoreBased() const
indicate if the selected algorithm is score-based
Definition IBNLearner.h:788
void EMenableMinEpsilonRate()
Enable the log-likelihood evolution rate stopping criterion.
void EMsetMaxTime(double timeout)
add a stopping criterion on timeout
void EMdisableMinEpsilonRate()
Disable the log-likelihood evolution rate stopping criterion.
void erasePossibleEdge(const Edge &edge)
bool isConstraintBased() const
indicate if the selected algorithm is constraint-based
Definition IBNLearner.h:777
void useScoreBIC()
indicate that we wish to use a BIC score
void setPossibleEdges(const EdgeSet &set)
assign a set of possible edges
void useNoPrior()
use no prior
void eraseForbiddenArc(const Arc &arc)
void EMenableMaxIter()
Enable stopping criterion on max iterations.
void useSmoothingPrior(double weight=1)
use the prior smoothing
void EMsetMinEpsilonRate(double rate)
sets the stopping criterion of EM as being the minimal log-likelihood's evolution rate
void useLocalSearchWithTabuList(Size tabu_size=100, Size nb_decrease=2)
indicate that we wish to use a local search with tabu list
void useScoreK2()
indicate that we wish to use a K2 score
void _setPriorWeight_(double weight)
sets the prior weight
void setPossibleSkeleton(const UndiGraph &skeleton)
assign a set of possible edges
void useEMWithRateCriterion(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters with the rate stopping criterion
void useNMLCorrection()
indicate that we wish to use the NML correction for MIIC
void useEM(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters
void useEMWithDiffCriterion(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters with the diff stopping criterion
void forbidEM()
prevent using the EM algorithm for parameter learning
double epsilon() const override
Get the value of epsilon.
void EMsetPeriodSize(Size p)
sets the number of samples between two checks of EM's stopping criteria
void useNoCorrection()
indicate that we wish to use the NoCorr correction for MIIC
void useScoreLog2Likelihood()
indicate that we wish to use a Log2Likelihood score
void useMDLCorrection()
indicate that we wish to use the MDL correction for MIIC
void useDirichletPrior(const std::string &filename, double weight=1)
use the Dirichlet prior from a database
void addForbiddenArc(const Arc &arc)
void addNoChildrenNode(NodeId node)
void useScoreAIC()
indicate that we wish to use an AIC score
void eraseMandatoryArc(const Arc &arc)
void useMIIC()
indicate that we wish to use MIIC
static constexpr double default_EM_noise
the default noise amount added to CPTs during EM's initialization (see method useEM())
Definition IBNLearner.h:120
void eraseNoParentNode(NodeId node)
void EMsetEpsilon(double eps)
sets the stopping criterion of EM as being the minimal difference between two consecutive log-likelih...
void useScoreBD()
indicate that we wish to use a BD score
void EMenableMaxTime()
enable EM's timeout stopping criterion
#define GUM_ERROR(type, msg)
Definition exceptions.h:72
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition types.h:74
Set< Edge > EdgeSet
Some typedefs and defines for shortcuts ...
Size NodeId
Type for node ids.
Set< Arc > ArcSet
Some typedefs and defines for shortcuts ...
HashTable< NodeId, VAL > NodeProperty
Property on graph elements.
include the inlined functions if necessary
Definition CSVParser.h:54
std::ostream & operator<<(std::ostream &stream, const IdCondSet &idset)
the display operator
gum is the global namespace for all aGrUM entities
Definition agrum.h:46
STL namespace.