aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
IBNLearner_inl.h
Go to the documentation of this file.
1/****************************************************************************
2 * This file is part of the aGrUM/pyAgrum library. *
3 * *
4 * Copyright (c) 2005-2025 by *
5 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
6 * - Christophe GONZALES(_at_AMU) *
7 * *
8 * The aGrUM/pyAgrum library is free software; you can redistribute it *
9 * and/or modify it under the terms of either : *
10 * *
11 * - the GNU Lesser General Public License as published by *
12 * the Free Software Foundation, either version 3 of the License, *
13 * or (at your option) any later version, *
14 * - the MIT license (MIT), *
15 * - or both in dual license, as here. *
16 * *
17 * (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) *
18 * *
19 * This aGrUM/pyAgrum library is distributed in the hope that it will be *
20 * useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
21 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS *
22 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
23 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, *
25 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR *
26 * OTHER DEALINGS IN THE SOFTWARE. *
27 * *
28 * See LICENCES for more details. *
29 * *
30 * SPDX-FileCopyrightText: Copyright 2005-2025 *
31 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
32 * - Christophe GONZALES(_at_AMU) *
33 * SPDX-License-Identifier: LGPL-3.0-or-later OR MIT *
34 * *
35 * Contact : info_at_agrum_dot_org *
36 * homepage : http://agrum.gitlab.io *
37 * gitlab : https://gitlab.com/agrumery/agrum *
38 * *
39 ****************************************************************************/
40#pragma once
41
42
51
52// to help IDE parser
55
56namespace gum::learning {
57
58
59 // returns the row filter
61
  // returns the domain sizes (numbers of modalities) of the variables,
  // in the same order as the columns of the underlying database table
  INLINE const std::vector< std::size_t >& IBNLearner::Database::domainSizes() const {
    return _domain_sizes_;
  }

  // returns the names of the variables in the database
  INLINE const std::vector< std::string >& IBNLearner::Database::names() const {
    return _database_.variableNames();
  }
71
73 INLINE void IBNLearner::Database::setDatabaseWeight(const double new_weight) {
74 if (_database_.nbRows() == std::size_t(0)) return;
75 const double weight = new_weight / double(_database_.nbRows());
76 _database_.setAllRowsWeight(weight);
77 }
78
  // returns the node id corresponding to a variable name
  // (the name is resolved to its database column, then mapped back to a
  // NodeId through the _nodeId2cols_ bijection)
  INLINE NodeId IBNLearner::Database::idFromName(const std::string& var_name) const {
    try {
      const auto cols = _database_.columnsFromVariableName(var_name);
      return _nodeId2cols_.first(cols[0]);
    } catch (...) {
      // any lookup failure is reported as a missing-variable error
      "Variable " << var_name << " could not be found in the database")
    }
  }

  // returns the variable name corresponding to a given node id
  INLINE const std::string& IBNLearner::Database::nameFromId(NodeId id) const {
    try {
      return _database_.variableName(_nodeId2cols_.second(id));
    } catch (...) {
      // any lookup failure is reported as a missing-variable error
      "Variable of Id " << id << " could not be found in the database")
    }
  }
99
102
  // returns the set of missing-value symbols taken into account in the database
  INLINE const std::vector< std::string >& IBNLearner::Database::missingSymbols() const {
    return _database_.missingSymbols();
  }
107
112
  // returns the number of records (rows) in the database
  INLINE std::size_t IBNLearner::Database::nbRows() const { return _database_.nbRows(); }

  // returns the number of records in the database
  INLINE std::size_t IBNLearner::Database::size() const { return _database_.size(); }

  // sets the weight of the ith record
  INLINE void IBNLearner::Database::setWeight(const std::size_t i, const double weight) {
    _database_.setWeight(i, weight);
  }

  // returns the weight of the ith record
  INLINE double IBNLearner::Database::weight(const std::size_t i) const {
    return _database_.weight(i);
  }

  // returns the weight of the whole database (as computed by the database table)
  INLINE double IBNLearner::Database::weight() const { return _database_.weight(); }
131
  // ===========================================================================
  //     IBNLearner: simple delegations to the score database
  // ===========================================================================

  // returns the node id corresponding to a variable name
  INLINE NodeId IBNLearner::idFromName(const std::string& var_name) const {
    return scoreDatabase_.idFromName(var_name);
  }

  // returns the variable name corresponding to a given node id
  INLINE const std::string& IBNLearner::nameFromId(NodeId id) const {
    return scoreDatabase_.nameFromId(id);
  }

  // assigns a weight to all the rows of the learning database so that the
  // sum of their weights is equal to new_weight
  INLINE void IBNLearner::setDatabaseWeight(const double new_weight) {
    scoreDatabase_.setDatabaseWeight(new_weight);
  }

  // sets the weight of the ith record of the database
  INLINE void IBNLearner::setRecordWeight(const std::size_t i, const double new_weight) {
    scoreDatabase_.setWeight(i, new_weight);
  }

  // returns the weight of the ith record
  INLINE double IBNLearner::recordWeight(const std::size_t i) const {
    return scoreDatabase_.weight(i);
  }

  // returns the weight of the whole database
  INLINE double IBNLearner::databaseWeight() const { return scoreDatabase_.weight(); }

  // sets an initial DAG structure from which structure learning will start
  INLINE void IBNLearner::setInitialDAG(const DAG& dag) { initialDag_ = dag; }
164
166
  // indicate that we wish to use an AIC score

  // indicate that we wish to use a BD score

  // indicate that we wish to use a BDeu score

  // indicate that we wish to use a BIC score

  // indicate that we wish to use a K2 score

  // indicate that we wish to use a Log2Likelihood score

  // sets the max indegree, i.e., the maximal number of parents a node may
  // have in the learnt structure
  INLINE void IBNLearner::setMaxIndegree(Size max_indegree) {
    constraintIndegree_.setMaxIndegree(max_indegree);
  }
207
  // indicate that we wish to use MIIC with constraints

  // get the list of arcs hiding latent variables, as discovered by MIIC
  INLINE std::vector< Arc > IBNLearner::latentVariables() const {
    return algoMiic_.latentVariables();
  }
230
  // indicate that we wish to use a K2 algorithm with the given node order
  INLINE void IBNLearner::useK2(const Sequence< NodeId >& order) {
    algoK2_.setOrder(order);
  }

  // indicate that we wish to use a K2 algorithm with the given node order
  INLINE void IBNLearner::useK2(const std::vector< NodeId >& order) {
    algoK2_.setOrder(order);
  }

  // indicate that we wish to use a greedy hill climbing algorithm

  // indicate that we wish to use a local search with tabu list
  // (tabu_size: size of the tabu list; nb_decrease: max number of consecutive
  // score-decreasing changes before the search stops)
  INLINE void IBNLearner::useLocalSearchWithTabuList(Size tabu_size, Size nb_decrease) {
    nbDecreasingChanges_ = nb_decrease;
    constraintTabuList_.setTabuListSize(tabu_size);
    localSearchWithTabuList_.setMaxNbDecreasingChanges(nb_decrease);
  }
255
257 INLINE void IBNLearner::useEM(const double epsilon, const double noise) {
258 if (epsilon < 0.0)
259 GUM_ERROR(OutOfBounds, "EM's min log-likelihood evolution rate must be non-negative");
260 if ((noise < 0.0) || (noise > 1.0))
261 GUM_ERROR(OutOfBounds, "EM's noise must belong to interval [0,1]");
262 if (epsilon > 0) {
263 useEM_ = true;
264 dag2BN_.setMinEpsilonRate(epsilon);
265 dag2BN_.setNoise(noise);
266 noiseEM_ = noise;
267 } else {
268 useEM_ = false; // epsilon == 0
269 }
270 }
271
273 INLINE void IBNLearner::useEMWithRateCriterion(const double epsilon, const double noise) {
274 if (epsilon <= 0.0)
275 GUM_ERROR(OutOfBounds, "EM's min log-likelihood evolution rate must be positive");
276 useEM(epsilon, noise);
277 }
278
280 INLINE void IBNLearner::useEMWithDiffCriterion(const double epsilon, const double noise) {
281 if (epsilon <= 0.0)
282 GUM_ERROR(OutOfBounds, "EM's min log-likelihood differences must be positive");
283 if ((noise < 0.0) || (noise > 1.0))
284 GUM_ERROR(OutOfBounds, "EM's noise must belong to interval [0,1]");
285 useEM_ = true;
286 dag2BN_.setEpsilon(epsilon);
287 dag2BN_.setNoise(noise);
288 noiseEM_ = noise;
289 }
290
  // prevent using the EM algorithm for parameter learning
  INLINE void IBNLearner::forbidEM() { useEM_ = false; }

  // indicates whether EM is currently used for parameter learning
  INLINE bool IBNLearner::isUsingEM() const { return useEM_; }
296
299 if (useEM_) return dag2BN_;
300 else GUM_ERROR(NotFound, "EM is currently forbidden. Please enable it with useEM()")
301 }
302
308
310 INLINE std::string IBNLearner::EMStateMessage() const {
311 if (useEM_) return dag2BN_.messageApproximationScheme();
312 else return "EM is currently forbidden. Please enable it with useEM()";
313 }
314
  // returns true if the learner's database has missing values
  INLINE bool IBNLearner::hasMissingValues() const {
    return scoreDatabase_.databaseTable().hasMissingValues();
  }

  // assign the set of edges that are possible in the learnt structure
  INLINE void IBNLearner::setPossibleEdges(const EdgeSet& set) {
    constraintPossibleEdges_.setEdges(set);
  }

  // assign a set of possible edges from an UndiGraph

  // assign a new possible edge
  INLINE void IBNLearner::addPossibleEdge(const Edge& edge) {
    constraintPossibleEdges_.addEdge(edge);
  }

  // remove a possible edge
  INLINE void IBNLearner::erasePossibleEdge(const Edge& edge) {
    constraintPossibleEdges_.eraseEdge(edge);
  }

  // assign a new possible edge
  INLINE void IBNLearner::addPossibleEdge(const NodeId tail, const NodeId head) {
    addPossibleEdge(Edge(tail, head));
  }

  // remove a possible edge
  INLINE void IBNLearner::erasePossibleEdge(const NodeId tail, const NodeId head) {
    erasePossibleEdge(Edge(tail, head));
  }

  // assign a new possible edge, designating the nodes by name
  INLINE void IBNLearner::addPossibleEdge(const std::string& tail, const std::string& head) {
  }

  // remove a possible edge, designating the nodes by name
  INLINE void IBNLearner::erasePossibleEdge(const std::string& tail, const std::string& head) {
  }
358
  // assign the set of arcs that are forbidden in the learnt structure
  INLINE void IBNLearner::setForbiddenArcs(const ArcSet& set) {
    constraintForbiddenArcs_.setArcs(set);
  }

  // assign a new forbidden arc
  INLINE void IBNLearner::addForbiddenArc(const Arc& arc) { constraintForbiddenArcs_.addArc(arc); }

  // remove a forbidden arc
  INLINE void IBNLearner::eraseForbiddenArc(const Arc& arc) {
    constraintForbiddenArcs_.eraseArc(arc);
  }

  // assign a new forbidden arc
  INLINE void IBNLearner::addForbiddenArc(const NodeId tail, const NodeId head) {
    addForbiddenArc(Arc(tail, head));
  }

  // remove a forbidden arc
  INLINE void IBNLearner::eraseForbiddenArc(const NodeId tail, const NodeId head) {
    eraseForbiddenArc(Arc(tail, head));
  }

  // assign a new forbidden arc, designating the nodes by name
  INLINE void IBNLearner::addForbiddenArc(const std::string& tail, const std::string& head) {
  }

  // remove a forbidden arc, designating the nodes by name
  INLINE void IBNLearner::eraseForbiddenArc(const std::string& tail, const std::string& head) {
  }
391
  // assign the set of arcs that are mandatory in the learnt structure
  INLINE void IBNLearner::setMandatoryArcs(const ArcSet& set) {
    constraintMandatoryArcs_.setArcs(set);
  }

  // assign a new mandatory arc
  INLINE void IBNLearner::addMandatoryArc(const Arc& arc) { constraintMandatoryArcs_.addArc(arc); }

  // remove a mandatory arc
  INLINE void IBNLearner::eraseMandatoryArc(const Arc& arc) {
    constraintMandatoryArcs_.eraseArc(arc);
  }
404
406
407 INLINE void IBNLearner::addNoParentNode(const std::string& name) {
409 }
410
412 constraintNoParentNodes_.eraseNode(node);
413 }
414
415 INLINE void IBNLearner::eraseNoParentNode(const std::string& name) {
417 }
418
420 constraintNoChildrenNodes_.addNode(node);
421 }
422
423 INLINE void IBNLearner::addNoChildrenNode(const std::string& name) {
425 }
426
428 constraintNoChildrenNodes_.eraseNode(node);
429 }
430
431 INLINE void IBNLearner::eraseNoChildrenNode(const std::string& name) {
433 }
434
  // assign a new mandatory arc, designating the nodes by name
  INLINE void IBNLearner::addMandatoryArc(const std::string& tail, const std::string& head) {
  }

  // remove a mandatory arc, designating the nodes by name
  INLINE void IBNLearner::eraseMandatoryArc(const std::string& tail, const std::string& head) {
  }

  // assign a new mandatory arc
  INLINE void IBNLearner::addMandatoryArc(NodeId tail, NodeId head) {
    addMandatoryArc(Arc(tail, head));
  }
449
450 // remove a forbidden arc
452 eraseMandatoryArc(Arc(tail, head));
453 }
454
455 // sets a partial order on the nodes
459
460 INLINE void IBNLearner::setSliceOrder(const std::vector< std::vector< std::string > >& slices) {
461 NodeProperty< NodeId > slice_order;
462 NodeId rank = 0;
463 for (const auto& slice: slices) {
464 for (const auto& name: slice) {
465 slice_order.insert(idFromName(name), rank);
466 }
467 rank++;
468 }
469 setSliceOrder(slice_order);
470 }
471
  // sets the weight of the prior used by the score and parameter estimators
  // (raises OutOfBounds if weight is negative)
  INLINE void IBNLearner::_setPriorWeight_(double weight) {
    if (weight < 0) { GUM_ERROR(OutOfBounds, "the weight of the prior must be positive") }

    priorWeight_ = weight;
  }
479
  // use the smoothing prior (with its default weight)

  // use the smoothing prior with the given weight
  // (raises OutOfBounds if weight is negative)
  INLINE void IBNLearner::useSmoothingPrior(double weight) {
    if (weight < 0) { GUM_ERROR(OutOfBounds, "the weight of the prior must be positive") }

    _setPriorWeight_(weight);
  }

  // use the Dirichlet prior, whose counts are read from database `filename`
  // (raises OutOfBounds if weight is negative)
  INLINE void IBNLearner::useDirichletPrior(const std::string& filename, double weight) {
    if (weight < 0) { GUM_ERROR(OutOfBounds, "the weight of the prior must be positive") }

    priorDbname_ = filename;
    _setPriorWeight_(weight);
  }

  // use the BDeu prior with the given weight
  // (raises OutOfBounds if weight is negative)
  INLINE void IBNLearner::useBDeuPrior(double weight) {
    if (weight < 0) { GUM_ERROR(OutOfBounds, "the weight of the prior must be positive") }

    _setPriorWeight_(weight);
  }
516
517 // returns the type (as a string) of a given prior
519 switch (priorType_) {
521 case NO_prior : return PriorType::NoPriorType;
525 case BDEU : return PriorType::BDeuPriorType;
526 default :
528 "IBNLearner getPriorType does "
529 "not support yet this prior")
530 }
531 }
532
  // returns the names of the variables in the database
  INLINE const std::vector< std::string >& IBNLearner::names() const {
    return scoreDatabase_.names();
  }

  // returns the domain sizes of the variables in the database
  INLINE const std::vector< std::size_t >& IBNLearner::domainSizes() const {
    return scoreDatabase_.domainSizes();
  }

  // returns the domain size of the variable whose node id is `var`
  INLINE Size IBNLearner::domainSize(NodeId var) const { return scoreDatabase_.domainSizes()[var]; }

  // returns the domain size of the variable whose name is `var`
  INLINE Size IBNLearner::domainSize(const std::string& var) const {
    return scoreDatabase_.domainSizes()[idFromName(var)];
  }
550
  // returns the current database rows' ranges used for learning
  INLINE const std::vector< std::pair< std::size_t, std::size_t > >&
    return ranges_;
  }

  // reset the ranges to the single range covering the whole database
  INLINE void IBNLearner::clearDatabaseRanges() { ranges_.clear(); }

  // returns the database table used by the learner
  INLINE const DatabaseTable& IBNLearner::database() const {
    return scoreDatabase_.databaseTable();
  }

  // returns the number of columns of the database
  INLINE Size IBNLearner::nbCols() const { return scoreDatabase_.domainSizes().size(); }

  // returns the number of rows of the database
  INLINE Size IBNLearner::nbRows() const { return scoreDatabase_.databaseTable().size(); }
568
569 // sets the number max of threads that can be used
572 if (score_ != nullptr) score_->setNumberOfThreads(nb);
573 }
574
575 /* namespace learning */
576} // namespace gum::learning
A class for generic framework of learning algorithms that can easily be used.
The base class for all directed edges.
Base class for dag.
Definition DAG.h:121
const EdgeSet & edges() const
returns the set of edges stored within the EdgeGraphPart
The base class for all undirected edges.
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
ApproximationSchemeSTATE
The different state of an approximation scheme.
Error: A name of variable is not found in the database.
Exception : the element we looked for cannot be found.
Exception : operation not allowed.
Exception : out of bound.
virtual void setNumberOfThreads(Size nb)
sets the number max of threads to be used by the class containing this ThreadNumberManager
Base class for undirected graphs.
Definition undiGraph.h:128
the class used to read a row in the database and to transform it into a set of DBRow instances that c...
The class representing a tabular database as used by learning tasks.
A class for parameterizing EM's parameter learning approximations.
const std::vector< std::string > & missingSymbols() const
returns the set of missing symbols taken into account
const DatabaseTable & databaseTable() const
returns the internal database table
std::size_t size() const
returns the number of records in the database
NodeId idFromName(const std::string &var_name) const
returns the node id corresponding to a variable name
std::vector< std::size_t > _domain_sizes_
the domain sizes of the variables (useful to speed-up computations)
Definition IBNLearner.h:265
DatabaseTable _database_
the database itself
Definition IBNLearner.h:259
const std::string & nameFromId(NodeId id) const
returns the variable name corresponding to a given node id
double weight(const std::size_t i) const
returns the weight of the ith record
Bijection< NodeId, std::size_t > _nodeId2cols_
a bijection assigning to each variable name its NodeId
Definition IBNLearner.h:268
const std::vector< std::string > & names() const
returns the names of the variables in the database
void setWeight(const std::size_t i, const double weight)
sets the weight of the ith record
const Bijection< NodeId, std::size_t > & nodeId2Columns() const
returns the mapping between node ids and their columns in the database
DBRowGeneratorParser & parser()
returns the parser for the database
DBRowGeneratorParser * _parser_
the parser used for reading the database
Definition IBNLearner.h:262
void setDatabaseWeight(const double new_weight)
assign a weight to all the rows of the database so that the sum of their weights is equal to new_weig...
std::size_t nbRows() const
returns the number of records in the database
const std::vector< std::size_t > & domainSizes() const
returns the domain sizes of the variables
double weight() const
returns the weight of the whole database
StructuralConstraintPossibleEdges constraintPossibleEdges_
the constraint on possible Edges
Definition IBNLearner.h:979
StructuralConstraintNoParentNodes constraintNoParentNodes_
the constraint on no parent nodes
Definition IBNLearner.h:985
void eraseNoChildrenNode(NodeId node)
double recordWeight(const std::size_t i) const
returns the weight of the ith record
BNLearnerPriorType priorType_
the prior selected for the score and parameters
Definition IBNLearner.h:956
const std::vector< std::size_t > & domainSizes() const
returns the domain sizes of the variables in the database
void useGreedyHillClimbing()
indicate that we wish to use a greedy hill climbing algorithm
void useScoreBDeu()
indicate that we wish to use a BDeu score
void addNoParentNode(NodeId node)
void setSliceOrder(const NodeProperty< NodeId > &slice_order)
sets a partial order on the nodes
bool isUsingEM() const
indicates whether we use EM for parameter learning
void setForbiddenArcs(const ArcSet &set)
assign a set of forbidden arcs
std::string priorDbname_
the filename for the Dirichlet a priori, if any
double priorWeight_
the weight of the prior
Definition IBNLearner.h:964
double noiseEM_
the noise factor (in (0,1)) used by EM for perturbing the CPT during init
Definition IBNLearner.h:950
std::vector< std::pair< std::size_t, std::size_t > > ranges_
the set of rows' ranges within the database in which learning is done
void setDatabaseWeight(const double new_weight)
assign a weight to all the rows of the learning database so that the sum of their weights is equal to...
std::vector< Arc > latentVariables() const
get the list of arcs hiding latent variables
void clearDatabaseRanges()
reset the ranges to the one range corresponding to the whole database
std::string checkScorePriorCompatibility() const
checks whether the current score and prior are compatible
void useBDeuPrior(double weight=1.0)
use the BDeu prior
void setMandatoryArcs(const ArcSet &set)
assign a set of mandatory arcs
ApproximationSchemeSTATE EMState() const
returns the state of the last EM algorithm executed
const std::string & nameFromId(NodeId id) const
returns the variable name corresponding to a given node id
double databaseWeight() const
returns the weight of the whole database
K2 algoK2_
the K2 algorithm
Definition IBNLearner.h:995
void addMandatoryArc(const Arc &arc)
AlgoType selectedAlgo_
the selected learning algorithm
Definition IBNLearner.h:992
const std::vector< std::pair< std::size_t, std::size_t > > & databaseRanges() const
returns the current database rows' ranges used for learning
void setMaxIndegree(Size max_indegree)
sets the max indegree
void addPossibleEdge(const Edge &edge)
void setInitialDAG(const DAG &)
sets an initial DAG structure
void useK2(const Sequence< NodeId > &order)
indicate that we wish to use K2
Database scoreDatabase_
the database to be used by the scores and parameter estimators
bool useEM_
a Boolean indicating whether we should use EM for parameter learning or not
Definition IBNLearner.h:947
DAG2BNLearner dag2BN_
the parametric EM
BNLearnerPriorType
an enumeration to select the prior
Definition IBNLearner.h:108
void erasePossibleEdge(const Edge &edge)
void setNumberOfThreads(Size nb) override
sets the number max of threads that can be used
void useScoreBIC()
indicate that we wish to use a BIC score
StructuralConstraintNoChildrenNodes constraintNoChildrenNodes_
the constraint on no children nodes
Definition IBNLearner.h:988
DAG initialDAG()
returns the initial DAG structure
void setPossibleEdges(const EdgeSet &set)
assign a set of possible edges
void useNoPrior()
use no prior
ScoreType scoreType_
the score selected for learning
Definition IBNLearner.h:938
void eraseForbiddenArc(const Arc &arc)
void useSmoothingPrior(double weight=1)
use the prior smoothing
PriorType getPriorType_() const
returns the type (as a string) of a given prior
NodeId idFromName(const std::string &var_name) const
returns the node id corresponding to a variable name
void useLocalSearchWithTabuList(Size tabu_size=100, Size nb_decrease=2)
indicate that we wish to use a local search with tabu list
void useScoreK2()
indicate that we wish to use a K2 score
StructuralConstraintIndegree constraintIndegree_
the constraint for indegrees
Definition IBNLearner.h:970
void _setPriorWeight_(double weight)
sets the prior weight
void setPossibleSkeleton(const UndiGraph &skeleton)
assign a set of possible edges
void useEMWithRateCriterion(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters with the rate stopping criterion
void useNMLCorrection()
indicate that we wish to use the NML correction for and MIIC
void useEM(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters
void useEMWithDiffCriterion(const double epsilon, const double noise=default_EM_noise)
use The EM algorithm to learn parameters with the diff stopping criterion
bool hasMissingValues() const
returns true if the learner's database has missing values
void forbidEM()
prevent using the EM algorithm for parameter learning
double epsilon() const override
Get the value of epsilon.
Score * score_
the score used
Definition IBNLearner.h:941
StructuralConstraintMandatoryArcs constraintMandatoryArcs_
the constraint on mandatory arcs
Definition IBNLearner.h:982
Miic algoMiic_
the Constraint MIIC algorithm
EMApproximationScheme & EM()
returns the EM parameter learning approximation scheme if EM is enabled
void useNoCorrection()
indicate that we wish to use the NoCorr correction for MIIC
StructuralConstraintForbiddenArcs constraintForbiddenArcs_
the constraint on forbidden arcs
Definition IBNLearner.h:976
void useScoreLog2Likelihood()
indicate that we wish to use a Log2Likelihood score
void setRecordWeight(const std::size_t i, const double weight)
sets the weight of the ith record of the database
void useMDLCorrection()
indicate that we wish to use the MDL correction for MIIC
void useDirichletPrior(const std::string &filename, double weight=1)
use the Dirichlet prior from a database
StructuralConstraintTabuList constraintTabuList_
the constraint for tabu lists
Definition IBNLearner.h:973
void addForbiddenArc(const Arc &arc)
DAG initialDag_
an initial DAG given to learners
void addNoChildrenNode(NodeId node)
Size domainSize(NodeId var) const
learn a structure from a file (must have read the db before)
void useScoreAIC()
indicate that we wish to use an AIC score
const std::vector< std::string > & names() const
returns the names of the variables in the database
void eraseMandatoryArc(const Arc &arc)
void useMIIC()
indicate that we wish to use MIIC
LocalSearchWithTabuList localSearchWithTabuList_
the local search with tabu list algorithm
StructuralConstraintSliceOrder constraintSliceOrder_
the constraint for 2TBNs
Definition IBNLearner.h:967
const DatabaseTable & database() const
returns the database used by the BNLearner
void eraseNoParentNode(NodeId node)
CorrectedMutualInformation::KModeTypes kmodeMiic_
the penalty used in MIIC
void useScoreBD()
indicate that we wish to use a BD score
std::string EMStateMessage() const
returns the state of the EM algorithm
the structural constraint imposing a partial order over nodes
#define GUM_ERROR(type, msg)
Definition exceptions.h:72
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition types.h:74
Set< Edge > EdgeSet
Some typdefs and define for shortcuts ...
Size NodeId
Type for node ids.
Set< Arc > ArcSet
Some typdefs and define for shortcuts ...
HashTable< NodeId, VAL > NodeProperty
Property on graph elements.
include the inlined functions if necessary
Definition CSVParser.h:54
Base classes for undirected graphs.