aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
IBNLearner.cpp
Go to the documentation of this file.
1/****************************************************************************
2 * This file is part of the aGrUM/pyAgrum library. *
3 * *
4 * Copyright (c) 2005-2025 by *
5 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
6 * - Christophe GONZALES(_at_AMU) *
7 * *
8 * The aGrUM/pyAgrum library is free software; you can redistribute it *
9 * and/or modify it under the terms of either : *
10 * *
11 * - the GNU Lesser General Public License as published by *
12 * the Free Software Foundation, either version 3 of the License, *
13 * or (at your option) any later version, *
14 * - the MIT license (MIT), *
15 * - or both in dual license, as here. *
16 * *
17 * (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) *
18 * *
19 * This aGrUM/pyAgrum library is distributed in the hope that it will be *
20 * useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
21 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS *
22 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
23 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, *
25 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR *
26 * OTHER DEALINGS IN THE SOFTWARE. *
27 * *
28 * See LICENCES for more details. *
29 * *
30 * SPDX-FileCopyrightText: Copyright 2005-2025 *
31 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
32 * - Christophe GONZALES(_at_AMU) *
33 * SPDX-License-Identifier: LGPL-3.0-or-later OR MIT *
34 * *
35 * Contact : info_at_agrum_dot_org *
36 * homepage : http://agrum.gitlab.io *
37 * gitlab : https://gitlab.com/agrumery/agrum *
38 * *
39 ****************************************************************************/
40
41
50
51#include <algorithm>
52#include <iterator>
53
54#include <agrum/agrum.h>
55
61
63
64// include the inlined functions if necessary
65#ifdef GUM_NO_INLINE
67#endif /* GUM_NO_INLINE */
68
69namespace gum::learning {
71 // get the variables names
72 const auto& var_names = _database_.variableNames();
73 const std::size_t nb_vars = var_names.size();
74 for (auto dom: _database_.domainSizes())
75 _domain_sizes_.push_back(dom);
76 for (std::size_t i = 0; i < nb_vars; ++i) {
77 _nodeId2cols_.insert(NodeId(i), i);
78 }
79
80 // create the parser
82 }
83
84 IBNLearner::Database::Database(const std::string& filename,
85 const std::vector< std::string >& missing_symbols,
86 const bool induceTypes) :
87 Database(IBNLearner::readFile_(filename, missing_symbols)) {
88 // if the usr wants the best translators to be inferred, just do it
89 if (induceTypes) {
90 for (const auto& [first, second]: _database_.betterTranslators()) {
91 // change the translator
92 _database_.changeTranslator(*second, first);
93 // recompute the domain size
94 _domain_sizes_[first] = second->domainSize();
95 }
96 }
97 }
98
  /// Builds a Database for an a-priori CSV file aligned with an existing
  /// score database: every variable of the score database is mapped onto
  /// the CSV column bearing the same name, and the score database's
  /// translators are reused so both databases encode values identically.
  IBNLearner::Database::Database(const std::string&                CSV_filename,
                                 const Database&                   score_database,
                                 const std::vector< std::string >& missing_symbols) {
    // assign to each column name in the CSV file its column
    IBNLearner::isCSVFileName_(CSV_filename);
    DBInitializerFromCSV initializer(CSV_filename);
    const auto&          prior_names   = initializer.variableNames();
    std::size_t          prior_nb_vars = prior_names.size();
    HashTable< std::string, std::size_t > prior_names2col(prior_nb_vars);
    for (auto i = std::size_t(0); i < prior_nb_vars; ++i)
      prior_names2col.insert(prior_names[i], i);

    // check that there are at least as many variables in the a priori
    // database as those in the score_database
    if (prior_nb_vars < score_database._database_.nbVariables()) {
      // NOTE(review): the GUM_ERROR(...) opening line is missing from this
      // extraction; the original raises an exception here. The message also
      // reads "the a prior" — presumably "the a-priori" was intended.
      "the a prior database has fewer variables "
      "than the observed database")
    }

    // get the mapping from the columns of score_database to those of
    // the CSV file
    const std::vector< std::string >& score_names = score_database.databaseTable().variableNames();
    const std::size_t score_nb_vars = score_names.size();
    HashTable< std::size_t, std::size_t > mapping(score_nb_vars);
    for (auto i = std::size_t(0); i < score_nb_vars; ++i) {
      try {
        mapping.insert(i, prior_names2col[score_names[i]]);
      } catch (Exception const&) {
        // NOTE(review): truncated GUM_ERROR — a score variable absent from
        // the prior database is reported as an error here.
        "Variable " << score_names[i]
        << " of the observed database does not belong to the "
        << "prior database")
      }
    }

    // create the translators for CSV database: reuse the score database's
    // variables so both databases share the same encodings
    for (auto i = std::size_t(0); i < score_nb_vars; ++i) {
      const Variable& var = score_database.databaseTable().variable(i);
      _database_.insertTranslator(var, mapping[i], missing_symbols);
    }

    // fill the database
    initializer.fillDatabase(_database_);

    // get the domain sizes of the variables
    for (auto dom: _database_.domainSizes())
      _domain_sizes_.push_back(dom);

    // compute the mapping from node ids to column indices
    _nodeId2cols_ = score_database.nodeId2Columns();

    // create the parser
    // NOTE(review): the parser-creation statement (original line 152) was
    // stripped by the extraction.
  }
154
158 // create the parser
160 }
161
163 _database_(std::move(from._database_)), _domain_sizes_(std::move(from._domain_sizes_)),
164 _nodeId2cols_(std::move(from._nodeId2cols_)) {
165 // create the parser
167 }
168
170
172 if (this != &from) {
173 delete _parser_;
174 _database_ = from._database_;
177
178 // create the parser
180 }
181
182 return *this;
183 }
184
186 if (this != &from) {
187 delete _parser_;
188 _database_ = std::move(from._database_);
189 _domain_sizes_ = std::move(from._domain_sizes_);
190 _nodeId2cols_ = std::move(from._nodeId2cols_);
191
192 // create the parser
194 }
195
196 return *this;
197 }
198
199 // ===========================================================================
200
  /// Main constructor: loads the CSV `filename` (interpreting the given
  /// missing-value symbols), optionally inducing the best translator types,
  /// and installs a NoPrior over the resulting database table.
  IBNLearner::IBNLearner(const std::string&                filename,
                         const std::vector< std::string >& missing_symbols,
                         const bool                        induceTypes) :
      inducedTypes_(induceTypes), scoreDatabase_(filename, missing_symbols, induceTypes),
      filename_(filename) {
    // owned raw pointer; released in the destructor
    noPrior_ = new NoPrior(scoreDatabase_.databaseTable());

    GUM_CONSTRUCTOR(IBNLearner)
  }
210
212 noPrior_ = new NoPrior(scoreDatabase_.databaseTable());
213 GUM_CONSTRUCTOR(IBNLearner)
214 }
215
237
249 selectedAlgo_(from.selectedAlgo_), algoK2_(std::move(from.algoK2_)),
250 algoSimpleMiic_(std::move(from.algoSimpleMiic_)), algoMiic_(std::move(from.algoMiic_)),
251 kmodeMiic_(from.kmodeMiic_), dag2BN_(std::move(from.dag2BN_)),
254 scoreDatabase_(std::move(from.scoreDatabase_)), ranges_(std::move(from.ranges_)),
255 priorDbname_(std::move(from.priorDbname_)), initialDag_(std::move(from.initialDag_)),
256 filename_(std::move(from.filename_)),
258 noPrior_ = new NoPrior(scoreDatabase_.databaseTable());
259
260 GUM_CONS_MOV(IBNLearner)
261 }
262
264 if (score_) delete score_;
265
266 if (prior_) delete prior_;
267
268 if (noPrior_) delete noPrior_;
269
270 if (priorDatabase_) delete priorDatabase_;
271
272 if (mutualInfo_) delete mutualInfo_;
273
274 GUM_DESTRUCTOR(IBNLearner)
275 }
276
278 if (this != &from) {
279 if (score_) {
280 delete score_;
281 score_ = nullptr;
282 }
283
284 if (prior_) {
285 delete prior_;
286 prior_ = nullptr;
287 }
288
289 if (priorDatabase_) {
290 delete priorDatabase_;
291 priorDatabase_ = nullptr;
292 }
293
294 if (mutualInfo_) {
295 delete mutualInfo_;
296 mutualInfo_ = nullptr;
297 }
298
300 scoreType_ = from.scoreType_;
302 useEM_ = from.useEM_;
303 noiseEM_ = from.noiseEM_;
304 priorType_ = from.priorType_;
314 algoK2_ = from.algoK2_;
316 algoMiic_ = from.algoMiic_;
317 kmodeMiic_ = from.kmodeMiic_;
318 dag2BN_ = from.dag2BN_;
322 ranges_ = from.ranges_;
325 filename_ = from.filename_;
327 currentAlgorithm_ = nullptr;
328 }
329
330 return *this;
331 }
332
334 if (this != &from) {
335 if (score_) {
336 delete score_;
337 score_ = nullptr;
338 }
339
340 if (prior_) {
341 delete prior_;
342 prior_ = nullptr;
343 }
344
345 if (priorDatabase_) {
346 delete priorDatabase_;
347 priorDatabase_ = nullptr;
348 }
349
350 if (mutualInfo_) {
351 delete mutualInfo_;
352 mutualInfo_ = nullptr;
353 }
354
355 ThreadNumberManager::operator=(std::move(from));
356 scoreType_ = from.scoreType_;
357 paramEstimatorType_ = from.paramEstimatorType_;
358 useEM_ = from.useEM_;
359 noiseEM_ = from.noiseEM_;
360 priorType_ = from.priorType_;
361 priorWeight_ = from.priorWeight_;
362 constraintSliceOrder_ = std::move(from.constraintSliceOrder_);
363 constraintIndegree_ = std::move(from.constraintIndegree_);
364 constraintTabuList_ = std::move(from.constraintTabuList_);
365 constraintForbiddenArcs_ = std::move(from.constraintForbiddenArcs_);
366 constraintNoParentNodes_ = std::move(from.constraintNoParentNodes_);
367 constraintNoChildrenNodes_ = std::move(from.constraintNoChildrenNodes_);
368 constraintMandatoryArcs_ = std::move(from.constraintMandatoryArcs_);
369 selectedAlgo_ = from.selectedAlgo_;
370 algoK2_ = from.algoK2_;
371 algoSimpleMiic_ = std::move(from.algoSimpleMiic_);
372 algoMiic_ = std::move(from.algoMiic_);
373 kmodeMiic_ = from.kmodeMiic_;
374 dag2BN_ = std::move(from.dag2BN_);
375 greedyHillClimbing_ = std::move(from.greedyHillClimbing_);
376 localSearchWithTabuList_ = std::move(from.localSearchWithTabuList_);
377 scoreDatabase_ = std::move(from.scoreDatabase_);
378 ranges_ = std::move(from.ranges_);
379 priorDbname_ = std::move(from.priorDbname_);
380 filename_ = std::move(from.filename_);
381 initialDag_ = std::move(from.initialDag_);
382 nbDecreasingChanges_ = std::move(from.nbDecreasingChanges_);
383 currentAlgorithm_ = nullptr;
384 }
385
386 return *this;
387 }
388
  /// Free-standing helper: reads a CSV file into a DatabaseTable using
  /// default labelized translators (no missing-symbol handling).
  DatabaseTable readFile(const std::string& filename) {
    // get the extension of the file: at least 4 chars are needed for ".csv"
    if (auto filename_size = Size(filename.size()); filename_size < 4) {
      // NOTE(review): truncated GUM_ERROR opening in this extraction
      "IBNLearner could not determine the "
      "file type of the database '"
      << filename << "'")
    }

    // lowercase the extension so ".CSV" is accepted too
    std::string extension = filename.substr(filename.size() - 4);
    std::transform(extension.begin(), extension.end(), extension.begin(), ::tolower);

    if (extension != ".csv") {
      // NOTE(review): truncated GUM_ERROR opening in this extraction
      "IBNLearner does not support yet this type ('" << extension
      << "')"
      "of database file")
    }

    DBInitializerFromCSV initializer(filename);

    const auto&       var_names = initializer.variableNames();
    const std::size_t nb_vars   = var_names.size();

    // one translator per column of the CSV file
    // NOTE(review): the declaration of `translator` (original line 414) was
    // stripped by the extraction.
    DBTranslatorSet translator_set;
    for (std::size_t i = 0; i < nb_vars; ++i) {
      translator_set.insertTranslator(translator, i);
    }

    DatabaseTable database(translator_set);
    database.setVariableNames(initializer.variableNames());
    initializer.fillDatabase(database);

    return database;
  }
425
  /// Checks that `filename` designates a CSV file (extension ".csv",
  /// case-insensitive); raises otherwise. Does not open or read the file.
  void IBNLearner::isCSVFileName_(const std::string& filename) {
    // get the extension of the file: at least 4 chars are needed for ".csv"

    if (auto filename_size = Size(filename.size()); filename_size < 4) {
      // NOTE(review): truncated GUM_ERROR opening in this extraction
      "IBNLearner could not determine the "
      "file type of the database")
    }

    // lowercase the extension so ".CSV" is accepted too
    auto extension = filename.substr(filename.size() - 4);
    std::transform(extension.begin(), extension.end(), extension.begin(), ::tolower);

    if (extension != ".csv") {
      GUM_ERROR(OperationNotAllowed, "IBNLearner does not support yet this type of database file")
    }
  }
442
443 DatabaseTable IBNLearner::readFile_(const std::string& filename,
444 const std::vector< std::string >& missing_symbols) {
445 // get the extension of the file
446 isCSVFileName_(filename);
447
448 DBInitializerFromCSV initializer(filename);
449
450 const auto& var_names = initializer.variableNames();
451 const std::size_t nb_vars = var_names.size();
452
453 DBTranslatorSet translator_set;
454 DBTranslator4LabelizedVariable translator(missing_symbols);
455 for (std::size_t i = 0; i < nb_vars; ++i) {
456 translator_set.insertTranslator(translator, i);
457 }
458
459 DatabaseTable database(missing_symbols, translator_set);
460 database.setVariableNames(initializer.variableNames());
461 initializer.fillDatabase(database);
462
463 database.reorder();
464
465 return database;
466 }
467
469 // first, save the old score, to be delete if everything is ok
470 Score* old_score = score_;
471
472 // create the new scoring function
473 switch (scoreType_) {
474 case ScoreType::AIC :
475 score_ = new ScoreAIC(scoreDatabase_.parser(),
476 *prior_,
477 ranges_,
478 scoreDatabase_.nodeId2Columns());
479 break;
480
481 case ScoreType::BD :
482 score_ = new ScoreBD(scoreDatabase_.parser(),
483 *prior_,
484 ranges_,
485 scoreDatabase_.nodeId2Columns());
486 break;
487
488 case ScoreType::BDeu :
489 score_ = new ScoreBDeu(scoreDatabase_.parser(),
490 *prior_,
491 ranges_,
492 scoreDatabase_.nodeId2Columns());
493 break;
494
495 case ScoreType::BIC :
496 score_ = new ScoreBIC(scoreDatabase_.parser(),
497 *prior_,
498 ranges_,
499 scoreDatabase_.nodeId2Columns());
500 break;
501
502 case ScoreType::K2 :
503 score_ = new ScoreK2(scoreDatabase_.parser(),
504 *prior_,
505 ranges_,
506 scoreDatabase_.nodeId2Columns());
507 break;
508
511 *prior_,
512 ranges_,
513 scoreDatabase_.nodeId2Columns());
514 break;
515
516 default : GUM_ERROR(OperationNotAllowed, "IBNLearner does not support yet this score")
517 }
518
519 // remove the old score, if any
520 if (old_score != nullptr) delete old_score;
521
522 // assign the number of threads
523 score_->setNumberOfThreads(this->isGumNumberOfThreadsOverriden() ? this->getNumberOfThreads()
524 : 0);
525 }
526
528 bool take_into_account_score) {
529 ParamEstimator* param_estimator = nullptr;
530
531 // create the new estimator
532 switch (paramEstimatorType_) {
534 if (take_into_account_score && (score_ != nullptr)) {
535 param_estimator = new ParamEstimatorML(parser,
536 *prior_,
537 score_->internalPrior(),
538 ranges_,
539 scoreDatabase_.nodeId2Columns());
540 } else {
541 param_estimator = new ParamEstimatorML(parser,
542 *prior_,
543 *noPrior_,
544 ranges_,
545 scoreDatabase_.nodeId2Columns());
546 }
547
548 break;
549
550 default :
552 "IBNLearner does not support " << "yet this parameter estimator")
553 }
554
555 // assign the number of threads
556 param_estimator->setNumberOfThreads(
558
559 // assign the set of ranges
560 param_estimator->setRanges(ranges_);
561
562 return param_estimator;
563 }
564
565 /* /// prepares the initial graph for miic
566 MixedGraph IBNLearner::prepareSimpleMiic_() {
567 // Initialize the mixed graph to the fully connected graph
568 MixedGraph mgraph;
569 for (Size i = 0; i < scoreDatabase_.databaseTable().nbVariables(); ++i) {
570 mgraph.addNodeWithId(i);
571 for (Size j = 0; j < i; ++j) {
572 mgraph.addEdge(j, i);
573 }
574 }
575
576 // translating the constraints for miic
577 HashTable< std::pair< NodeId, NodeId >, char > initial_marks;
578 for (const auto& arc: constraintMandatoryArcs_.arcs()) {
579 initial_marks.insert({arc.tail(), arc.head()}, '>');
580 }
581
582 for (const auto& arc: constraintForbiddenArcs_.arcs()) {
583 initial_marks.insert({arc.tail(), arc.head()}, '-');
584 }
585 algoSimpleMiic_.addConstraints(initial_marks);
586
587 // create the mutual entropy object
588 createCorrectedMutualInformation_();
589
590 return mgraph;
591 }*/
592
  // prepares the initial graph for constraintMiic
  // NOTE(review): the function header (presumably `MixedGraph
  // IBNLearner::prepareMiic_() {`) was stripped by the extraction, as was
  // the statement creating the corrected mutual information near the end.
    // Initialize the mixed graph to the fully connected graph
    MixedGraph mgraph;
    DiGraph    forbiddenGraph;   // arcs MIIC must never produce
    DAG        mandatoryGraph;   // arcs MIIC must keep

    // GUM_CHECKPOINT
    // one node per database variable, with contiguous ids
    for (Size i = 0; i < scoreDatabase_.databaseTable().nbVariables(); ++i) {
      mgraph.addNodeWithId(i);
      forbiddenGraph.addNodeWithId(i);
      mandatoryGraph.addNodeWithId(i);
    }

    const EdgeSet& possible_edges = constraintPossibleEdges_.edges();

    if (possible_edges.empty()) {
      // no restriction: start from the complete undirected graph
      for (const NodeId i: mgraph.nodes()) {
        for (NodeId j = 0; j < i; ++j) {
          // contiguous nodeIds !
          mgraph.addEdge(j, i);
        }
      }
    } else {
      // only the explicitly allowed edges are candidates
      for (const auto& edge: possible_edges) {
        mgraph.addEdge(edge.first(), edge.second());
      }
    }
    // GUM_CHECKPOINT

    // translating the mandatory arcs for constraintMiic
    HashTable< std::pair< NodeId, NodeId >, char > initial_marks;
    const ArcSet& mandatory_arcs = constraintMandatoryArcs_.arcs();

    // GUM_CHECKPOINT
    for (const auto& arc: mandatory_arcs) {
      mandatoryGraph.addArc(arc.tail(), arc.head());
      // NOTE(review): this inserts a self-loop (head -> head). The intent is
      // presumably to forbid the REVERSE of a mandatory arc, i.e.
      // addArc(arc.head(), arc.tail()) — TODO confirm against upstream aGrUM.
      forbiddenGraph.addArc(arc.head(), arc.head());
    }

    // GUM_CHECKPOINT
    // translating the forbidden arcs for constraintMiic
    const ArcSet& forbidden_arcs = constraintForbiddenArcs_.arcs();
    for (const auto& arc: forbidden_arcs) {
      forbiddenGraph.addArc(arc.tail(), arc.head());
    }

    // GUM_CHECKPOINT
    // slice order: forbid arcs from a higher-rank node to a lower-rank one;
    // copyOrder shrinks so each unordered pair is examined exactly once
    const gum::NodeProperty< gum::Size > sliceOrder = constraintSliceOrder_.sliceOrder();
    gum::NodeProperty< gum::Size >       copyOrder  = gum::HashTable(sliceOrder);
    for (const auto& [n1, r1]: sliceOrder) {
      for (const auto& [n2, r2]: copyOrder) {
        if (r1 > r2) {
          forbiddenGraph.addArc(n1, n2);
          // initial_marks.insert({n1, n2}, '-');
        } else if (r2 > r1) {
          forbiddenGraph.addArc(n2, n1);
          // initial_marks.insert({n2, n1}, '-');
        }
      }
      copyOrder.erase(n1);
    }

    // nodes that may not have parents: forbid every incoming arc
    for (const auto node: constraintNoParentNodes_.nodes()) {
      for (const auto node2: mgraph.nodes()) {
        if (node != node2) { forbiddenGraph.addArc(node2, node); }
      }
    }

    // nodes that may not have children: forbid every outgoing arc
    for (const auto node: constraintNoChildrenNodes_.nodes()) {
      for (const auto node2: mgraph.nodes()) {
        if (node != node2) { forbiddenGraph.addArc(node, node2); }
      }
    }

    // GUM_CHECKPOINT
    // hand the translated constraints over to the MIIC algorithm
    algoMiic_.setMaxIndegree(constraintIndegree_.maxIndegree());
    algoMiic_.addConstraints(initial_marks);
    algoMiic_.setMandatoryGraph(mandatoryGraph);
    algoMiic_.setForbiddenGraph(forbiddenGraph);

    // GUM_CHECKPOINT
    // create the mutual entropy object
    // if ( _mutual_info_ == nullptr) { this->useNMLCorrection(); }
    // NOTE(review): the statement that creates the corrected mutual
    // information (original line 677) was stripped by the extraction.

    // GUM_CHECKPOINT
    return mgraph;
  }
682
686 "Score-based algorithms do not build PDAG. Please use a constraint-based "
687 "algorithm instead")
688 }
689 // check that the database does not contain any missing value
690 if (scoreDatabase_.databaseTable().hasMissingValues()) {
692 "For the moment, the BNLearner is unable to learn "
693 << "structures with missing values in databases")
694 }
695
696 BNLearnerListener listener(this, algoMiic_);
697 // create the mixedGraph_constraint_MandatoryArcs.arcs
698 MixedGraph mgraph = this->prepareMiic_();
699 return algoMiic_.learnPDAG(*mutualInfo_, mgraph);
700 }
701
703 // create the score and the prior
704 createPrior_();
705 createScore_();
706
707 return learnDag_();
708 }
709
711 if (mutualInfo_ != nullptr) delete mutualInfo_;
712
714 *noPrior_,
715 ranges_,
716 scoreDatabase_.nodeId2Columns());
717 switch (kmodeMiic_) {
719 case MDL : mutualInfo_->useMDL(); break;
720 case NML : mutualInfo_->useNML(); break;
721 case NoCorr : mutualInfo_->useNoCorr(); break;
722 default :
724 "The BNLearner's corrected mutual information class does "
725 << "not implement yet this correction : " << int(kmodeMiic_))
726 }
727 }
728
730 // check that the database does not contain any missing value
731 if (scoreDatabase_.databaseTable().hasMissingValues()
732 || ((priorDatabase_ != nullptr)
734 && priorDatabase_->databaseTable().hasMissingValues())) {
736 "For the moment, the BNLearner is unable to cope "
737 "with missing values in databases")
738 }
739 // add the mandatory arcs to the initial dag and remove the forbidden ones
740 // from the initial graph
741 DAG init_graph = initialDag_;
742
743 for (const auto& arc: constraintMandatoryArcs_.arcs()) {
744 if (!init_graph.exists(arc.tail())) init_graph.addNodeWithId(arc.tail());
745
746 if (!init_graph.exists(arc.head())) init_graph.addNodeWithId(arc.head());
747
748 init_graph.addArc(arc.tail(), arc.head());
749 }
750
751 for (const auto& arc: constraintForbiddenArcs_.arcs())
752 init_graph.eraseArc(arc);
753
754
755 switch (selectedAlgo_) {
756 // ========================================================================
757 case AlgoType::MIIC : {
758 BNLearnerListener listener(this, algoMiic_);
759 // create the mixedGraph and the corrected mutual information
760 MixedGraph mgraph = this->prepareMiic_();
761
762 return algoMiic_.learnStructure(*mutualInfo_, mgraph);
763 }
764
765 // ========================================================================
774 gen_constraint;
775 static_cast< StructuralConstraintMandatoryArcs& >(gen_constraint)
777 static_cast< StructuralConstraintForbiddenArcs& >(gen_constraint)
779 static_cast< StructuralConstraintPossibleEdges& >(gen_constraint)
781 static_cast< StructuralConstraintSliceOrder& >(gen_constraint) = constraintSliceOrder_;
782 static_cast< StructuralConstraintNoParentNodes& >(gen_constraint)
784 static_cast< StructuralConstraintNoChildrenNodes& >(gen_constraint)
786
787 GraphChangesGenerator4DiGraph op_set(gen_constraint);
788
790 sel_constraint;
791 static_cast< StructuralConstraintIndegree& >(sel_constraint) = constraintIndegree_;
792
793 GraphChangesSelector4DiGraph selector(*score_, sel_constraint, op_set);
794
795 return greedyHillClimbing_.learnStructure(selector, init_graph);
796 }
797
798 // ========================================================================
807 gen_constraint;
808 static_cast< StructuralConstraintMandatoryArcs& >(gen_constraint)
810 static_cast< StructuralConstraintForbiddenArcs& >(gen_constraint)
812 static_cast< StructuralConstraintPossibleEdges& >(gen_constraint)
814 static_cast< StructuralConstraintSliceOrder& >(gen_constraint) = constraintSliceOrder_;
815 static_cast< StructuralConstraintNoParentNodes& >(gen_constraint)
817 static_cast< StructuralConstraintNoChildrenNodes& >(gen_constraint)
819
820 GraphChangesGenerator4DiGraph op_set(gen_constraint);
821
825 sel_constraint;
826 static_cast< StructuralConstraintTabuList& >(sel_constraint) = constraintTabuList_;
827 static_cast< StructuralConstraintIndegree& >(sel_constraint) = constraintIndegree_;
828
829 GraphChangesSelector4DiGraph selector(*score_, sel_constraint, op_set);
830
831 return localSearchWithTabuList_.learnStructure(selector, init_graph);
832 }
833
834 // ========================================================================
835 case AlgoType::K2 : {
836 BNLearnerListener listener(this, algoK2_.approximationScheme());
842 gen_constraint;
843 static_cast< StructuralConstraintMandatoryArcs& >(gen_constraint)
845 static_cast< StructuralConstraintForbiddenArcs& >(gen_constraint)
847 static_cast< StructuralConstraintPossibleEdges& >(gen_constraint)
849 ;
850 static_cast< StructuralConstraintNoParentNodes& >(gen_constraint)
852 ;
853 static_cast< StructuralConstraintNoChildrenNodes& >(gen_constraint)
855
856 GraphChangesGenerator4K2 op_set(gen_constraint);
857
858 // if some mandatory arcs are incompatible with the order, use a DAG
859 // constraint instead of a DiGraph constraint to avoid cycles
860 const ArcSet& mandatory_arcs
861 = static_cast< StructuralConstraintMandatoryArcs& >(gen_constraint).arcs();
862 const Sequence< NodeId >& order = algoK2_.order();
863 bool order_compatible = true;
864
865 for (const auto& arc: mandatory_arcs) {
866 if (order.pos(arc.tail()) >= order.pos(arc.head())) {
867 order_compatible = false;
868 break;
869 }
870 }
871
872 if (order_compatible) {
874 sel_constraint;
875 static_cast< StructuralConstraintIndegree& >(sel_constraint) = constraintIndegree_;
876
877 GraphChangesSelector4DiGraph selector(*score_, sel_constraint, op_set);
878
879 return algoK2_.learnStructure(selector, init_graph);
880 } else {
882 sel_constraint;
883 static_cast< StructuralConstraintIndegree& >(sel_constraint) = constraintIndegree_;
884
885 GraphChangesSelector4DiGraph selector(*score_, sel_constraint, op_set);
886
887 return algoK2_.learnStructure(selector, init_graph);
888 }
889 }
890
891 // ========================================================================
892 default :
894 "the learnDAG method has not been implemented for this "
895 "learning algorithm")
896 }
897 }
898
900 if (this->isConstraintBased()) return "";
901
902 const auto prior = getPriorType_();
903
904 switch (scoreType_) {
906 case AIC : return ScoreAIC::isPriorCompatible(prior, priorWeight_);
907
908 case BD : return ScoreBD::isPriorCompatible(prior, priorWeight_);
909
910 case BDeu : return ScoreBDeu::isPriorCompatible(prior, priorWeight_);
911
912 case BIC : return ScoreBIC::isPriorCompatible(prior, priorWeight_);
913
914 case K2 : return ScoreK2::isPriorCompatible(prior, priorWeight_);
915
917
918 default : return "IBNLearner does not support yet this score";
919 }
920 }
921
  /// Restricts the learning database to the k-fold cross-validation training
  /// folds, holding out fold `learning_fold`, and returns the [begin, end)
  /// row range of the held-out (test) fold.
  /// @throws OutOfBounds if k_fold == 0, learning_fold >= k_fold, or the
  /// database does not have strictly more than k_fold rows.
  std::pair< std::size_t, std::size_t >
      IBNLearner::useCrossValidationFold(const std::size_t learning_fold,
                                         const std::size_t k_fold) {
    if (k_fold == 0) { GUM_ERROR(OutOfBounds, "K-fold cross validation with k=0 is forbidden") }

    if (learning_fold >= k_fold) {
      // NOTE(review): truncated GUM_ERROR opening in this extraction
      "In " << k_fold << "-fold cross validation, the learning "
      << "fold should be strictly lower than " << k_fold
      << " but, here, it is equal to " << learning_fold)
    }

    const std::size_t db_size = scoreDatabase_.databaseTable().nbRows();
    if (k_fold >= db_size) {
      // NOTE(review): truncated GUM_ERROR opening; also, "rows" lacks a
      // leading space, so the message reads e.g. "... has only 42rows".
      "In " << k_fold << "-fold cross validation, the database's "
      << "size should be strictly greater than " << k_fold
      << " but, here, the database has only " << db_size << "rows")
    }

    // create the ranges of rows of the test database
    const std::size_t foldSize   = db_size / k_fold;
    const std::size_t unfold_deb = learning_fold * foldSize;
    const std::size_t unfold_end = unfold_deb + foldSize;

    // the learning ranges are everything before and after the held-out fold
    // NOTE(review): when db_size % k_fold != 0 and learning_fold is the last
    // fold, the trailing remainder rows end up in neither range — confirm
    // whether this is intended.
    ranges_.clear();
    if (learning_fold == std::size_t(0)) {
      ranges_.push_back(std::pair< std::size_t, std::size_t >(unfold_end, db_size));
    } else {
      ranges_.push_back(std::pair< std::size_t, std::size_t >(std::size_t(0), unfold_deb));

      if (learning_fold != k_fold - 1) {
        ranges_.push_back(std::pair< std::size_t, std::size_t >(unfold_end, db_size));
      }
    }

    return std::pair< std::size_t, std::size_t >(unfold_deb, unfold_end);
  }
961
962 std::pair< double, double >
963 IBNLearner::chi2(const NodeId id1, const NodeId id2, const std::vector< NodeId >& knowing) {
964 createPrior_();
966
967 return chi2score.statistics(id1, id2, knowing);
968 }
969
970 std::pair< double, double > IBNLearner::chi2(const std::string& name1,
971 const std::string& name2,
972 const std::vector< std::string >& knowing) {
973 std::vector< NodeId > knowingIds;
974 std::transform(knowing.begin(),
975 knowing.end(),
976 std::back_inserter(knowingIds),
977 [this](const std::string& c) { return this->idFromName(c); });
978 return chi2(idFromName(name1), idFromName(name2), knowingIds);
979 }
980
981 std::pair< double, double >
982 IBNLearner::G2(const NodeId id1, const NodeId id2, const std::vector< NodeId >& knowing) {
983 createPrior_();
985 return g2score.statistics(id1, id2, knowing);
986 }
987
988 std::pair< double, double > IBNLearner::G2(const std::string& name1,
989 const std::string& name2,
990 const std::vector< std::string >& knowing) {
991 std::vector< NodeId > knowingIds;
992 std::transform(knowing.begin(),
993 knowing.end(),
994 std::back_inserter(knowingIds),
995 [this](const std::string& c) { return this->idFromName(c); });
996 return G2(idFromName(name1), idFromName(name2), knowingIds);
997 }
998
999 double IBNLearner::logLikelihood(const std::vector< NodeId >& vars,
1000 const std::vector< NodeId >& knowing) {
1001 createPrior_();
1003
1004 std::vector< NodeId > total(vars);
1005 total.insert(total.end(), knowing.begin(), knowing.end());
1006 double LLtotal = ll2score.score(IdCondSet(total, false, true));
1007 if (knowing.size() == (Size)0) {
1008 return LLtotal;
1009 } else {
1010 double LLknw = ll2score.score(IdCondSet(knowing, false, true));
1011 return LLtotal - LLknw;
1012 }
1013 }
1014
1015 double IBNLearner::logLikelihood(const std::vector< std::string >& vars,
1016 const std::vector< std::string >& knowing) {
1017 std::vector< NodeId > ids;
1018 std::vector< NodeId > knowingIds;
1019
1020 auto mapper = [this](const std::string& c) { return this->idFromName(c); };
1021
1022 std::transform(vars.begin(), vars.end(), std::back_inserter(ids), mapper);
1023 std::transform(knowing.begin(), knowing.end(), std::back_inserter(knowingIds), mapper);
1024
1025 return logLikelihood(ids, knowingIds);
1026 }
1027
1029 const NodeId id2,
1030 const std::vector< NodeId >& knowing) {
1031 createPrior_();
1033 *prior_,
1034 databaseRanges());
1035
1036 switch (kmodeMiic_) {
1038 case MDL : cmi.useMDL(); break;
1039
1040 case NML : cmi.useNML(); break;
1041
1042 case NoCorr : cmi.useNoCorr(); break;
1043
1044 default :
1046 "The BNLearner's corrected mutual information class does "
1047 << "not implement yet this correction : " << int(kmodeMiic_))
1048 }
1049 if (knowing.size() == (Size)0) return cmi.score(id1, id2) / scoreDatabase_.weight();
1050 else return cmi.score(id1, id2, knowing) / scoreDatabase_.weight();
1051 }
1052
1053 double IBNLearner::correctedMutualInformation(const std::string& var1,
1054 const std::string& var2,
1055 const std::vector< std::string >& knowing) {
1056 std::vector< NodeId > knowingIds;
1057
1058 auto mapper = [this](const std::string& c) { return this->idFromName(c); };
1059
1060 std::transform(knowing.begin(), knowing.end(), std::back_inserter(knowingIds), mapper);
1061
1062 return correctedMutualInformation(this->idFromName(var1), this->idFromName(var2), knowingIds);
1063 }
1064
1066 const NodeId id2,
1067 const std::vector< NodeId >& knowing) {
1068 const auto prior = NoPrior(scoreDatabase_.databaseTable(), scoreDatabase_.nodeId2Columns());
1070 cmi.useNoCorr();
1071
1072 if (knowing.size() == (Size)0) return cmi.score(id1, id2) / scoreDatabase_.weight();
1073 else return cmi.score(id1, id2, knowing) / scoreDatabase_.weight();
1074 }
1075
1076 double IBNLearner::mutualInformation(const std::string& var1,
1077 const std::string& var2,
1078 const std::vector< std::string >& knowing) {
1079 std::vector< NodeId > knowingIds;
1080
1081 auto mapper = [this](const std::string& c) { return this->idFromName(c); };
1082
1083 std::transform(knowing.begin(), knowing.end(), std::back_inserter(knowingIds), mapper);
1084
1085 return mutualInformation(this->idFromName(var1), this->idFromName(var2), knowingIds);
1086 }
1087
  /// Returns the local score of node `var` given the conditioning set
  /// `knowing`, (re)building the prior and scoring function on demand.
  double IBNLearner::score(const NodeId var, const std::vector< NodeId >& knowing) {
    createPrior_();
    createScore_();   // rebuilds score_ from the current prior and ranges

    return score_->score(var, knowing);
  }
1094
1095 double IBNLearner::score(const std::string& var, const std::vector< std::string >& knowing) {
1096 auto mapper = [this](const std::string& c) { return this->idFromName(c); };
1097
1098 const NodeId id = this->idFromName(var);
1099 std::vector< NodeId > knowingIds;
1100 knowingIds.reserve(knowing.size());
1101 std::transform(knowing.begin(), knowing.end(), std::back_inserter(knowingIds), mapper);
1102
1103 return score(id, knowingIds);
1104 }
1105
  /// Returns the raw pseudo-counts of the joint instantiations of `vars`
  /// as a flat vector.
  /// @throws if the database contains missing values or `vars` is empty.
  std::vector< double > IBNLearner::rawPseudoCount(const std::vector< NodeId >& vars) {
    if (this->hasMissingValues()) {
      // NOTE(review): truncated GUM_ERROR opening in this extraction
      "BNLearner cannot compute pseudo-counts with missing values in the database")
    }
    if (vars.empty()) {
      GUM_ERROR(OutOfBounds, "BNLearner::rawPseudoCount called with an empty vector of variables")
    }
    // NOTE(review): `res` is never used below — dead local, candidate for
    // removal once the full source is available.
    Tensor< double > res;

    createPrior_();
    // NOTE(review): the construction of `count` (original line 1117) was
    // stripped by the extraction.
    return count.get(vars);
  }
1120
1121 std::vector< double > IBNLearner::rawPseudoCount(const std::vector< std::string >& vars) {
1122 std::vector< NodeId > ids;
1123
1124 auto mapper = [this](const std::string& c) { return this->idFromName(c); };
1125
1126 std::transform(vars.begin(), vars.end(), std::back_inserter(ids), mapper);
1127 return rawPseudoCount(ids);
1128 }
1129
1132 const std::vector< std::pair< std::size_t, std::size_t > >& new_ranges) {
1133 // use a score to detect whether the ranges are ok
1135 score.setRanges(new_ranges);
1136 ranges_ = score.ranges();
1137 }
1138} // namespace gum::learning
A listener that allows BNLearner to be used as a proxy for its inner algorithms.
A class for generic framework of learning algorithms that can easily be used.
A pack of learning algorithms that can easily be used.
virtual void eraseArc(const Arc &arc)
removes an arc from the ArcGraphPart
Base class for dag.
Definition DAG.h:121
void addArc(NodeId tail, NodeId head) final
insert a new arc into the directed graph
Definition DAG_inl.h:63
Base class for all oriented graphs.
Definition diGraph.h:130
virtual void addArc(const NodeId tail, const NodeId head)
insert a new arc into the directed graph
Definition diGraph_inl.h:55
Base class for all aGrUM's exceptions.
Definition exceptions.h:118
Exception : a I/O format was not found.
The class for generic Hash Tables.
Definition hashTable.h:637
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
Exception: at least one argument passed to a function is not what was expected.
Error: The database contains some missing values.
Error: A name of variable is not found in the database.
Base class for mixed graphs.
Definition mixedGraph.h:146
const NodeGraphPart & nodes() const
return *this as a NodeGraphPart
bool exists(const NodeId id) const
alias for existsNode
virtual void addNodeWithId(const NodeId id)
try to insert a node with the given id
Exception : there is something wrong with an implementation.
Exception : operation not allowed.
Exception : out of bound.
Base class for partially directed acyclic graphs.
Definition PDAG.h:130
bool empty() const noexcept
Indicates whether the set is the empty set.
Definition set_tpl.h:642
aGrUM's Tensor is a multi-dimensional array with tensor operators.
Definition tensor.h:85
bool isGumNumberOfThreadsOverriden() const
indicates whether the class containing this ThreadNumberManager set its own number of threads
virtual Size getNumberOfThreads() const
returns the current max number of threads used by the class containing this ThreadNumberManager
ThreadNumberManager(Size nb_threads=0)
default constructor
ThreadNumberManager & operator=(const ThreadNumberManager &from)
copy operator
void addEdge(NodeId first, NodeId second) override
insert a new edge into the undirected graph
Base class for every random variable.
Definition variable.h:79
A class that redirects gum_signal from algorithms to the listeners of BNLearn.
The class computing n times the corrected mutual information, as used in the MIIC algorithm.
KModeTypes
the description type for the complexity correction
double score(NodeId var1, NodeId var2)
returns the 2-point mutual information corresponding to a given nodeset
void useNML()
use the kNML penalty function
void useNoCorr()
use no correction/penalty function
void useMDL()
use the MDL penalty function
The class for initializing DatabaseTable and RawDatabaseTable instances from CSV files.
the class used to read a row in the database and to transform it into a set of DBRow instances that c...
The class used to pack sets of generators.
The databases' cell translators for labelized variables.
the class for packing together the translators used to preprocess the datasets
std::size_t insertTranslator(const DBTranslator &translator, const std::size_t column, const bool unique_column=true)
inserts a new translator at the end of the translator set
The class representing a tabular database as used by learning tasks.
void setVariableNames(const std::vector< std::string > &names, const bool from_external_object=true) override
sets the names of the variables
const Variable & variable(const std::size_t k, const bool k_is_input_col=false) const
returns either the kth variable of the database table or the first one corresponding to the kth colum...
The basic class for computing the next graph changes possible in a structure learning algorithm.
The basic class for computing the next graph changes possible in a structure learning algorithm.
The mechanism to compute the next available graph changes for directed structure learning search algor...
a helper to easily read databases
Definition IBNLearner.h:123
Database(const std::string &file, const std::vector< std::string > &missing_symbols, const bool induceTypes=false)
default constructor
const DatabaseTable & databaseTable() const
returns the internal database table
std::vector< std::size_t > _domain_sizes_
the domain sizes of the variables (useful to speed-up computations)
Definition IBNLearner.h:265
DatabaseTable _database_
the database itself
Definition IBNLearner.h:259
Bijection< NodeId, std::size_t > _nodeId2cols_
a bijection assigning to each variable name its NodeId
Definition IBNLearner.h:268
const Bijection< NodeId, std::size_t > & nodeId2Columns() const
returns the mapping between node ids and their columns in the database
Database & operator=(const Database &from)
copy operator
DBRowGeneratorParser * _parser_
the parser used for reading the database
Definition IBNLearner.h:262
StructuralConstraintPossibleEdges constraintPossibleEdges_
the constraint on possible Edges
Definition IBNLearner.h:979
StructuralConstraintNoParentNodes constraintNoParentNodes_
the constraint on no parent nodes
Definition IBNLearner.h:985
BNLearnerPriorType priorType_
the prior selected for the score and parameters
Definition IBNLearner.h:956
IBNLearner(const std::string &filename, const std::vector< std::string > &missingSymbols, bool induceTypes=true)
read the database file for the score / parameter estimation and var names
std::string priorDbname_
the filename for the Dirichlet a priori, if any
double priorWeight_
the weight of the prior
Definition IBNLearner.h:964
double noiseEM_
the noise factor (in (0,1)) used by EM for perturbing the CPT during init
Definition IBNLearner.h:950
std::vector< std::pair< std::size_t, std::size_t > > ranges_
the set of rows' ranges within the database in which learning is done
std::string checkScorePriorCompatibility() const
checks whether the current score and prior are compatible
virtual void createPrior_()=0
create the prior used for learning
static DatabaseTable readFile_(const std::string &filename, const std::vector< std::string > &missing_symbols)
reads a file and returns a databaseVectInRam
K2 algoK2_
the K2 algorithm
Definition IBNLearner.h:995
AlgoType selectedAlgo_
the selected learning algorithm
Definition IBNLearner.h:992
const std::vector< std::pair< std::size_t, std::size_t > > & databaseRanges() const
returns the current database rows' ranges used for learning
double logLikelihood(const std::vector< NodeId > &vars, const std::vector< NodeId > &knowing={})
Return the loglikelihood of vars in the base, conditioned by knowing for the BNLearner.
Database scoreDatabase_
the database to be used by the scores and parameter estimators
ScoreType
an enumeration enabling to select easily the score we wish to use
Definition IBNLearner.h:101
bool useEM_
a Boolean indicating whether we should use EM for parameter learning or not
Definition IBNLearner.h:947
DAG2BNLearner dag2BN_
the parametric EM
Prior * prior_
the prior used
Definition IBNLearner.h:959
void useDatabaseRanges(const std::vector< std::pair< std::size_t, std::size_t > > &new_ranges)
use a new set of database rows' ranges to perform learning
virtual ~IBNLearner()
destructor
std::pair< double, double > chi2(NodeId id1, NodeId id2, const std::vector< NodeId > &knowing={})
Return the <statistic,pvalue> pair for chi2 test in the database.
CorrectedMutualInformation * mutualInfo_
the selected correction for miic
Definition IBNLearner.h:953
bool isConstraintBased() const
indicate if the selected algorithm is constraint-based
Definition IBNLearner.h:777
StructuralConstraintNoChildrenNodes constraintNoChildrenNodes_
the constraint on no children nodes
Definition IBNLearner.h:988
ParamEstimatorType paramEstimatorType_
the type of the parameter estimator
Definition IBNLearner.h:944
ScoreType scoreType_
the score selected for learning
Definition IBNLearner.h:938
std::pair< double, double > G2(NodeId id1, NodeId id2, const std::vector< NodeId > &knowing={})
Return the <statistic,pvalue> pair for the G2 test in the database.
const ApproximationScheme * currentAlgorithm_
DAG learnDag_()
returns the DAG learnt
Database * priorDatabase_
the database used by the Dirichlet a priori
double mutualInformation(NodeId id1, NodeId id2, const std::vector< NodeId > &knowing={})
Return the mutual information of id1 and id2 in the base, conditioned by knowing for the BNLearner.
void createScore_()
create the score used for learning
PriorType getPriorType_() const
returns the type (as a string) of a given prior
NodeId idFromName(const std::string &var_name) const
returns the node id corresponding to a variable name
StructuralConstraintIndegree constraintIndegree_
the constraint for indegrees
Definition IBNLearner.h:970
PDAG learnPDAG()
learn a partial structure from a file (must have read the db before and must have selected miic)
std::string filename_
the filename database
bool hasMissingValues() const
returns true if the learner's database has missing values
SimpleMiic algoSimpleMiic_
the MIIC algorithm
Definition IBNLearner.h:998
Score * score_
the score used
Definition IBNLearner.h:941
StructuralConstraintMandatoryArcs constraintMandatoryArcs_
the constraint on mandatory arcs
Definition IBNLearner.h:982
Miic algoMiic_
the Constraint MIIC algorithm
void createCorrectedMutualInformation_()
create the Corrected Mutual Information instance for Miic
StructuralConstraintForbiddenArcs constraintForbiddenArcs_
the constraint on forbidden arcs
Definition IBNLearner.h:976
GreedyHillClimbing greedyHillClimbing_
the greedy hill climbing algorithm
DAG learnDAG()
learn a structure from a file (must have read the db before)
double score(NodeId vars, const std::vector< NodeId > &knowing={})
Return the value of the score currently in use by the BNLearner of a variable given a set of other va...
StructuralConstraintTabuList constraintTabuList_
the constraint for tabu lists
Definition IBNLearner.h:973
DAG initialDag_
an initial DAG given to learners
IBNLearner & operator=(const IBNLearner &)
copy operator
static void isCSVFileName_(const std::string &filename)
checks whether the extension of a CSV filename is correct
MixedGraph prepareMiic_()
prepares the initial graph for miic
LocalSearchWithTabuList localSearchWithTabuList_
the local search with tabu list algorithm
std::pair< std::size_t, std::size_t > useCrossValidationFold(const std::size_t learning_fold, const std::size_t k_fold)
sets the ranges of rows to be used for cross-validation learning
ParamEstimator * createParamEstimator_(const DBRowGeneratorParser &parser, bool take_into_account_score=true)
create the parameter estimator used for learning
StructuralConstraintSliceOrder constraintSliceOrder_
the constraint for 2TBNs
Definition IBNLearner.h:967
const DatabaseTable & database() const
returns the database used by the BNLearner
std::vector< double > rawPseudoCount(const std::vector< NodeId > &vars)
Return the pseudo-counts of NodeIds vars in the base in a raw array.
bool inducedTypes_
the policy for typing variables
Definition IBNLearner.h:935
CorrectedMutualInformation::KModeTypes kmodeMiic_
the penalty used in MIIC
double correctedMutualInformation(NodeId id1, NodeId id2, const std::vector< NodeId > &knowing={})
Return the mutual information of id1 and id2 in the base, conditioned by knowing for the BNLearner.
void fillDatabase(DATABASE &database, const bool retry_insertion=false)
fills the rows of the database table
const std::vector< std::string > & variableNames()
returns the names of the variables in the input dataset
const DBVector< std::string > & variableNames() const noexcept
returns the variable names for all the columns of the database
std::size_t nbVariables() const noexcept
returns the number of variables (columns) of the database
A class for storing a pair of sets of NodeIds, the second one corresponding to a conditional set.
Definition idCondSet.h:214
the class for computing Chi2 independence test scores
std::pair< double, double > statistics(NodeId var1, NodeId var2, const std::vector< NodeId > &rhs_ids={})
get the pair <chi2 statistic,pvalue> for a test var1 indep var2 given rhs_ids
the class for computing G2 independence test scores
Definition indepTestG2.h:67
std::pair< double, double > statistics(NodeId var1, NodeId var2, const std::vector< NodeId > &rhs_ids={})
get the pair <G2statistic,pvalue> for a test var1 indep var2 given rhs_ids
The K2 algorithm.
Definition K2.h:63
the no-prior class: corresponds to a 0-weight sample
Definition noPrior.h:65
The class for estimating parameters of CPTs using Maximum Likelihood.
The base class for estimating parameters of CPTs.
void setRanges(const std::vector< std::pair< std::size_t, std::size_t > > &new_ranges)
sets new ranges to perform the counts used by the parameter estimator
virtual void setNumberOfThreads(Size nb)
sets the number max of threads that can be used
The class for giving access to pseudo count : count in the database + prior.
Definition pseudoCount.h:67
std::vector< double > get(const std::vector< NodeId > &ids)
returns the pseudo-count of a pair of nodes given some other nodes
the class for computing AIC scores
Definition scoreAIC.h:71
virtual std::string isPriorCompatible() const final
indicates whether the prior is compatible (meaningful) with the score
the class for computing Bayesian Dirichlet (BD) log2 scores
Definition scoreBD.h:84
virtual std::string isPriorCompatible() const final
indicates whether the prior is compatible (meaningful) with the score
the class for computing BDeu scores
Definition scoreBDeu.h:78
std::string isPriorCompatible() const final
indicates whether the prior is compatible (meaningful) with the score
the class for computing BIC scores
Definition scoreBIC.h:71
virtual std::string isPriorCompatible() const final
indicates whether the prior is compatible (meaningful) with the score
the class for computing K2 scores (actually their log2 value)
Definition scoreK2.h:80
virtual std::string isPriorCompatible() const final
indicates whether the prior is compatible (meaningful) with the score
the class for computing Log2-likelihood scores
double score(const IdCondSet &idset)
returns the score for a given IdCondSet
virtual std::string isPriorCompatible() const final
indicates whether the prior is compatible (meaningful) with the score
The base class for all the scores used for learning (BIC, BDeu, etc).
Definition score.h:68
The base class for structural constraints imposed by DAGs.
the structural constraint for forbidding the creation of some arcs during structure learning
the class for structural constraints limiting the number of parents of nodes in a directed graph
the structural constraint indicating that some arcs shall never be removed or reversed
the structural constraint for forbidding children for some nodes
the structural constraint for forbidding parents for some nodes
the structural constraint for forbidding the creation of some arcs except those defined in the class ...
the "meta-programming" class for storing structural constraints
the structural constraint imposing a partial order over nodes
The class imposing a N-sized tabu list as a structural constraints for learning algorithms.
#define GUM_ERROR(type, msg)
Definition exceptions.h:72
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition types.h:74
Set< Edge > EdgeSet
Some typedefs and defines for shortcuts ...
Size NodeId
Type for node ids.
Set< Arc > ArcSet
Some typedefs and defines for shortcuts ...
HashTable< NodeId, VAL > NodeProperty
Property on graph elements.
the class for computing Chi2 scores
the class for computing G2 scores
include the inlined functions if necessary
Definition CSVParser.h:54
DatabaseTable readFile(const std::string &filename)
STL namespace.
the base class for all the independence tests used for learning
the class for computing Log2-likelihood scores