aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
BayesNet_tpl.h
Go to the documentation of this file.
1/****************************************************************************
2 * This file is part of the aGrUM/pyAgrum library. *
3 * *
4 * Copyright (c) 2005-2025 by *
5 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
6 * - Christophe GONZALES(_at_AMU) *
7 * *
8 * The aGrUM/pyAgrum library is free software; you can redistribute it *
9 * and/or modify it under the terms of either : *
10 * *
11 * - the GNU Lesser General Public License as published by *
12 * the Free Software Foundation, either version 3 of the License, *
13 * or (at your option) any later version, *
14 * - the MIT license (MIT), *
15 * - or both in dual license, as here. *
16 * *
17 * (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) *
18 * *
19 * This aGrUM/pyAgrum library is distributed in the hope that it will be *
20 * useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
21 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS *
22 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
23 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, *
25 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR *
26 * OTHER DEALINGS IN THE SOFTWARE. *
27 * *
28 * See LICENCES for more details. *
29 * *
30 * SPDX-FileCopyrightText: Copyright 2005-2025 *
31 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
32 * - Christophe GONZALES(_at_AMU) *
33 * SPDX-License-Identifier: LGPL-3.0-or-later OR MIT *
34 * *
35 * Contact : info_at_agrum_dot_org *
36 * homepage : http://agrum.gitlab.io *
37 * gitlab : https://gitlab.com/agrumery/agrum *
38 * *
39 ****************************************************************************/
40#pragma once
41
42
49
50#include <algorithm>
51#include <limits>
52#include <set>
53
70#include <agrum/BN/BayesNet.h>
72
74
75namespace gum {
76 template < typename GUM_SCALAR >
78 const std::string& node,
79 const std::string& default_domain) {
80 auto v = fastVariable< GUM_SCALAR >(node, default_domain);
81
82 NodeId res;
83 try {
84 res = bn.idFromName(v->name());
85 } catch (gum::NotFound const&) { res = bn.add(*v); }
86 return res;
87 }
88
89 template < typename GUM_SCALAR >
90 BayesNet< GUM_SCALAR > BayesNet< GUM_SCALAR >::fastPrototype(const std::string& dotlike,
91 Size domainSize) {
92 return fastPrototype(dotlike, "[" + std::to_string(domainSize) + "]");
93 }
94
95 template < typename GUM_SCALAR >
96 BayesNet< GUM_SCALAR > BayesNet< GUM_SCALAR >::fastPrototype(const std::string& dotlike,
97 const std::string& domain) {
99
100 for (const auto& chaine: split(remove_newline(dotlike), ";")) {
101 NodeId lastId = 0;
102 bool notfirst = false;
103 for (const auto& souschaine: split(chaine, "->")) {
104 bool forward = true;
105 for (auto& node: split(souschaine, "<-")) {
106 auto idVar = build_node(bn, node, domain);
107 if (notfirst) {
108 if (forward) {
109 bn.addArc(lastId, idVar);
110 forward = false;
111 } else {
112 bn.addArc(idVar, lastId);
113 }
114 } else {
115 notfirst = true;
116 forward = false;
117 }
118 lastId = idVar;
120 }
121 }
122 bn.generateCPTs();
123 bn.setProperty("name", "fastPrototype");
124 return bn;
125 }
126
127 template < typename GUM_SCALAR >
128 INLINE BayesNet< GUM_SCALAR >::BayesNet() : IBayesNet< GUM_SCALAR >() {
129 GUM_CONSTRUCTOR(BayesNet)
130 }
132 template < typename GUM_SCALAR >
133 INLINE BayesNet< GUM_SCALAR >::BayesNet(std::string name) : IBayesNet< GUM_SCALAR >(name) {
134 GUM_CONSTRUCTOR(BayesNet)
135 }
136
137 template < typename GUM_SCALAR >
138 BayesNet< GUM_SCALAR >::BayesNet(const BayesNet< GUM_SCALAR >& source) :
139 IBayesNet< GUM_SCALAR >(source), _varMap_(source._varMap_) {
140 GUM_CONS_CPY(BayesNet)
141
142 _copyTensors_(source);
144
145 template < typename GUM_SCALAR >
146 BayesNet< GUM_SCALAR >& BayesNet< GUM_SCALAR >::operator=(const BayesNet< GUM_SCALAR >& source) {
147 if (this != &source) {
149 _varMap_ = source._varMap_;
150
152 _copyTensors_(source);
153 }
154
155 return *this;
156 }
157
158 template < typename GUM_SCALAR >
160 GUM_DESTRUCTOR(BayesNet)
161 for (const auto& p: _probaMap_) {
162 delete p.second;
163 }
164 }
165
166 template < typename GUM_SCALAR >
168 return _varMap_.get(id);
169 }
170
171 template < typename GUM_SCALAR >
172 INLINE void BayesNet< GUM_SCALAR >::changeVariableName(NodeId id, const std::string& new_name) {
173 _varMap_.changeName(id, new_name);
174 }
175
176 template < typename GUM_SCALAR >
178 const std::string& old_label,
179 const std::string& new_label) {
180 if (variable(id).varType() != VarType::LABELIZED)
181 GUM_ERROR(NotFound, "Variable " << id << " is not a LabelizedVariable.")
182
183 LabelizedVariable const* var
184 = dynamic_cast< LabelizedVariable* >(const_cast< DiscreteVariable* >(&variable(id)));
185
186 var->changeLabel(var->posLabel(old_label), new_label);
187 }
188
189 template < typename GUM_SCALAR >
191 return _varMap_.get(var);
192 }
193
194 template < typename GUM_SCALAR >
196 auto ptr = new MultiDimArray< GUM_SCALAR >();
197 try {
198 return add(var, ptr);
199 } catch (Exception const&) {
200 delete ptr;
201 throw;
202 }
203 }
204
205 template < typename GUM_SCALAR >
206 INLINE NodeId BayesNet< GUM_SCALAR >::add(const std::string& fast_description,
207 unsigned int default_nbrmod) {
208 auto v = fastVariable< GUM_SCALAR >(fast_description, default_nbrmod);
209 if (v->domainSize() < 2) GUM_ERROR(OperationNotAllowed, v->name() << " has a domain size <2")
210 return add(*v);
211 }
212
213 template < typename GUM_SCALAR >
214 INLINE NodeId BayesNet< GUM_SCALAR >::add(const DiscreteVariable& var,
215 MultiDimImplementation< GUM_SCALAR >* aContent) {
216 NodeId proposedId = dag().nextNodeId();
217
218 return add(var, aContent, proposedId);
219 }
220
221 template < typename GUM_SCALAR >
223 auto ptr = new MultiDimArray< GUM_SCALAR >();
224
225 try {
226 return add(var, ptr, id);
227 } catch (Exception const&) {
228 delete ptr;
229 throw;
230 }
231 }
232
233 template < typename GUM_SCALAR >
236 NodeId id) {
237 _varMap_.insert(id, var);
238 this->dag_.addNodeWithId(id);
239
240 auto cpt = new Tensor< GUM_SCALAR >(aContent);
241 (*cpt) << variable(id);
242 _probaMap_.insert(id, cpt);
243 return id;
244 }
245
246 template < typename GUM_SCALAR >
247 INLINE NodeId BayesNet< GUM_SCALAR >::idFromName(const std::string& name) const {
248 return _varMap_.idFromName(name);
249 }
250
251 template < typename GUM_SCALAR >
252 INLINE const DiscreteVariable&
253 BayesNet< GUM_SCALAR >::variableFromName(const std::string& name) const {
254 return _varMap_.variableFromName(name);
255 }
256
257 template < typename GUM_SCALAR >
258 INLINE const Tensor< GUM_SCALAR >& BayesNet< GUM_SCALAR >::cpt(NodeId varId) const {
259 return *(_probaMap_[varId]);
260 }
261
262 template < typename GUM_SCALAR >
264 return _varMap_;
265 }
266
267 template < typename GUM_SCALAR >
269 erase(_varMap_.get(var));
270 }
271
272 template < typename GUM_SCALAR >
274 if (_varMap_.exists(varId)) {
275 // Reduce the variable child's CPT
276 for (const NodeSet& children = this->children(varId); const auto c: children) {
277 _probaMap_[c]->erase(variable(varId));
278 }
279
280 delete _probaMap_[varId];
281
282 _probaMap_.erase(varId);
283 _varMap_.erase(varId);
284 this->dag_.eraseNode(varId);
285 }
286 }
288 template < typename GUM_SCALAR >
290 if (!this->empty()) {
291 auto l = this->nodes();
292 for (const auto no: l) {
293 this->erase(no);
294 }
295 }
296 }
297
298 template < typename GUM_SCALAR >
300 if (this->dag_.existsArc(tail, head)) {
301 GUM_ERROR(DuplicateElement, "The arc (" << tail << "," << head << ") already exists.")
302 }
303
304 this->dag_.addArc(tail, head);
305 // Add parent in the child's CPT
306 (*(_probaMap_[head])) << variable(tail);
307 }
308
309 template < typename GUM_SCALAR >
310 INLINE void BayesNet< GUM_SCALAR >::addArc(const std::string& tail, const std::string& head) {
311 try {
312 addArc(this->idFromName(tail), this->idFromName(head));
313 } catch (DuplicateElement const&) {
314 GUM_ERROR(DuplicateElement, "The arc " << tail << "->" << head << " already exists.")
315 }
316 }
317
318 template < typename GUM_SCALAR >
319 INLINE void BayesNet< GUM_SCALAR >::eraseArc(const Arc& arc) {
320 if (_varMap_.exists(arc.tail()) && _varMap_.exists(arc.head())) {
321 NodeId head = arc.head();
322 NodeId tail = arc.tail();
323 this->dag_.eraseArc(arc);
324 // Remove parent from child's CPT
325 (*(_probaMap_[head])) >> variable(tail);
326 }
327 }
329 template < typename GUM_SCALAR >
331 eraseArc(Arc(tail, head));
332 }
333
334 template < typename GUM_SCALAR >
336 // check that the arc exists
337 if (!_varMap_.exists(arc.tail()) || !_varMap_.exists(arc.head()) || !dag().existsArc(arc)) {
338 GUM_ERROR(InvalidArc, "a non-existing arc cannot be reversed")
339 }
340
341 NodeId tail = arc.tail();
342 NodeId head = arc.head();
343
344 // check that the reversal does not induce a cycle
345 try {
346 DAG d = dag();
347 d.eraseArc(arc);
348 d.addArc(head, tail);
349 } catch (Exception const&) {
350 GUM_ERROR(InvalidArc, "this arc reversal would induce a directed cycle")
351 }
352
353 // with the same notations as Shachter (1986), "evaluating influence
354 // diagrams", p.878, we shall first compute the product of probabilities:
355 // pi_j^old (x_j | x_c^old(j) ) * pi_i^old (x_i | x_c^old(i) )
356 Tensor< GUM_SCALAR > prod{cpt(tail) * cpt(head)};
357
358 // modify the topology of the graph: add to tail all the parents of head
359 // and add to head all the parents of tail
360 beginTopologyTransformation();
361 NodeSet new_parents;
362 for (const auto node: this->parents(tail))
363 new_parents.insert(node);
364 for (const auto node: this->parents(head))
365 new_parents.insert(node);
366 // remove arc (head, tail)
367 eraseArc(arc);
368
369 // add the necessary arcs to the tail
370 for (const auto p: new_parents) {
371 if ((p != tail) && !dag().existsArc(p, tail)) { addArc(p, tail); }
372 }
373
374 addArc(head, tail);
375 // add the necessary arcs to the head
376 new_parents.erase(tail);
377
378 for (const auto p: new_parents) {
379 if ((p != head) && !dag().existsArc(p, head)) { addArc(p, head); }
380 }
381
382 endTopologyTransformation();
383
384 // update the conditional distributions of head and tail
386 del_vars << &(variable(tail));
387 Tensor< GUM_SCALAR > new_cpt_head = prod.sumOut(del_vars).putFirst(&variable(head));
388
389 auto& cpt_head = const_cast< Tensor< GUM_SCALAR >& >(cpt(head));
390 cpt_head = std::move(new_cpt_head);
391
392 Tensor< GUM_SCALAR > new_cpt_tail{(prod / cpt_head).putFirst(&variable(tail))};
393 auto& cpt_tail = const_cast< Tensor< GUM_SCALAR >& >(cpt(tail));
394 cpt_tail = std::move(new_cpt_tail);
396
397 template < typename GUM_SCALAR >
399 reverseArc(Arc(tail, head));
400 }
401
402 //==============================================
403 // Aggregators
404 //=============================================
405 template < typename GUM_SCALAR >
409
410 template < typename GUM_SCALAR >
412 if (var.domainSize() > 2) GUM_ERROR(SizeError, "an AND has to be boolean")
413
414 return add(var, new aggregator::And< GUM_SCALAR >());
415 }
416
417 template < typename GUM_SCALAR >
419 return add(var, new aggregator::Count< GUM_SCALAR >(value));
420 }
421
422 template < typename GUM_SCALAR >
424 if (var.domainSize() > 2) GUM_ERROR(SizeError, "an EXISTS has to be boolean")
425
426 return add(var, new aggregator::Exists< GUM_SCALAR >(value));
427 }
428
429 template < typename GUM_SCALAR >
431 if (var.domainSize() > 2) GUM_ERROR(SizeError, "an EXISTS has to be boolean")
432
433 return add(var, new aggregator::Forall< GUM_SCALAR >(value));
434 }
435
436 template < typename GUM_SCALAR >
440
441 template < typename GUM_SCALAR >
445
446 template < typename GUM_SCALAR >
450
451 template < typename GUM_SCALAR >
453 if (var.domainSize() > 2) GUM_ERROR(SizeError, "an OR has to be boolean")
454
455 return add(var, new aggregator::Or< GUM_SCALAR >());
456 }
457
458 template < typename GUM_SCALAR >
462
463 //================================
464 // ICIModels
465 //================================
466 template < typename GUM_SCALAR >
468 GUM_SCALAR external_weight) {
469 return addNoisyORCompound(var, external_weight);
471
472 template < typename GUM_SCALAR >
474 GUM_SCALAR external_weight) {
475 return add(var, new MultiDimNoisyORCompound< GUM_SCALAR >(external_weight));
476 }
477
478 template < typename GUM_SCALAR >
480 GUM_SCALAR external_weight) {
481 return add(var, new MultiDimNoisyORNet< GUM_SCALAR >(external_weight));
482 }
484 template < typename GUM_SCALAR >
486 GUM_SCALAR external_weight) {
487 return add(var, new MultiDimNoisyAND< GUM_SCALAR >(external_weight));
488 }
489
490 template < typename GUM_SCALAR >
492 GUM_SCALAR external_weight) {
493 return add(var, new MultiDimLogit< GUM_SCALAR >(external_weight));
494 }
495
496 template < typename GUM_SCALAR >
498 GUM_SCALAR external_weight,
499 NodeId id) {
500 return addNoisyORCompound(var, external_weight, id);
501 }
502
503 template < typename GUM_SCALAR >
505 GUM_SCALAR external_weight,
506 NodeId id) {
507 return add(var, new MultiDimNoisyAND< GUM_SCALAR >(external_weight), id);
508 }
509
510 template < typename GUM_SCALAR >
512 GUM_SCALAR external_weight,
513 NodeId id) {
514 return add(var, new MultiDimLogit< GUM_SCALAR >(external_weight), id);
517 template < typename GUM_SCALAR >
519 GUM_SCALAR external_weight,
520 NodeId id) {
521 return add(var, new MultiDimNoisyORCompound< GUM_SCALAR >(external_weight), id);
522 }
523
524 template < typename GUM_SCALAR >
526 GUM_SCALAR external_weight,
527 NodeId id) {
528 return add(var, new MultiDimNoisyORNet< GUM_SCALAR >(external_weight), id);
529 }
530
531 template < typename GUM_SCALAR >
532 void BayesNet< GUM_SCALAR >::addWeightedArc(NodeId tail, NodeId head, GUM_SCALAR causalWeight) {
533 auto* CImodel = dynamic_cast< const MultiDimICIModel< GUM_SCALAR >* >(cpt(head).content());
534
535 if (CImodel != 0) {
536 // or is OK
537 addArc(tail, head);
538
539 CImodel->causalWeight(variable(tail), causalWeight);
540 } else {
542 "Head variable (" << variable(head).name() << ") is not a CIModel variable !")
543 }
544 }
545
546 template < typename GUM_SCALAR >
547 INLINE std::ostream& operator<<(std::ostream& output, const BayesNet< GUM_SCALAR >& bn) {
548 output << bn.toString();
549 return output;
550 }
553 template < typename GUM_SCALAR >
555 for (const auto node: nodes())
556 _probaMap_[node]->beginMultipleChanges();
557 }
558
560 template < typename GUM_SCALAR >
562 for (const auto node: nodes())
563 _probaMap_[node]->endMultipleChanges();
564 }
565
567 template < typename GUM_SCALAR >
569 // Removing previous tensors
570 for (const auto& elt: _probaMap_) {
571 delete elt.second;
572 }
574 _probaMap_.clear();
575 }
576
578 template < typename GUM_SCALAR >
579 void BayesNet< GUM_SCALAR >::_copyTensors_(const BayesNet< GUM_SCALAR >& source) {
580 // Copying tensors
581
582 for (const auto& src: source._probaMap_) {
583 // First we build the node's CPT
584 auto copy_array = new Tensor< GUM_SCALAR >();
585 copy_array->beginMultipleChanges();
586 for (gum::Idx i = 0; i < src.second->nbrDim(); i++) {
587 (*copy_array) << variableFromName(src.second->variable(i).name());
588 }
589 copy_array->endMultipleChanges();
590 copy_array->copyFrom(*(src.second));
591
592 // We add the CPT to the CPT hashmap
593 _probaMap_.insert(src.first, copy_array);
594 }
595 }
597 template < typename GUM_SCALAR >
599 for (const auto node: nodes())
600 generateCPT(node);
601 }
602
603 template < typename GUM_SCALAR >
606
607 generator.generateCPT(cpt(node).pos(variable(node)), cpt(node));
608 }
610 template < typename GUM_SCALAR >
611 void BayesNet< GUM_SCALAR >::changeTensor(NodeId id, Tensor< GUM_SCALAR >* newPot) {
612 if (cpt(id).nbrDim() != newPot->nbrDim()) {
614 "cannot exchange tensors with different "
615 "dimensions for variable with id "
616 << id)
619 for (Idx i = 0; i < cpt(id).nbrDim(); i++) {
620 if (&cpt(id).variable(i) != &(newPot->variable(i))) {
622 "cannot exchange tensors because, for variable with id " << id << ", dimension "
623 << i << " differs. ")
624 }
625 }
626
627 _unsafeChangeTensor_(id, newPot);
628 }
629
630 template < typename GUM_SCALAR >
631 void BayesNet< GUM_SCALAR >::_unsafeChangeTensor_(NodeId id, Tensor< GUM_SCALAR >* newPot) {
632 delete _probaMap_[id];
633 _probaMap_[id] = newPot;
634 }
636 template < typename GUM_SCALAR >
637 void BayesNet< GUM_SCALAR >::changeTensor(const std::string& name, Tensor< GUM_SCALAR >* newPot) {
638 changeTensor(idFromName(name), newPot);
639 }
640
641 template < typename GUM_SCALAR >
642 BayesNet< GUM_SCALAR >
644 const gum::Instantiation& interventions) const {
645 Instantiation all;
646 for (gum::Idx i = 0; i < observations.nbrDim(); i++) {
647 if (interventions.contains(observations.variable(i))) {
649 "Cannot have both an observation and an intervention on the same variable")
650 }
651 all.add(observations.variable(i));
652 }
653 for (gum::Idx i = 0; i < observations.nbrDim(); i++) {
654 all.add(interventions.variable(i));
655 }
656 all.setVals(observations);
657 all.setVals(interventions);
658
659 NodeSet cpt_changed;
660
662
664 for (gum::Idx i = 0; i < observations.nbrDim(); i++) {
665 const std::string& nam = observations.variable(i).name();
666 const gum::NodeId nod = this->idFromName(nam);
667 for (gum::NodeId child: this->children(nod)) {
668 bn.eraseArc(bn.idFromName(nam), bn.idFromName(this->variable(child).name()));
669 }
670 }
671 for (gum::Idx i = 0; i < interventions.nbrDim(); i++) {
672 const std::string& nam = interventions.variable(i).name();
673 const gum::NodeId nod = this->idFromName(nam);
674 for (gum::NodeId child: this->children(nod)) {
675 bn.eraseArc(bn.idFromName(nam), bn.idFromName(this->variable(child).name()));
676 }
677 for (gum::NodeId par: this->parents(nod)) {
678 const auto v1 = bn.idFromName(this->variable(par).name());
679 const auto v2 = bn.idFromName(nam);
680 if (bn.existsArc(v1, v2)) bn.eraseArc(v1, v2);
681 }
682 cpt_changed.insert(bn.idFromName(nam));
683 bn.cpt(bn.idFromName(nam))
685 interventions.val(i)));
686 }
688
689 for (gum::Idx i = 0; i < all.nbrDim(); i++) {
690 const gum::NodeId nod = this->idFromName(all.variable(i).name());
691 for (gum::NodeId child: this->children(nod)) {
692 if (!cpt_changed.contains(child)) {
693 cpt_changed.insert(child);
694 bn.cpt(bn.idFromName(this->variable(child).name()))
695 .fillWith(this->cpt(child).extract(all));
696 }
697 }
698 }
700 return bn;
701 }
702} /* namespace gum */
Class representing Bayesian networks.
amplitude aggregator
and aggregator
virtual void eraseArc(const Arc &arc)
removes an arc from the ArcGraphPart
The base class for all directed edges.
GUM_NODISCARD NodeId head() const
returns the head of the arc
GUM_NODISCARD NodeId tail() const
returns the tail of the arc
Exception base for argument error.
Class representing a Bayesian network.
Definition BayesNet.h:93
NodeId addNoisyORNet(const DiscreteVariable &var, GUM_SCALAR external_weight)
Add a variable, its associated node and a gum::noisyOR implementation.
NodeId addLogit(const DiscreteVariable &var, GUM_SCALAR external_weight, NodeId id)
Add a variable, its associated node and a Logit implementation.
NodeId addAMPLITUDE(const DiscreteVariable &var)
Others aggregators.
NodeId addMEDIAN(const DiscreteVariable &var)
Others aggregators.
void endTopologyTransformation()
terminates a sequence of insertions/deletions of arcs by adjusting all CPTs dimensions.
const Tensor< GUM_SCALAR > & cpt(NodeId varId) const final
Returns the CPT of a variable.
void changeVariableLabel(NodeId id, const std::string &old_label, const std::string &new_label)
Changes a variable's label in the gum::BayesNet.
void changeVariableName(NodeId id, const std::string &new_name)
Changes a variable's name in the gum::BayesNet.
NodeId addNoisyOR(const DiscreteVariable &var, GUM_SCALAR external_weight)
Add a variable, it's associate node and a gum::noisyOR implementation.
void beginTopologyTransformation()
When inserting/removing arcs, node CPTs change their dimension with a cost in time.
NodeId addFORALL(const DiscreteVariable &var, Idx value=1)
Others aggregators.
NodeId addEXISTS(const DiscreteVariable &var, Idx value=1)
Others aggregators.
void erase(NodeId varId)
Remove a variable from the gum::BayesNet.
NodeId addOR(const DiscreteVariable &var)
Add a variable, it's associate node and an OR implementation.
const DiscreteVariable & variableFromName(const std::string &name) const final
Returns a variable given its name in the gum::BayesNet.
NodeId add(const DiscreteVariable &var)
Add a variable to the gum::BayesNet.
NodeId addAND(const DiscreteVariable &var)
Add a variable, its associated node and an AND implementation.
void generateCPTs() const
randomly generates CPTs for a given structure
NodeId addCOUNT(const DiscreteVariable &var, Idx value=1)
Others aggregators.
void clear()
clear the whole Bayes net
BayesNet()
Default constructor.
const DiscreteVariable & variable(NodeId id) const final
Returns a gum::DiscreteVariable given its gum::NodeId in the gum::BayesNet.
void changeTensor(NodeId id, Tensor< GUM_SCALAR > *newPot)
change the CPT associated to nodeId to newPot delete the old CPT associated to nodeId.
void addWeightedArc(NodeId tail, NodeId head, GUM_SCALAR causalWeight)
Add an arc in the BN, and update arc.head's CPT.
void reverseArc(NodeId tail, NodeId head)
Reverses an arc while preserving the same joint distribution.
NodeId addMIN(const DiscreteVariable &var)
Others aggregators.
BayesNet< GUM_SCALAR > contextualize(const gum::Instantiation &observations, const gum::Instantiation &interventions) const
create a contextual BN from this and a set of hard observations and hard interventions.
const VariableNodeMap & variableNodeMap() const final
Returns a map between variables and nodes of this gum::BayesNet.
void _copyTensors_(const BayesNet< GUM_SCALAR > &source)
copy of tensors from a BN to another, using names of vars as ref.
static BayesNet< GUM_SCALAR > fastPrototype(const std::string &dotlike, Size domainSize)
Create a Bayesian network with a dot-like syntax which specifies:
void addArc(NodeId tail, NodeId head)
Add an arc in the BN, and update arc.head's CPT.
void eraseArc(const Arc &arc)
Removes an arc in the BN, and update head's CTP.
NodeId addSUM(const DiscreteVariable &var)
Others aggregators.
virtual ~BayesNet()
Destructor.
NodeProperty< Tensor< GUM_SCALAR > * > _probaMap_
Mapping between the variable's id and their CPT.
Definition BayesNet.h:693
VariableNodeMap _varMap_
the map between variable and id
Definition BayesNet.h:690
NodeId addNoisyAND(const DiscreteVariable &var, GUM_SCALAR external_weight, NodeId id)
Add a variable, its associate node and a noisyAND implementation.
NodeId nodeId(const DiscreteVariable &var) const final
Returns a variable's id in the gum::BayesNet.
NodeId addMAX(const DiscreteVariable &var)
Others aggregators.
void generateCPT(NodeId node) const
randomly generate CPT for a given node in a given structure
void _clearTensors_()
clear all tensors
void _unsafeChangeTensor_(NodeId id, Tensor< GUM_SCALAR > *newPot)
change the CPT associated to nodeId to newPot delete the old CPT associated to nodeId.
NodeId idFromName(const std::string &name) const final
Returns a variable's id given its name in the gum::BayesNet.
BayesNet< GUM_SCALAR > & operator=(const BayesNet< GUM_SCALAR > &source)
Copy operator.
NodeId addNoisyORCompound(const DiscreteVariable &var, GUM_SCALAR external_weight)
Add a variable, it's associate node and a gum::noisyOR implementation.
Base class for dag.
Definition DAG.h:121
void addArc(NodeId tail, NodeId head) final
insert a new arc into the directed graph
Definition DAG_inl.h:63
const DAG & dag() const
Returns a constant reference to the dag of this Bayes Net.
DAG dag_
The DAG of this Directed Graphical Model.
Definition DAGmodel.h:272
bool existsArc(const NodeId tail, const NodeId head) const
return true if the arc tail->head exists in the DAGmodel
const NodeSet & children(const NodeId id) const
returns the set of nodes with arc outgoing from a given node
const NodeGraphPart & nodes() const final
Returns a constant reference to the dag of this Bayes Net.
Base class for discrete random variable.
virtual Size domainSize() const =0
Exception : a similar element already exists.
Base class for all aGrUM's exceptions.
Definition exceptions.h:118
void setProperty(const std::string &name, const std::string &value)
Add or change a property of this GraphicalModel.
virtual bool empty() const
Return true if this graphical model is empty.
IBayesNet< GUM_SCALAR > & operator=(const IBayesNet< GUM_SCALAR > &source)
Copy operator.
IBayesNet()
Default constructor.
Class for assigning/browsing values to tuples of discrete variables.
void add(const DiscreteVariable &v) final
Adds a new variable in the Instantiation.
Instantiation & setVals(const Instantiation &i)
Assign the values from i in the Instantiation.
bool contains(const DiscreteVariable &v) const final
Indicates whether a given variable belongs to the Instantiation.
Idx val(Idx i) const
Returns the current value of the variable at position i.
const DiscreteVariable & variable(Idx i) const final
Returns the variable at position i in the tuple.
Idx nbrDim() const final
Returns the number of variables in the Instantiation.
Exception : there is something wrong with an arc.
class LabelizedVariable
Idx posLabel(const std::string &label) const
return the pos from label
void changeLabel(Idx pos, const std::string &aLabel) const
change a label for this index
Multidimensional matrix stored as an array in memory.
abstract class for Conditional Indepency Models
<agrum/base/multidim/multiDimImplementation.h>
Logit representation.
Noisy AND representation.
Noisy OR representation.
Exception : the element we looked for cannot be found.
Exception : operation not allowed.
bool contains(const Key &k) const
Indicates whether a given elements belong to the set.
Definition set_tpl.h:497
void insert(const Key &k)
Inserts a new element into the set.
Definition set_tpl.h:539
<agrum/BN/generator/simpleCPTGenerator.h>
Exception : problem with size.
static Tensor< GUM_SCALAR > deterministicTensor(const DiscreteVariable &var, Idx value)
Container used to map discrete variables with nodes.
const std::string & name() const
returns the name of the variable
amplitude aggregator
Definition amplitude.h:74
And aggregator.
Definition and.h:74
count aggregator
Definition count.h:76
exists aggregator
Definition exists.h:73
forall aggregator
Definition forall.h:74
max aggregator
Definition max.h:73
median aggregator
Definition median.h:79
min aggregator
Definition min.h:72
or aggregator
Definition or.h:75
Sum aggregator.
Definition sum.h:71
count aggregator
#define GUM_ERROR(type, msg)
Definition exceptions.h:72
exists aggregator
forall aggregator
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition types.h:74
Size Idx
Type for indexes.
Definition types.h:79
Size NodeId
Type for node ids.
Set< NodeId > NodeSet
Some typdefs and define for shortcuts ...
std::string remove_newline(const std::string &s)
remove all newlines in a string
std::vector< std::string > split(const std::string &str, const std::string &delim)
Split str using the delimiter.
max aggregator
median aggregator
min aggregator
class for LOGIT implementation as multiDim
class for NoisyAND-net implementation as multiDim
class for multiDimNoisyORCompound
class for NoisyOR-net implementation as multiDim
gum is the global namespace for all aGrUM entities
Definition agrum.h:46
Set< const DiscreteVariable * > VariableSet
std::ostream & operator<<(std::ostream &stream, const AVLTree< Val, Cmp > &tree)
display the content of a tree
Definition AVLTree.h:913
std::unique_ptr< DiscreteVariable > fastVariable(std::string var_description, Size default_domain_size)
Create a pointer on a Discrete Variable from a "fast" syntax.
NodeId build_node(gum::BayesNet< GUM_SCALAR > &bn, const std::string &node, const std::string &default_domain)
or aggregator
Abstract class for generating Conditional Probability Tables.
sum aggregator
Utilities for manipulating strings.