aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
regress_tpl.h
Go to the documentation of this file.
1/****************************************************************************
2 * This file is part of the aGrUM/pyAgrum library. *
3 * *
4 * Copyright (c) 2005-2025 by *
5 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
6 * - Christophe GONZALES(_at_AMU) *
7 * *
8 * The aGrUM/pyAgrum library is free software; you can redistribute it *
9 * and/or modify it under the terms of either : *
10 * *
11 * - the GNU Lesser General Public License as published by *
12 * the Free Software Foundation, either version 3 of the License, *
13 * or (at your option) any later version, *
14 * - the MIT license (MIT), *
15 * - or both in dual license, as here. *
16 * *
17 * (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) *
18 * *
19 * This aGrUM/pyAgrum library is distributed in the hope that it will be *
20 * useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
21 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS *
22 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
23 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, *
25 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR *
26 * OTHER DEALINGS IN THE SOFTWARE. *
27 * *
28 * See LICENCES for more details. *
29 * *
30 * SPDX-FileCopyrightText: Copyright 2005-2025 *
31 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
32 * - Christophe GONZALES(_at_AMU) *
33 * SPDX-License-Identifier: LGPL-3.0-or-later OR MIT *
34 * *
35 * Contact : info_at_agrum_dot_org *
36 * homepage : http://agrum.gitlab.io *
37 * gitlab : https://gitlab.com/agrumery/agrum *
38 * *
39 ****************************************************************************/
40#pragma once
41
42
// NOTE(review): lines 43-53 of the original header (presumably #include
// directives) were elided by the documentation extractor -- confirm in VCS.
51
54
// Shorthand for aGrUM's small-object pool allocator. DEALLOCATE must be
// given the allocation size (y) because the pool groups chunks by size.
55#define ALLOCATE(x) SmallObjectAllocator::instance().allocate(x)
56#define DEALLOCATE(x, y) SmallObjectAllocator::instance().deallocate(x, y)
57
58namespace gum {
59
60 template < typename GUM_SCALAR,
61 template < typename > class COMBINEOPERATOR,
62 template < typename > class PROJECTOPERATOR,
63 template < typename > class TerminalNodePolicy >
// Constructor: stores the two operand function graphs and sizes the two
// "instantiation needed" tables on the graphs' real node counts.
// NOTE(review): lines 64-66 (the qualified Regress<...>::Regress( header
// and the DG1/DG2 parameters) were elided by the documentation extractor.
67 const gum::VariableSet* primedVars,
68 const DiscreteVariable* targetVar,
69 const GUM_SCALAR neutral) :
70 _DG1_(DG1), _DG2_(DG2), _neutral_(neutral), _combine_(), _project_(),
71 _DG1InstantiationNeeded_(DG1->realSize(), true, false),
72 _DG2InstantiationNeeded_(DG2->realSize(), true, false) {
73 GUM_CONSTRUCTOR(Regress);
// _nbVar_ and _default_ are only really set up later,
// in _establishVarOrder_().
75 _nbVar_ = 0;
76 _default_ = nullptr;
77 _primedVars_ = primedVars;
78 _targetVar_ = targetVar;
79 }
80
81 template < typename GUM_SCALAR,
82 template < typename > class COMBINEOPERATOR,
83 template < typename > class PROJECTOPERATOR,
84 template < typename > class TerminalNodePolicy >
// Destructor: releases every per-node "instantiation needed" table allocated
// in _findRetrogradeVariables_, plus the shared all-zero _default_ table.
// NOTE(review): line 85 (the qualified ~Regress() header) was elided by the
// documentation extractor.
86 GUM_DESTRUCTOR(Regress);
87
88 for (auto instIter = _DG1InstantiationNeeded_.beginSafe();
89 instIter != _DG1InstantiationNeeded_.endSafe();
90 ++instIter)
// Each entry is an array of _nbVar_ short ints.
91 DEALLOCATE(instIter.val(), sizeof(short int) * _nbVar_);
92
93 for (auto instIter = _DG2InstantiationNeeded_.beginSafe();
94 instIter != _DG2InstantiationNeeded_.endSafe();
95 ++instIter)
96 DEALLOCATE(instIter.val(), sizeof(short int) * _nbVar_);
97
// _default_ is only allocated when _nbVar_ != 0 (see _establishVarOrder_).
98 if (_nbVar_ != 0) DEALLOCATE(_default_, sizeof(short int) * _nbVar_);
99 }
100
101 // This function is the main function. To be called every time an operation
102 // between the two given Function Graphs is required
103 template < typename GUM_SCALAR,
104 template < typename > class COMBINEOPERATOR,
105 template < typename > class PROJECTOPERATOR,
106 template < typename > class TerminalNodePolicy >
// NOTE(review): lines 107-111 (the qualified compute() header and its
// first statements -- presumably creating _rd_ and calling
// _establishVarOrder_ / _findRetrogradeVariables_) were elided by the
// documentation extractor; confirm against VCS.
112
// Scratch vector holding, for every variable of the final order, its
// currently instantiated modality (0 = not instantiated yet; see the
// context-key comment above _compute_).
113 Idx* varInst = nullptr;
114 if (_nbVar_ != 0) {
115 varInst = static_cast< Idx* >(ALLOCATE(sizeof(Idx) * _nbVar_));
116 for (Idx i = 0; i < _nbVar_; i++)
117 varInst[i] = (Idx)0;
118 }
119
120 O4DGContext conti(varInst, _nbVar_);
121 conti.setDG1Node(_DG1_->root());
122 conti.setDG2Node(_DG2_->root());
123
// (Idx)0 - 1 wraps to the maximal Idx so that, inside _compute_,
// lastInstVarPos + 1 overflows back to 0 ("no variable instantiated yet").
124 NodeId root = _compute_(conti, (Idx)0 - 1);
125 _rd_->manager()->setRootNode(root);
126
127 if (_nbVar_ != 0) DEALLOCATE(varInst, sizeof(Idx) * _nbVar_);
128
// The regressed-out variable must not remain in the result's sequence.
129 _rd_->erase(*_targetVar_);
130
131 return _rd_;
132 }
133
134 // This function computes an efficient order for the final decision diagrams.
135 // Its main criterion to do so is the number of
136 // re-explorations to be done
137 template < typename GUM_SCALAR,
138 template < typename > class COMBINEOPERATOR,
139 template < typename > class PROJECTOPERATOR,
140 template < typename > class TerminalNodePolicy >
// NOTE(review): lines 141-142 (the qualified _establishVarOrder_() header)
// were elided by the documentation extractor.
143 SequenceIteratorSafe< const DiscreteVariable* > fite = _DG1_->variablesSequence().beginSafe();
144 SequenceIteratorSafe< const DiscreteVariable* > site = _DG2_->variablesSequence().beginSafe();
145
146 while (fite != _DG1_->variablesSequence().endSafe()
147 && site != _DG2_->variablesSequence().endSafe()) {
148 // Test : if var from first order is already in final order
149 // we move onto the next one
150 if (_rd_->variablesSequence().exists(*fite)) {
151 ++fite;
152 continue;
153 }
154
155 // Test : if var from second order is already in final order
156 // we move onto the next one
157 if (_rd_->variablesSequence().exists(*site)) {
158 ++site;
159 continue;
160 }
161
162 // Test : is current var of the first order present in the second order.
163 // if not we add it to final order
164 if (!_DG2_->variablesSequence().exists(*fite) && !_primedVars_->exists(*fite)) {
165 _rd_->add(**fite);
166 ++fite;
167 continue;
168 }
169
170 // Test : is current var of the second order present in the first order.
171 // if not we add it to final order
172 if (!_DG1_->variablesSequence().exists(*site) && !_primedVars_->exists(*site)) {
173 _rd_->add(**site);
174 ++site;
175 continue;
176 }
177
178 // Test : do both orders agree on the current variable.
179 // if so we add it once and advance both iterators
180 if (*fite == *site) {
181 _rd_->add(**fite);
182 ++fite;
183 ++site;
184 continue;
185 }
186
187 // Test : if choosing first order var costs less in terms of re-exploration,
188 // we choose it
189 _rd_->add(**fite);
190 ++fite;
191 }
192
193 // Whenever an iterator has finished its sequence,
194 // the other may still be in the middle of its one.
195 // Hence, this part ensures that any variables remaining
196 // will be added to the final sequence if needed.
197 if (fite == _DG1_->variablesSequence().endSafe()) {
198 for (; site != _DG2_->variablesSequence().endSafe(); ++site)
199 if (!_rd_->variablesSequence().exists(*site)) _rd_->add(**site);
200 } else {
201 for (; fite != _DG1_->variablesSequence().endSafe(); ++fite)
202 if (!_rd_->variablesSequence().exists(*fite)) _rd_->add(**fite);
203 }
204
205 // Various initialization needed now that we have a bigger picture
206 _nbVar_ = _rd_->variablesSequence().size();
207
// _default_ is the shared all-zero "no retrograde variable" table used in
// _compute_ for nodes absent from the instantiation-needed hashtables.
208 if (_nbVar_ != 0) {
209 _default_ = static_cast< short int* >(ALLOCATE(sizeof(short int) * _nbVar_));
210 for (Idx i = 0; i < _nbVar_; i++)
211 _default_[i] = (short int)0;
212 }
213 }
214
215 // This function computes for every node if any retrograde variable is
216 // present below
217 template < typename GUM_SCALAR,
218 template < typename > class COMBINEOPERATOR,
219 template < typename > class PROJECTOPERATOR,
220 template < typename > class TerminalNodePolicy >
// NOTE(review): lines 221-223 (the qualified _findRetrogradeVariables_()
// header taking dg and dgInstNeed) were elided by the doc extractor.
224 HashTable< NodeId, short int* > nodesVarDescendant;
225 Size tableSize = Size(_nbVar_ * sizeof(short int));
226
// Bottom-up pass: iterate the variable sequence in reverse so that, when a
// node is processed, the descendant tables of all its sons already exist.
227 for (auto varIter = dg->variablesSequence().rbeginSafe();
228 varIter != dg->variablesSequence().rendSafe();
229 --varIter) {
230 Idx varPos = _rd_->variablesSequence().pos(*varIter);
231
232 const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
233 while (nodeIter != nullptr) {
// Both tables are freed later: instantiationNeeded in ~Regress (via
// dgInstNeed), varDescendant in the cleanup loop at the end of this method.
234 short int* instantiationNeeded = static_cast< short int* >(ALLOCATE(tableSize));
235 dgInstNeed.insert(nodeIter->element(), instantiationNeeded);
236 short int* varDescendant = static_cast< short int* >(ALLOCATE(tableSize));
237 nodesVarDescendant.insert(nodeIter->element(), varDescendant);
238 for (Idx j = 0; j < _nbVar_; j++) {
239 instantiationNeeded[j] = (short int)0;
240 varDescendant[j] = (short int)0;
241 }
242
243
244 varDescendant[varPos] = (short int)1;
245 for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons(); ++modality) {
246 if (!dg->isTerminalNode(dg->node(nodeIter->element())->son(modality))) {
247 short int* sonVarDescendant
248 = nodesVarDescendant[dg->node(nodeIter->element())->son(modality)];
249 for (Idx varIdx = 0; varIdx < _nbVar_; varIdx++) {
250 varDescendant[varIdx] += sonVarDescendant[varIdx];
// A variable seen below this node but placed before it in _rd_'s order
// is retrograde: it must be instantiated before exploring this node.
251 if (varDescendant[varIdx] && varIdx < varPos)
252 instantiationNeeded[varIdx] = (short int)1;
253 }
254 }
255 }
256 nodeIter = nodeIter->nextLink();
257 }
258 }
259
// Top-down pass: propagate each node's instantiation needs to the sons that
// still have the corresponding variable beneath them.
260 for (auto varIter = dg->variablesSequence().beginSafe();
261 varIter != dg->variablesSequence().endSafe();
262 ++varIter) {
263 const Link< NodeId >* nodeIter = dg->varNodeListe(*varIter)->list();
264 while (nodeIter != nullptr) {
265 for (Idx modality = 0; modality < dg->node(nodeIter->element())->nbSons(); ++modality) {
266 NodeId sonId = dg->node(nodeIter->element())->son(modality);
267 if (!dg->isTerminalNode(sonId)) {
268 for (Idx varIdx = 0; varIdx < _nbVar_; ++varIdx) {
269 if (dgInstNeed[nodeIter->element()][varIdx] && nodesVarDescendant[sonId][varIdx]) {
270 dgInstNeed[sonId][varIdx] = (short int)1;
271 }
272 }
273 }
274 }
275 nodeIter = nodeIter->nextLink();
276 }
277 }
278
// The descendant tables were only needed locally; free them now.
279 for (HashTableIterator< NodeId, short int* > it = nodesVarDescendant.begin();
280 it != nodesVarDescendant.end();
281 ++it) {
282 DEALLOCATE(it.val(), tableSize);
283 }
284
285 nodesVarDescendant.clear();
286 }
287
288 // A key is used for pruning unnecessary operations since once a node has been
289 // visited in a given context, there's no use to revisit him,
290 // the result will be the same node, so we just have to do an association
291 // context - node.
292 // The context consists in :
293 // _ Leader node we are visiting.
294 // _ Follower node we are visiting.
295 // _ For all retrograde variables, if it has been instantiated
296 // before, current modality instantiated, meaning :
297 // _ 0 means the variable hasn't been instantiated yet,
298 // _ From 1 to domainSize + 1 means that current modality
299 // index of variable is value - 1,
300 // _ domainSize + 2 means variable is on default mode.
301 // A key - node association is made each time we create a node in resulting
302 // diagram.
303 // Since GUM_MULTI_DIM_DECISION_DIAGRAM_RECUR_FUNCTION is a corner step in
304 // algorithm ( meaning each time we explore a node we go through
305 // this function ), checks only have to be at the beginning of that function.
306 template < typename GUM_SCALAR,
307 template < typename > class COMBINEOPERATOR,
308 template < typename > class PROJECTOPERATOR,
309 template < typename > class TerminalNodePolicy >
310 INLINE NodeId
// NOTE(review): line 311 (the qualified _compute_( header) was elided by
// the documentation extractor.
312 O4DGContext& currentSituation,
313 Idx lastInstVarPos) {
314 NodeId newNode = 0;
315
316 // If both current nodes are terminal,
317 // we only have to compute the resulting value
318 if (_DG1_->isTerminalNode(currentSituation.DG1Node())
319 && _DG2_->isTerminalNode(currentSituation.DG2Node())) {
320 // We have to compute new value and we insert a new node in diagram with
321 // this value, ...
322 GUM_SCALAR newVal = _neutral_;
323 GUM_SCALAR tempVal = _combine_(_DG1_->nodeValue(currentSituation.DG1Node()),
324 _DG2_->nodeValue(currentSituation.DG2Node()));
// Project the combined value over every modality of the target variable.
325 for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
326 newVal = _project_(newVal, tempVal);
327 return _rd_->manager()->addTerminalNode(newVal);
328 }
329
330 // If not,
331 // we'll have to do some exploration
332
333 // First we ensure that we haven't already visited this pair of nodes under the
334 // same circumstances
// Nodes absent from the tables have no retrograde variables: fall back to
// the shared all-zero _default_ table (set up in _establishVarOrder_).
335 short int* dg1NeededVar = _DG1InstantiationNeeded_.exists(currentSituation.DG1Node())
336 ? _DG1InstantiationNeeded_[currentSituation.DG1Node()]
337 : _default_;
338 Idx dg1CurrentVarPos
339 = _DG1_->isTerminalNode(currentSituation.DG1Node())
340 ? _nbVar_
341 : _rd_->variablesSequence().pos(_DG1_->node(currentSituation.DG1Node())->nodeVar());
342 short int* dg2NeededVar = _DG2InstantiationNeeded_.exists(currentSituation.DG2Node())
343 ? _DG2InstantiationNeeded_[currentSituation.DG2Node()]
344 : _default_;
345 Idx dg2CurrentVarPos
346 = _DG2_->isTerminalNode(currentSituation.DG2Node())
347 ? _nbVar_
348 : _rd_->variablesSequence().pos(_DG2_->node(currentSituation.DG2Node())->nodeVar());
349
350 short int* instNeeded = static_cast< short int* >(ALLOCATE(sizeof(short int) * _nbVar_));
351
352 for (Idx i = 0; i < _nbVar_; i++) {
353 instNeeded[i] = dg1NeededVar[i] + dg2NeededVar[i];
354 }
355
356 double curSitKey = currentSituation.key(instNeeded);
357
// Memoization check: reuse the node built the last time this exact
// (DG1 node, DG2 node, retrograde instantiation) context was explored.
358 if (_explorationTable_.exists(curSitKey)) {
359 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
360
361 return _explorationTable_[curSitKey];
362 }
363
364 // ====================================================
365
366 NodeId origDG1 = currentSituation.DG1Node(), origDG2 = currentSituation.DG2Node();
367
// The "lead" diagram is the one whose current variable comes first in the
// final order; leadFunction selects which context setter to use for it.
// NOTE(review): line 368 (the declaration of leaddg, presumably a
// MultiDimFunctionGraph pointer) was elided by the doc extractor.
369 NodeId leadNodeId = 0;
370 Idx leadVarPos = _rd_->variablesSequence().size();
371 using SetNodeFunction = void (O4DGContext::*)(const NodeId&);
372 SetNodeFunction leadFunction = nullptr;
373
374 bool sameVar = false;
375
376 if (!_DG1_->isTerminalNode(currentSituation.DG1Node())) {
377 if (currentSituation.varModality(dg1CurrentVarPos) != 0) {
378 // If var associated to current node has already been instantiated, we
379 // have to jump it
380 currentSituation.setDG1Node(_DG1_->node(currentSituation.DG1Node())
381 ->son(currentSituation.varModality(dg1CurrentVarPos) - 1));
382
383 newNode = _compute_(currentSituation, lastInstVarPos);
384 _explorationTable_.insert(curSitKey, newNode);
385 currentSituation.setDG1Node(origDG1);
386 currentSituation.setDG2Node(origDG2);
387
388 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
389
390 return newNode;
391 }
392
393 leaddg = _DG1_;
394 leadNodeId = currentSituation.DG1Node();
395 leadVarPos = dg1CurrentVarPos;
396 leadFunction = &O4DGContext::setDG1Node;
397 }
398
399 if (!_DG2_->isTerminalNode(currentSituation.DG2Node())) {
400 if (currentSituation.varModality(dg2CurrentVarPos) != 0) {
401 // If var associated to current node has already been instantiated, we
402 // have to jump it
403 currentSituation.setDG2Node(_DG2_->node(currentSituation.DG2Node())
404 ->son(currentSituation.varModality(dg2CurrentVarPos) - 1));
405
406 newNode = _compute_(currentSituation, lastInstVarPos);
407 _explorationTable_.insert(curSitKey, newNode);
408 currentSituation.setDG1Node(origDG1);
409 currentSituation.setDG2Node(origDG2);
410
411 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
412
413 return newNode;
414 }
415
416 if (leadVarPos == dg2CurrentVarPos) { sameVar = true; }
417
418 if (leadVarPos > dg2CurrentVarPos) {
419 leaddg = _DG2_;
420 leadNodeId = currentSituation.DG2Node();
421 leadVarPos = dg2CurrentVarPos;
422 leadFunction = &O4DGContext::setDG2Node;
423 }
424 }
425
426 // ====================================================
427 // Anticipated Exploration
428
429 // Before exploring nodes, we have to ensure that every anticipated
430 // exploration is done
// Any retrograde variable lying strictly between the last instantiated
// position and the lead variable must be instantiated first: build an
// internal node over it and recurse once per modality.
431 for (Idx varPos = lastInstVarPos + 1; varPos < leadVarPos; ++varPos) {
432 if (instNeeded[varPos]) {
433 const DiscreteVariable* curVar = _rd_->variablesSequence().atPos(varPos);
434 NodeId* sonsIds = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
435
436 for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
437 currentSituation.chgVarModality(varPos, modality + 1);
438
439 sonsIds[modality] = _compute_(currentSituation, varPos);
440 }
441
442 newNode = _rd_->manager()->addInternalNode(curVar, sonsIds);
443
444 _explorationTable_.insert(curSitKey, newNode);
445 currentSituation.chgVarModality(varPos, 0);
446 currentSituation.setDG1Node(origDG1);
447 currentSituation.setDG2Node(origDG2);
448
449 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
450
451 return newNode;
452 }
453 }
454
455 // ====================================================
456 // Terminal Exploration
// When the reached node(s) bear the target variable itself, the regression
// bottoms out: combine son values with the other diagram's value and
// project over the target's modalities, yielding a terminal node.
457 if (sameVar && _DG1_->node(origDG1)->nodeVar() == _targetVar_) {
458 GUM_SCALAR newVal = _neutral_;
459 for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
460 newVal = _project_(newVal,
461 _combine_(_DG1_->nodeValue(_DG1_->node(origDG1)->son(targetModa)),
462 _DG2_->nodeValue(_DG2_->node(origDG2)->son(targetModa))));
463 newNode = _rd_->manager()->addTerminalNode(newVal);
464 _explorationTable_.insert(curSitKey, newNode);
465 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
466 return newNode;
467 }
468 if (_DG1_->isTerminalNode(origDG1)) {
469 if (_DG2_->node(origDG2)->nodeVar() == _targetVar_) {
470 GUM_SCALAR newVal = _neutral_;
471 for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
472 newVal = _project_(newVal,
473 _combine_(_DG1_->nodeValue(origDG1),
474 _DG2_->nodeValue(_DG2_->node(origDG2)->son(targetModa))));
475 newNode = _rd_->manager()->addTerminalNode(newVal);
476 _explorationTable_.insert(curSitKey, newNode);
477 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
478 return newNode;
479 }
480 } else {
481 if (_DG1_->node(origDG1)->nodeVar() == _targetVar_ && _DG2_->isTerminalNode(origDG2)) {
482 GUM_SCALAR newVal = _neutral_;
483 for (Idx targetModa = 0; targetModa < _targetVar_->domainSize(); ++targetModa)
484 newVal = _project_(newVal,
485 _combine_(_DG1_->nodeValue(_DG1_->node(origDG1)->son(targetModa)),
486 _DG2_->nodeValue(origDG2)));
487 newNode = _rd_->manager()->addTerminalNode(newVal);
488 _explorationTable_.insert(curSitKey, newNode);
489 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
490 return newNode;
491 }
492 }
493
494 // ====================================================
495 // Normal Exploration
496
497 // If both nodes bear the same variable we must descend on both diagrams
498 // simultaneously; otherwise only the lead diagram descends (below).
499 if (sameVar) {
500 // If so - meaning it's the same variable - we have to go
501 // down on both
502 const InternalNode* dg1Node = _DG1_->node(origDG1);
503 const InternalNode* dg2Node = _DG2_->node(origDG2);
504
505 const DiscreteVariable* curVar = dg1Node->nodeVar();
506 Idx varPos = _rd_->variablesSequence().pos(curVar);
507 NodeId* sonsIds = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
508
509 for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
510 currentSituation.chgVarModality(varPos, modality + 1);
511 currentSituation.setDG1Node(dg1Node->son(modality));
512 currentSituation.setDG2Node(dg2Node->son(modality));
513
514 sonsIds[modality] = _compute_(currentSituation, varPos);
515 }
516
517 newNode = _rd_->manager()->addInternalNode(curVar, sonsIds);
518
519 _explorationTable_.insert(curSitKey, newNode);
// Restore the context before returning to the caller.
520 currentSituation.chgVarModality(varPos, 0);
521 currentSituation.setDG1Node(origDG1);
522 currentSituation.setDG2Node(origDG2);
523
524 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
525
526 return newNode;
527 }
528 // ====================================================
529 else {
530 const InternalNode* leaddgNode = leaddg->node(leadNodeId);
531
532 const DiscreteVariable* curVar = leaddgNode->nodeVar();
533 NodeId* sonsIds = static_cast< NodeId* >(ALLOCATE(sizeof(NodeId) * curVar->domainSize()));
534
535 for (Idx modality = 0; modality < curVar->domainSize(); modality++) {
536 currentSituation.chgVarModality(leadVarPos, modality + 1);
// Pointer-to-member call: moves only the lead diagram's current node.
537 (currentSituation.*leadFunction)(leaddgNode->son(modality));
538
539 sonsIds[modality] = _compute_(currentSituation, leadVarPos);
540 }
541
542 newNode = _rd_->manager()->addInternalNode(curVar, sonsIds);
543
544 _explorationTable_.insert(curSitKey, newNode);
545 currentSituation.chgVarModality(leadVarPos, 0);
546 currentSituation.setDG1Node(origDG1);
547 currentSituation.setDG2Node(origDG2);
548
549 DEALLOCATE(instNeeded, sizeof(short int) * _nbVar_);
550
551 return newNode;
552 }
553 }
554
555} // namespace gum
Unsafe Iterators for hashtables.
Definition hashTable.h:2428
Base class for discrete random variable.
virtual Size domainSize() const =0
The class for generic Hash Tables.
Definition hashTable.h:637
iterator begin()
Returns an unsafe iterator pointing to the beginning of the hashtable.
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
const iterator & end() noexcept
Returns the unsafe iterator pointing to the end of the hashtable.
void clear()
Removes all the elements in the hash table.
Structure used to represent a node internal structure.
const DiscreteVariable * nodeVar() const
Returns the node variable.
NodeId son(Idx modality) const
Returns the son at a given index.
static MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * getReducedAndOrderedInstance()
Returns a reduced and ordered instance.
virtual const Sequence< const DiscreteVariable * > & variablesSequence() const override
Returns a const ref to the sequence of DiscreteVariable*.
Class used to manipulate context during Function Graph Operations.
Definition o4DGContext.h:70
const NodeId & DG2Node() const
Get DG2 diagram current explored Node.
void setDG2Node(const NodeId &)
Set DG2 diagram current explored Node.
const NodeId & DG1Node() const
Get DG1 diagram current explored Node.
void chgVarModality(Idx, Idx)
Changes given variable modality.
void setDG1Node(const NodeId &)
Set DG1 diagram current explored Node.
const double & key(short int *instNeeded)
Returns o4DGContext key.
Idx varModality(Idx)
Returns given variable modality.
const DiscreteVariable * _targetVar_
The variable we work on to eliminate.
Definition regress.h:128
short int * _default_
Just a computational trick.
Definition regress.h:150
HashTable< double, NodeId > _explorationTable_
The hashtable used to know if two pair of nodes have already been visited.
Definition regress.h:142
Regress(const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *vfunction, const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *probDist, const gum::VariableSet *primedVars, const DiscreteVariable *targetVar, const GUM_SCALAR neutral)
Default constructor.
Definition regress_tpl.h:64
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * _DG1_
One of the two function graphs used for the operation.
Definition regress.h:116
HashTable< NodeId, short int * > _DG1InstantiationNeeded_
Table uses to know if a given node of given function graph has retrograde variables.
Definition regress.h:146
void _establishVarOrder_()
Computes an order for the final Decision graph that will minimize the number of re exploration.
const GUM_SCALAR _neutral_
The function to be performed on the leaves.
Definition regress.h:131
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * _rd_
The resulting function graph.
Definition regress.h:122
HashTable< NodeId, short int * > _DG2InstantiationNeeded_
Definition regress.h:147
const PROJECTOPERATOR< GUM_SCALAR > _project_
Definition regress.h:138
const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * _DG2_
The other one.
Definition regress.h:119
void _findRetrogradeVariables_(const MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > *dg, HashTable< NodeId, short int * > &dgInstNeed)
Establish for each node in both function graph if it has retrograde variables beneath it.
Idx _nbVar_
The total number of variable implied in the operation.
Definition regress.h:134
MultiDimFunctionGraph< GUM_SCALAR, TerminalNodePolicy > * compute()
Computes and builds the Function Graph that is the result of the operation.
const COMBINEOPERATOR< GUM_SCALAR > _combine_
The functions to be performed on the leaves.
Definition regress.h:137
const gum::VariableSet * _primedVars_
The set of variables we want to keep at the end.
Definition regress.h:125
NodeId _compute_(O4DGContext &currentSituation, Idx lastInstVarPos)
The main recursion function.
~Regress()
Default destructor.
Definition regress_tpl.h:85
Safe iterators for Sequence.
Definition sequence.h:1134
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition types.h:74
Size Idx
Type for indexes.
Definition types.h:79
Size NodeId
Type for node ids.
#define DEALLOCATE(x, y)
#define ALLOCATE(x)
Headers of the InternalNode class.
gum is the global namespace for all aGrUM entities
Definition agrum.h:46
Set< const DiscreteVariable * > VariableSet
Class used to compute the operation between two decision diagrams.