aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
samplingInference_tpl.h
Go to the documentation of this file.
1/****************************************************************************
2 * This file is part of the aGrUM/pyAgrum library. *
3 * *
4 * Copyright (c) 2005-2025 by *
5 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
6 * - Christophe GONZALES(_at_AMU) *
7 * *
8 * The aGrUM/pyAgrum library is free software; you can redistribute it *
9 * and/or modify it under the terms of either : *
10 * *
11 * - the GNU Lesser General Public License as published by *
12 * the Free Software Foundation, either version 3 of the License, *
13 * or (at your option) any later version, *
14 * - the MIT license (MIT), *
15 * - or both in dual license, as here. *
16 * *
17 * (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) *
18 * *
19 * This aGrUM/pyAgrum library is distributed in the hope that it will be *
20 * useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
21 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS *
22 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
23 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, *
25 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR *
26 * OTHER DEALINGS IN THE SOFTWARE. *
27 * *
28 * See LICENCES for more details. *
29 * *
30 * SPDX-FileCopyrightText: Copyright 2005-2025 *
31 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
32 * - Christophe GONZALES(_at_AMU) *
33 * SPDX-License-Identifier: LGPL-3.0-or-later OR MIT *
34 * *
35 * Contact : info_at_agrum_dot_org *
36 * homepage : http://agrum.gitlab.io *
37 * gitlab : https://gitlab.com/agrumery/agrum *
38 * *
39 ****************************************************************************/
40#pragma once
41
42
50
55
56
57#define DEFAULT_MAXITER 10000000
58#define DEFAULT_PERIOD_SIZE 100
59#define DEFAULT_VERBOSITY false
60#define DEFAULT_TIMEOUT 6000
61#define DEFAULT_EPSILON 1e-2
62#define DEFAULT_MIN_EPSILON_RATE 1e-5
63
64namespace gum {
65
66 template < typename GUM_SCALAR >
77
78 template < typename GUM_SCALAR >
80 GUM_DESTRUCTOR(SamplingInference);
81 if (_samplingBN_ != nullptr) {
82 if (isContextualized) { // otherwise _samplingBN_==&BN()
83 delete _samplingBN_;
84 }
85 }
86 }
87
88 template < typename GUM_SCALAR >
90 this->prepareInference();
91 if (_samplingBN_ == nullptr) return this->BN();
92 else return *_samplingBN_;
93 }
94
95 template < typename GUM_SCALAR >
100
101 template < typename GUM_SCALAR >
104 GUM_SCALAR virtualLBPSize) {
105 _estimator_.setFromLBP(lbp, this->hardEvidenceNodes(), virtualLBPSize);
106 this->isSetEstimator = true;
107 }
108
109 template < typename GUM_SCALAR >
111 return _estimator_.posterior(this->BN().variable(id));
112 }
113
114 template < typename GUM_SCALAR >
115 const Tensor< GUM_SCALAR >&
117 return currentPosterior(this->BN().idFromName(name));
118 }
119
120 template < typename GUM_SCALAR >
121 const Tensor< GUM_SCALAR >& SamplingInference< GUM_SCALAR >::posterior_(NodeId id) {
122 return _estimator_.posterior(this->BN().variable(id));
123 }
124
125 template < typename GUM_SCALAR >
127 // Finding Barren nodes
128
129 BarrenNodesFinder barr_nodes = BarrenNodesFinder(&this->BN().dag());
130 barr_nodes.setTargets(&this->targets());
131 barr_nodes.setEvidence(&this->hardEvidenceNodes());
132 const NodeSet& barren = barr_nodes.barrenNodes();
133
134 // creating BN fragment
136 for (const auto elmt: this->BN().dag().asNodeSet() - barren)
137 _samplingBN_->installNode(elmt);
138
139 // D-separated nodes
140
142 NodeSet requisite;
143 dsep.requisiteNodes(this->BN().dag(),
144 this->BN().nodes().asNodeSet(), // no target for approximateInference
145 this->hardEvidenceNodes(),
146 this->softEvidenceNodes(), // should be empty
147 requisite);
148 requisite += this->hardEvidenceNodes();
149
150 auto nonRequisite = this->BN().dag().asNodeSet() - requisite;
151
152 for (const auto elmt: nonRequisite)
153 _samplingBN_->uninstallNode(elmt);
154 for (const auto hard: this->hardEvidenceNodes()) {
156 I.add(this->BN().variable(hard));
157 I.chgVal(this->BN().variable(hard), this->hardEvidence()[hard]);
158
159 for (const auto& child: this->BN().children(hard)) {
160 _samplingBN_->installCPT(child, this->BN().cpt(child).extract(I));
161 }
162 }
163
164 this->isContextualized = true;
166 }
167
168 template < typename GUM_SCALAR >
173
174 template < typename GUM_SCALAR >
176 //@todo This should be in _prepareInference_
177 if (!isContextualized) { this->contextualize(); }
178
181 GUM_SCALAR w = .0; //
182
183 // Burn in
184 Ip = this->burnIn_();
185 do {
186 Ip = this->draw_(&w, Ip);
187 _estimator_.update(Ip, w);
189 } while (this->continueApproximationScheme(_estimator_.confidence()));
190
191 this->isSetEstimator = false;
192 }
193
194 template < typename GUM_SCALAR >
197
198 I->add(samplingBN().variable(nod));
199 I->chgVal(samplingBN().variable(nod), samplingBN().cpt(nod).extract(Itop).draw());
200 }
201
202 template < typename GUM_SCALAR >
204
205 template < typename GUM_SCALAR >
206 void SamplingInference< GUM_SCALAR >::onEvidenceAdded_(const NodeId id, bool isHardEvidence) {
207 if (!isHardEvidence) {
208 GUM_ERROR(FatalError, "Approximated inference only accept hard evidence")
209 }
210 }
211
212 template < typename GUM_SCALAR >
213 void SamplingInference< GUM_SCALAR >::onEvidenceErased_(const NodeId id, bool isHardEvidence) {}
214
215 template < typename GUM_SCALAR >
216 void SamplingInference< GUM_SCALAR >::onAllEvidenceErased_(bool contains_hard_evidence) {}
217
218 template < typename GUM_SCALAR >
220 bool hasChangedSoftHard) {
221 if (hasChangedSoftHard) {
222 GUM_ERROR(FatalError, "Approximated inference only accept hard evidence")
223 }
224 }
225
226 template < typename GUM_SCALAR >
228
229 template < typename GUM_SCALAR >
231
232 template < typename GUM_SCALAR >
234
235 template < typename GUM_SCALAR >
237
238 template < typename GUM_SCALAR >
240
241 template < typename GUM_SCALAR >
243
244 template < typename GUM_SCALAR >
246
247 template < typename GUM_SCALAR >
249 if (this->isInferenceReady()) {
250 _estimator_.clear();
252 }
253 }
254} // namespace gum
Class representing Fragment of Bayesian networks.
Detect barren nodes for inference in Bayesian networks.
ApproximateInference(const IBayesNet< GUM_SCALAR > *bn)
void updateApproximationScheme(unsigned int incr=1)
Update the scheme w.r.t the new error and increment steps.
void setMaxIter(Size max) override
Stopping criterion on number of iterations.
void setMaxTime(double timeout) override
Stopping criterion on timeout.
void setMinEpsilonRate(double rate) override
Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).
void setPeriodSize(Size p) override
How many samples are drawn between two consecutive stopping-criterion tests.
void initApproximationScheme()
Initialise the scheme.
bool continueApproximationScheme(double error)
Update the scheme w.r.t the new error.
void setVerbosity(bool v) override
Set the verbosity on (true) or off (false).
void setEpsilon(double eps) override
Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.
Detect barren nodes for inference in Bayesian networks.
void setEvidence(const NodeSet *observed_nodes)
sets the observed nodes in the DAG
void setTargets(const NodeSet *target_nodes)
sets the set of target nodes we are interested in
NodeSet barrenNodes()
returns the set of barren nodes
Portion of a BN identified by the list of nodes and a BayesNet.
virtual const IBayesNet< GUM_SCALAR > & BN() const final
Returns a constant reference over the IBayesNet referenced by this class.
Exception : fatal (unknown ?) error.
const NodeSet & softEvidenceNodes() const
returns the set of nodes with soft evidence
virtual void prepareInference() final
prepare the internal inference structures for the next inference
const NodeSet & hardEvidenceNodes() const
returns the set of nodes with hard evidence
virtual bool isInferenceReady() const noexcept final
returns whether the inference object is in a ready state
const NodeProperty< Idx > & hardEvidence() const
indicate for each node with hard evidence which value it took
Virtual base class for probabilistic graphical models.
Class representing the minimal interface for Bayesian network with no numerical data.
Definition IBayesNet.h:75
Class for assigning/browsing values to tuples of discrete variables.
Instantiation & chgVal(const DiscreteVariable &v, Idx newval)
Assign newval to variable v in the Instantiation.
void add(const DiscreteVariable &v) final
Adds a new variable in the Instantiation.
<agrum/BN/inference/loopyBeliefPropagation.h>
virtual const NodeSet & targets() const noexcept final
returns the list of marginal targets
void onEvidenceAdded_(const NodeId id, bool isHardEvidence) override
fired after a new evidence is inserted
const Tensor< GUM_SCALAR > & posterior_(NodeId id) override
Computes and returns the posterior of a node.
const IBayesNet< GUM_SCALAR > & samplingBN()
get the BayesNet which is used to really perform the sampling
const Tensor< GUM_SCALAR > & currentPosterior(NodeId id)
Computes and returns the actual estimation of the posterior of a node.
bool isContextualized
whether the referenced Bayesian network has been "contextualized"
void onEvidenceChanged_(const NodeId id, bool hasChangedSoftHard) override
fired after an evidence is changed, in particular when its status (soft/hard) changes
void onAllMarginalTargetsErased_() override
fired before a all marginal targets are removed
void onEvidenceErased_(const NodeId id, bool isHardEvidence) override
fired before an evidence is removed
void onStateChanged_() override
fired when the stage is changed
void onMarginalTargetAdded_(const NodeId id) override
fired after a new marginal target is inserted
virtual void addVarSample_(NodeId nod, Instantiation *I)
adds a node to current instantiation
SamplingInference(const IBayesNet< GUM_SCALAR > *bn)
default constructor
void onAllEvidenceErased_(bool contains_hard_evidence) override
fired before all the evidence are erased
void updateOutdatedStructure_() override
prepares inference when the latter is in OutdatedStructure state
virtual void setEstimatorFromLBP_(LoopyBeliefPropagation< GUM_SCALAR > *lbp, GUM_SCALAR virtualLBPSize)
Initializes the estimators object linked to the simulation.
virtual void contextualize()
Simplifying the Bayesian network with relevance reasoning to lighten the computational charge.
void onAllMarginalTargetsAdded_() override
fired after all the nodes of the BN are added as marginal targets
virtual Instantiation draw_(GUM_SCALAR *w, Instantiation prev)=0
draws a sample in the Bayesian network given a previous one
virtual void setEstimatorFromBN_()
Initializes the estimators object linked to the simulation.
virtual Instantiation burnIn_()=0
draws samples without updating the estimators
Estimator< GUM_SCALAR > _estimator_
Estimator object designed to approximate target posteriors.
void updateOutdatedTensors_() override
prepares inference when the latter is in OutdatedTensors state
virtual void onContextualize_(BayesNetFragment< GUM_SCALAR > *bn)
fired when Bayesian network is contextualized
~SamplingInference() override
destructor
void onMarginalTargetErased_(const NodeId id) override
fired before a marginal target is removed
void makeInference_() override
makes the inference by generating samples
void onModelChanged_(const GraphicalModel *bn) override
fired after a new Bayes net has been assigned to the engine
bool isSetEstimator
whether the Estimator object has been initialized
BayesNetFragment< GUM_SCALAR > * _samplingBN_
the d-separation algorithm as described in Koller & Friedman (2009)
void requisiteNodes(const DAG &dag, const NodeSet &query, const NodeSet &hardEvidence, const NodeSet &softEvidence, NodeSet &requisite) const
Fill the 'requisite' nodeset with the requisite nodes in dag given a query and evidence.
d-separation analysis (as described in Koller & Friedman 2009)
#define GUM_ERROR(type, msg)
Definition exceptions.h:72
Size NodeId
Type for node ids.
Set< NodeId > NodeSet
Some typedefs and defines for shortcuts ...
gum is the global namespace for all aGrUM entities
Definition agrum.h:46
This file contains general methods for simulation-oriented approximate inference.
#define DEFAULT_VERBOSITY
#define DEFAULT_MIN_EPSILON_RATE
#define DEFAULT_EPSILON
#define DEFAULT_TIMEOUT
#define DEFAULT_PERIOD_SIZE
#define DEFAULT_MAXITER