#define DEFAULT_MAXITER 10000000
#define DEFAULT_PERIOD_SIZE 100
#define DEFAULT_VERBOSITY false
#define DEFAULT_TIMEOUT 6000
#define DEFAULT_EPSILON 1e-2
#define DEFAULT_MIN_EPSILON_RATE 1e-5
66 template <
typename GUM_SCALAR >
78 template <
typename GUM_SCALAR >
88 template <
typename GUM_SCALAR >
95 template <
typename GUM_SCALAR >
101 template <
typename GUM_SCALAR >
104 GUM_SCALAR virtualLBPSize) {
109 template <
typename GUM_SCALAR >
114 template <
typename GUM_SCALAR >
115 const Tensor< GUM_SCALAR >&
120 template <
typename GUM_SCALAR >
125 template <
typename GUM_SCALAR >
136 for (
const auto elmt: this->
BN().dag().asNodeSet() - barren)
144 this->
BN().nodes().asNodeSet(),
150 auto nonRequisite = this->
BN().dag().asNodeSet() - requisite;
152 for (
const auto elmt: nonRequisite)
156 I.
add(this->
BN().variable(hard));
159 for (
const auto& child: this->
BN().children(hard)) {
160 _samplingBN_->installCPT(child, this->
BN().cpt(child).extract(I));
168 template <
typename GUM_SCALAR >
174 template <
typename GUM_SCALAR >
186 Ip = this->
draw_(&w, Ip);
194 template <
typename GUM_SCALAR >
202 template <
typename GUM_SCALAR >
205 template <
typename GUM_SCALAR >
207 if (!isHardEvidence) {
212 template <
typename GUM_SCALAR >
215 template <
typename GUM_SCALAR >
218 template <
typename GUM_SCALAR >
220 bool hasChangedSoftHard) {
221 if (hasChangedSoftHard) {
226 template <
typename GUM_SCALAR >
229 template <
typename GUM_SCALAR >
232 template <
typename GUM_SCALAR >
235 template <
typename GUM_SCALAR >
238 template <
typename GUM_SCALAR >
241 template <
typename GUM_SCALAR >
244 template <
typename GUM_SCALAR >
247 template <
typename GUM_SCALAR >
Class representing Fragment of Bayesian networks.
Detect barren nodes for inference in Bayesian networks.
ApproximateInference(const IBayesNet< GUM_SCALAR > *bn)
void updateApproximationScheme(unsigned int incr=1)
Update the scheme w.r.t. the new error and increment steps.
void setMaxIter(Size max) override
Stopping criterion on number of iterations.
void setMaxTime(double timeout) override
Stopping criterion on timeout.
void setMinEpsilonRate(double rate) override
Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).
void setPeriodSize(Size p) override
Number of samples drawn between two tests of the stopping criteria.
void initApproximationScheme()
Initialise the scheme.
bool continueApproximationScheme(double error)
Update the scheme w.r.t. the new error.
void setVerbosity(bool v) override
Set the verbosity on (true) or off (false).
void setEpsilon(double eps) override
Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.
Detect barren nodes for inference in Bayesian networks.
void setEvidence(const NodeSet *observed_nodes)
sets the observed nodes in the DAG
void setTargets(const NodeSet *target_nodes)
sets the set of target nodes we are interested in
NodeSet barrenNodes()
returns the set of barren nodes
Portion of a BN identified by the list of nodes and a BayesNet.
virtual const IBayesNet< GUM_SCALAR > & BN() const final
Returns a constant reference over the IBayesNet referenced by this class.
Exception : fatal (unknown ?) error.
const NodeSet & softEvidenceNodes() const
returns the set of nodes with soft evidence
virtual void prepareInference() final
prepare the internal inference structures for the next inference
const NodeSet & hardEvidenceNodes() const
returns the set of nodes with hard evidence
virtual bool isInferenceReady() const noexcept final
returns whether the inference object is in a ready state
const NodeProperty< Idx > & hardEvidence() const
indicate for each node with hard evidence which value it took
Virtual base class for probabilistic graphical models.
Class representing the minimal interface for Bayesian network with no numerical data.
Class for assigning/browsing values to tuples of discrete variables.
Instantiation & chgVal(const DiscreteVariable &v, Idx newval)
Assign newval to variable v in the Instantiation.
void add(const DiscreteVariable &v) final
Adds a new variable in the Instantiation.
<agrum/BN/inference/loopyBeliefPropagation.h>
virtual const NodeSet & targets() const noexcept final
returns the list of marginal targets
void onEvidenceAdded_(const NodeId id, bool isHardEvidence) override
fired after a new evidence is inserted
const Tensor< GUM_SCALAR > & posterior_(NodeId id) override
Computes and returns the posterior of a node.
const IBayesNet< GUM_SCALAR > & samplingBN()
get the BayesNet which is used to really perform the sampling
const Tensor< GUM_SCALAR > & currentPosterior(NodeId id)
Computes and returns the actual estimation of the posterior of a node.
bool isContextualized
whether the referenced Bayesian network has been "contextualized"
void onEvidenceChanged_(const NodeId id, bool hasChangedSoftHard) override
fired after an evidence is changed, in particular when its status (soft/hard) changes
void onAllMarginalTargetsErased_() override
fired before all marginal targets are removed
void onEvidenceErased_(const NodeId id, bool isHardEvidence) override
fired before an evidence is removed
void onStateChanged_() override
fired when the state is changed
void onMarginalTargetAdded_(const NodeId id) override
fired after a new marginal target is inserted
virtual void addVarSample_(NodeId nod, Instantiation *I)
adds a node to current instantiation
SamplingInference(const IBayesNet< GUM_SCALAR > *bn)
default constructor
void onAllEvidenceErased_(bool contains_hard_evidence) override
fired before all the evidence are erased
void updateOutdatedStructure_() override
prepares inference when the latter is in OutdatedStructure state
virtual void setEstimatorFromLBP_(LoopyBeliefPropagation< GUM_SCALAR > *lbp, GUM_SCALAR virtualLBPSize)
Initializes the estimators object linked to the simulation.
virtual void contextualize()
Simplifies the Bayesian network using relevance reasoning to lighten the computational load.
void onAllMarginalTargetsAdded_() override
fired after all the nodes of the BN are added as marginal targets
virtual Instantiation draw_(GUM_SCALAR *w, Instantiation prev)=0
draws a sample in the Bayesian network given a previous one
virtual void setEstimatorFromBN_()
Initializes the estimators object linked to the simulation.
virtual Instantiation burnIn_()=0
draws samples without updating the estimators
Estimator< GUM_SCALAR > _estimator_
Estimator object designed to approximate target posteriors.
void updateOutdatedTensors_() override
prepares inference when the latter is in OutdatedTensors state
void loopApproxInference_()
virtual void onContextualize_(BayesNetFragment< GUM_SCALAR > *bn)
fired when Bayesian network is contextualized
~SamplingInference() override
destructor
void onMarginalTargetErased_(const NodeId id) override
fired before a marginal target is removed
void makeInference_() override
makes the inference by generating samples
void onModelChanged_(const GraphicalModel *bn) override
fired after a new Bayes net has been assigned to the engine
bool isSetEstimator
whether the Estimator object has been initialized
BayesNetFragment< GUM_SCALAR > * _samplingBN_
the d-separation algorithm as described in Koller & Friedman (2009)
void requisiteNodes(const DAG &dag, const NodeSet &query, const NodeSet &hardEvidence, const NodeSet &softEvidence, NodeSet &requisite) const
Fill the 'requisite' nodeset with the requisite nodes in dag given a query and evidence.
d-separation analysis (as described in Koller & Friedman 2009)
#define GUM_ERROR(type, msg)
Size NodeId
Type for node ids.
Set< NodeId > NodeSet
Some typedefs and defines for shortcuts ...
gum is the global namespace for all aGrUM entities
This file contains general methods for simulation-oriented approximate inference.
#define DEFAULT_VERBOSITY
#define DEFAULT_MIN_EPSILON_RATE
#define DEFAULT_PERIOD_SIZE