aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
gum::SamplingInference< GUM_SCALAR > Class Template Reference (abstract)

<agrum/BN/inference/samplingInference.h> More...

#include <samplingInference.h>

Inheritance diagram for gum::SamplingInference< GUM_SCALAR >:
Collaboration diagram for gum::SamplingInference< GUM_SCALAR >:

Public Types

enum class  StateOfInference { OutdatedStructure , OutdatedTensors , ReadyForInference , Done }
 current state of the inference More...
enum class  ApproximationSchemeSTATE : char {
  Undefined , Continue , Epsilon , Rate ,
  Limit , TimeLimit , Stopped
}
 The different state of an approximation scheme. More...

Public Member Functions

virtual void contextualize ()
 Simplifying the Bayesian network with relevance reasoning to lighten the computational load.
Tensor< GUM_SCALAR > evidenceImpact (NodeId target, const NodeSet &evs)
 Create a gum::Tensor for P(target|evs) (for all instantiations of target and evs).
Tensor< GUM_SCALAR > evidenceImpact (const std::string &target, const std::vector< std::string > &evs)
 Create a gum::Tensor for P(target|evs) (for all instantiations of target and evs).
Constructors / Destructors
 SamplingInference (const IBayesNet< GUM_SCALAR > *bn)
 default constructor
 ~SamplingInference () override
 destructor
const Tensor< GUM_SCALAR > & currentPosterior (NodeId id)
 Computes and returns the actual estimation of the posterior of a node.
const Tensor< GUM_SCALAR > & currentPosterior (const std::string &name)
 Computes and returns the actual estimation of the posterior of a node by its name.
Probability computations
const IBayesNet< GUM_SCALAR > & samplingBN ()
 get the BayesNet which is used to really perform the sampling
const Tensor< GUM_SCALAR > & posterior_ (NodeId id) override
 Computes and returns the posterior of a node.
Estimator objects initializing
virtual void setEstimatorFromBN_ ()
 Initializes the estimators object linked to the simulation.
virtual void setEstimatorFromLBP_ (LoopyBeliefPropagation< GUM_SCALAR > *lbp, GUM_SCALAR virtualLBPSize)
 Initializes the estimators object linked to the simulation.
Probability computations
virtual const Tensor< GUM_SCALAR > & posterior (NodeId node)
 Computes and returns the posterior of a node.
virtual const Tensor< GUM_SCALAR > & posterior (const std::string &nodeName)
 Computes and returns the posterior of a node.
Targets
virtual void eraseAllTargets ()
 Clear all previously defined targets.
virtual void addAllTargets () final
 adds all nodes as targets
virtual void addTarget (NodeId target) final
 Add a marginal target to the list of targets.
virtual void addTarget (const std::string &nodeName) final
 Add a marginal target to the list of targets.
virtual void eraseTarget (NodeId target) final
 removes an existing (marginal) target
virtual void eraseTarget (const std::string &nodeName) final
 removes an existing (marginal) target
virtual bool isTarget (NodeId node) const final
 return true if variable is a (marginal) target
virtual bool isTarget (const std::string &nodeName) const final
 return true if variable is a (marginal) target
virtual Size nbrTargets () const noexcept final
 returns the number of marginal targets
virtual const NodeSet & targets () const noexcept final
 returns the list of marginal targets
virtual bool isInTargetMode () const noexcept final
 indicates whether the inference is in a target mode
Information Theory related functions
virtual GUM_SCALAR H (NodeId X) final
 Entropy: computes Shannon's entropy of a node given the observation.
virtual GUM_SCALAR H (const std::string &nodeName) final
 Entropy: computes Shannon's entropy of a node given the observation.
Accessors / Modifiers
virtual void setBN (const IBayesNet< GUM_SCALAR > *bn)
 assigns a new BN to the inference engine
virtual const IBayesNet< GUM_SCALAR > & BN () const final
 Returns a constant reference over the IBayesNet referenced by this class.
Accessors / Modifiers
virtual const GraphicalModel & model () const final
 Returns a constant reference over the IBayesNet referenced by this class.
virtual const NodeProperty< Size > & domainSizes () const final
 get the domain sizes of the random variables of the model
virtual bool isInferenceReady () const noexcept final
 returns whether the inference object is in a ready state
virtual bool isInferenceOutdatedStructure () const noexcept final
 returns whether the inference object is in an OutdatedStructure state
virtual bool isInferenceOutdatedTensors () const noexcept final
 returns whether the inference object is in an OutdatedTensors state
virtual bool isInferenceDone () const noexcept final
 returns whether the inference object is in a InferenceDone state
virtual void prepareInference () final
 prepare the internal inference structures for the next inference
virtual void makeInference () final
 perform the heavy computations needed to compute the targets' posteriors
virtual void clear ()
 clears all the data structures allocated for the last inference
virtual StateOfInference state () const noexcept final
 returns the state of the inference engine
Evidence
virtual void addEvidence (NodeId id, const Idx val) final
 adds a new hard evidence on node id
virtual void addEvidence (const std::string &nodeName, const Idx val) final
 adds a new hard evidence on node named nodeName
virtual void addEvidence (NodeId id, const std::string &label) final
 adds a new hard evidence on node id
virtual void addEvidence (const std::string &nodeName, const std::string &label) final
 adds a new hard evidence on node named nodeName
virtual void addEvidence (NodeId id, const std::vector< GUM_SCALAR > &vals) final
 adds a new evidence on node id (might be soft or hard)
virtual void addEvidence (const std::string &nodeName, const std::vector< GUM_SCALAR > &vals) final
 adds a new evidence on node named nodeName (might be soft or hard)
virtual void addEvidence (const Tensor< GUM_SCALAR > &pot) final
 adds a new evidence on node id (might be soft or hard)
virtual void addEvidence (Tensor< GUM_SCALAR > &&pot) final
 adds a new evidence on node id (might be soft or hard)
virtual void addSetOfEvidence (const Set< const Tensor< GUM_SCALAR > * > &potset) final
 adds a new set of evidence
virtual void addListOfEvidence (const List< const Tensor< GUM_SCALAR > * > &potlist) final
 adds a new list of evidence
virtual void chgEvidence (NodeId id, const Idx val) final
 change the value of an already existing hard evidence
virtual void chgEvidence (const std::string &nodeName, const Idx val) final
 change the value of an already existing hard evidence
virtual void chgEvidence (NodeId id, const std::string &label) final
 change the value of an already existing hard evidence
virtual void chgEvidence (const std::string &nodeName, const std::string &label) final
 change the value of an already existing hard evidence
virtual void chgEvidence (NodeId id, const std::vector< GUM_SCALAR > &vals) final
 change the value of an already existing evidence (might be soft or hard)
virtual void chgEvidence (const std::string &nodeName, const std::vector< GUM_SCALAR > &vals) final
 change the value of an already existing evidence (might be soft or hard)
virtual void chgEvidence (const Tensor< GUM_SCALAR > &pot) final
 change the value of an already existing evidence (might be soft or hard)
virtual void eraseAllEvidence () final
 removes all the evidence entered into the network
virtual void eraseEvidence (NodeId id) final
 removed the evidence, if any, corresponding to node id
virtual void eraseEvidence (const std::string &nodeName) final
 removed the evidence, if any, corresponding to node of name nodeName
virtual bool hasEvidence () const final
 indicates whether some node(s) have received evidence
virtual bool hasEvidence (NodeId id) const final
 indicates whether node id has received an evidence
virtual bool hasEvidence (const std::string &nodeName) const final
 indicates whether node id has received an evidence
virtual bool hasHardEvidence (NodeId id) const final
 indicates whether node id has received a hard evidence
virtual bool hasHardEvidence (const std::string &nodeName) const final
 indicates whether node id has received a hard evidence
virtual bool hasSoftEvidence (NodeId id) const final
 indicates whether node id has received a soft evidence
virtual bool hasSoftEvidence (const std::string &nodeName) const final
 indicates whether node id has received a soft evidence
virtual Size nbrEvidence () const final
 returns the number of evidence entered into the Bayesian network
virtual Size nbrHardEvidence () const final
 returns the number of hard evidence entered into the Bayesian network
virtual Size nbrSoftEvidence () const final
 returns the number of soft evidence entered into the Bayesian network
const NodeProperty< const Tensor< GUM_SCALAR > * > & evidence () const
 returns the set of evidence
const NodeSet & softEvidenceNodes () const
 returns the set of nodes with soft evidence
const NodeSet & hardEvidenceNodes () const
 returns the set of nodes with hard evidence
const NodeProperty< Idx > & hardEvidence () const
 indicate for each node with hard evidence which value it took
Getters and setters
void setEpsilon (double eps) override
 Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.
double epsilon () const override
 Returns the value of epsilon.
void disableEpsilon () override
 Disable stopping criterion on epsilon.
void enableEpsilon () override
 Enable stopping criterion on epsilon.
bool isEnabledEpsilon () const override
 Returns true if stopping criterion on epsilon is enabled, false otherwise.
void setMinEpsilonRate (double rate) override
 Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).
double minEpsilonRate () const override
 Returns the value of the minimal epsilon rate.
void disableMinEpsilonRate () override
 Disable stopping criterion on epsilon rate.
void enableMinEpsilonRate () override
 Enable stopping criterion on epsilon rate.
bool isEnabledMinEpsilonRate () const override
 Returns true if stopping criterion on epsilon rate is enabled, false otherwise.
void setMaxIter (Size max) override
 Stopping criterion on number of iterations.
Size maxIter () const override
 Returns the criterion on number of iterations.
void disableMaxIter () override
 Disable stopping criterion on max iterations.
void enableMaxIter () override
 Enable stopping criterion on max iterations.
bool isEnabledMaxIter () const override
 Returns true if stopping criterion on max iterations is enabled, false otherwise.
void setMaxTime (double timeout) override
 Stopping criterion on timeout.
double maxTime () const override
 Returns the timeout (in seconds).
double currentTime () const override
 Returns the current running time in second.
void disableMaxTime () override
 Disable stopping criterion on timeout.
void enableMaxTime () override
 Enable stopping criterion on timeout.
bool isEnabledMaxTime () const override
 Returns true if stopping criterion on timeout is enabled, false otherwise.
void setPeriodSize (Size p) override
 Number of samples between two tests of the stopping criteria.
Size periodSize () const override
 Returns the period size.
void setVerbosity (bool v) override
 Set the verbosity on (true) or off (false).
bool verbosity () const override
 Returns true if verbosity is enabled.
ApproximationSchemeSTATE stateApproximationScheme () const override
 Returns the approximation scheme state.
Size nbrIterations () const override
 Returns the number of iterations.
const std::vector< double > & history () const override
 Returns the scheme history.
void initApproximationScheme ()
 Initialise the scheme.
bool startOfPeriod () const
 Returns true if we are at the beginning of a period (compute error is mandatory).
void updateApproximationScheme (unsigned int incr=1)
 Update the scheme w.r.t the new error and increment steps.
Size remainingBurnIn () const
 Returns the remaining burn in.
void stopApproximationScheme ()
 Stop the approximation scheme.
bool continueApproximationScheme (double error)
 Update the scheme w.r.t the new error.
Getters and setters
std::string messageApproximationScheme () const
 Returns the approximation scheme message.

Public Attributes

Signaler3< Size, double, double > onProgress
 Progression, error and time.
Signaler1< const std::string & > onStop
 Criteria messageApproximationScheme.

Protected Member Functions

virtual Instantiation burnIn_ ()=0
 draws samples without updating the estimators
virtual Instantiation draw_ (GUM_SCALAR *w, Instantiation prev)=0
 draws a sample in the Bayesian network given a previous one
void makeInference_ () override
 makes the inference by generating samples
void loopApproxInference_ ()
virtual void addVarSample_ (NodeId nod, Instantiation *I)
 adds a node to current instantiation
virtual void onContextualize_ (BayesNetFragment< GUM_SCALAR > *bn)
 fired when Bayesian network is contextualized
void onEvidenceAdded_ (const NodeId id, bool isHardEvidence) override
 fired after a new evidence is inserted
void onEvidenceErased_ (const NodeId id, bool isHardEvidence) override
 fired before an evidence is removed
void onAllEvidenceErased_ (bool contains_hard_evidence) override
 fired before all the evidence are erased
void onEvidenceChanged_ (const NodeId id, bool hasChangedSoftHard) override
 fired after an evidence is changed, in particular when its status (soft/hard) changes
void onModelChanged_ (const GraphicalModel *bn) override
 fired after a new Bayes net has been assigned to the engine
void updateOutdatedStructure_ () override
 prepares inference when the latter is in OutdatedStructure state
void updateOutdatedTensors_ () override
 prepares inference when the latter is in OutdatedTensors state
void onMarginalTargetAdded_ (const NodeId id) override
 fired after a new marginal target is inserted
void onMarginalTargetErased_ (const NodeId id) override
 fired before a marginal target is removed
void onAllMarginalTargetsAdded_ () override
 fired after all the nodes of the BN are added as marginal targets
void onAllMarginalTargetsErased_ () override
 fired before all marginal targets are removed
void onStateChanged_ () override
 fired when the state is changed
void setTargetedMode_ ()
bool isTargetedMode_ () const
void setOutdatedStructureState_ ()
 put the inference into an outdated model structure state
void setOutdatedTensorsState_ ()
 puts the inference into an OutdatedTensors state if it is not already in an OutdatedStructure state
virtual void setState_ (const StateOfInference state) final
 set the state of the inference engine and call the notification onStateChanged_ when necessary (i.e. when the state has effectively changed).
void setModel_ (const GraphicalModel *model)
void setModelDuringConstruction_ (const GraphicalModel *model)
 assigns a model during the inference engine construction
bool hasNoModel_ () const

Protected Attributes

Estimator< GUM_SCALAR > _estimator_
 Estimator object designed to approximate target posteriors.
bool isSetEstimator = false
 whether the Estimator object has been initialized
bool isContextualized = false
 whether the referenced Bayesian network has been "contextualized"
double current_epsilon_
 Current epsilon.
double last_epsilon_
 Last epsilon value.
double current_rate_
 Current rate.
Size current_step_
 The current step.
Timer timer_
 The timer.
ApproximationSchemeSTATE current_state_
 The current state.
std::vector< double > history_
 The scheme history, used only if verbosity == true.
double eps_
 Threshold for convergence.
bool enabled_eps_
 If true, the threshold convergence is enabled.
double min_rate_eps_
 Threshold for the epsilon rate.
bool enabled_min_rate_eps_
 If true, the minimal threshold for epsilon rate is enabled.
double max_time_
 The timeout.
bool enabled_max_time_
 If true, the timeout is enabled.
Size max_iter_
 The maximum iterations.
bool enabled_max_iter_
 If true, the maximum iterations stopping criterion is enabled.
Size burn_in_
 Number of iterations before checking stopping criteria.
Size period_size_
 Checking criteria frequency.
bool verbosity_
 If true, verbosity is enabled.

Private Member Functions

void _setAllMarginalTargets_ ()
 sets all the nodes of the Bayes net as targets
void stopScheme_ (ApproximationSchemeSTATE new_state)
 Stop the scheme given a new state.

Private Attributes

BayesNetFragment< GUM_SCALAR > * _samplingBN_
bool _targeted_mode_
 whether the actual targets are default
NodeSet _targets_
 the set of marginal targets

Detailed Description

template<typename GUM_SCALAR>
class gum::SamplingInference< GUM_SCALAR >

<agrum/BN/inference/samplingInference.h>

A generic class for making sampling inference in Bayesian networks.

The goal of this class is to define the general scheme used by all sampling inference algorithms, which are implemented as derived classes of ApproximateInference. This class inherits from MarginalTargetedInference for the handling of marginal targets and from ApproximationScheme.

Definition at line 75 of file samplingInference.h.

Member Enumeration Documentation

◆ ApproximationSchemeSTATE

The different state of an approximation scheme.

Enumerator
Undefined 
Continue 
Epsilon 
Rate 
Limit 
TimeLimit 
Stopped 

Definition at line 86 of file IApproximationSchemeConfiguration.h.

86 : char {
87 Undefined,
88 Continue,
89 Epsilon,
90 Rate,
91 Limit,
92 TimeLimit,
93 Stopped
94 };

◆ StateOfInference

template<typename GUM_SCALAR>
enum class gum::GraphicalModelInference::StateOfInference
strong, inherited

current state of the inference

graphicalModelInference can be in one of 4 different states:

  • OutdatedStructure: in this state, the inference is fully unprepared to be applied because some events changed the "logical" structure of the model: for instance a node received a hard evidence, which implies that its outgoing arcs can be removed from the model, hence involving a structural change in the model.
  • OutdatedTensors: in this state, the structure of the model remains unchanged, only some tensors stored in it have changed. Therefore, the inference probably just needs to invalidate some already computed tensors to be ready. Only a light amount of preparation is needed to be able to perform inference.
  • ReadyForInference: in this state, all the data structures are ready for inference. There just remains to perform the inference computations.
  • Done: the heavy computations of inference have been done. There might still remain a few light computations to perform to get the posterior tensors we need.
Enumerator
OutdatedStructure 
OutdatedTensors 
ReadyForInference 
Done 

Definition at line 127 of file graphicalModelInference.h.

127{ OutdatedStructure, OutdatedTensors, ReadyForInference, Done };

Constructor & Destructor Documentation

◆ SamplingInference()

template<typename GUM_SCALAR>
gum::SamplingInference< GUM_SCALAR >::SamplingInference ( const IBayesNet< GUM_SCALAR > * bn)
explicit

default constructor

Warning
By default, all the nodes of the Bayes net are targets.
Note that, following aGrUM's convention, the BN is not copied but only referenced by the inference algorithm.

Definition at line 67 of file samplingInference_tpl.h.

67 :
76 }
ApproximateInference(const IBayesNet< GUM_SCALAR > *bn)
void setMaxIter(Size max) override
Stopping criterion on number of iterations.
void setMaxTime(double timeout) override
Stopping criterion on timeout.
void setMinEpsilonRate(double rate) override
Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).
void setPeriodSize(Size p) override
Number of samples between two tests of the stopping criteria.
void setVerbosity(bool v) override
Set the verbosity on (true) or off (false).
void setEpsilon(double eps) override
Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.
<agrum/BN/inference/samplingInference.h>
SamplingInference(const IBayesNet< GUM_SCALAR > *bn)
default constructor
Estimator< GUM_SCALAR > _estimator_
Estimator object designed to approximate target posteriors.
BayesNetFragment< GUM_SCALAR > * _samplingBN_

References gum::ApproximateInference< GUM_SCALAR >::ApproximateInference(), SamplingInference(), _estimator_, _samplingBN_, DEFAULT_EPSILON, DEFAULT_MAXITER, DEFAULT_MIN_EPSILON_RATE, DEFAULT_PERIOD_SIZE, DEFAULT_TIMEOUT, DEFAULT_VERBOSITY, gum::ApproximationScheme::setEpsilon(), gum::ApproximationScheme::setMaxIter(), gum::ApproximationScheme::setMaxTime(), gum::ApproximationScheme::setMinEpsilonRate(), gum::ApproximationScheme::setPeriodSize(), and gum::ApproximationScheme::setVerbosity().

Referenced by gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::ImportanceSampling< GUM_SCALAR >::ImportanceSampling(), gum::MonteCarloSampling< GUM_SCALAR >::MonteCarloSampling(), SamplingInference(), gum::WeightedSampling< GUM_SCALAR >::WeightedSampling(), and ~SamplingInference().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ ~SamplingInference()

template<typename GUM_SCALAR>
gum::SamplingInference< GUM_SCALAR >::~SamplingInference ( )
override

destructor

Definition at line 79 of file samplingInference_tpl.h.

79 {
81 if (_samplingBN_ != nullptr) {
82 if (isContextualized) { // otherwise _samplingBN_==&BN()
83 delete _samplingBN_;
84 }
85 }
86 }
bool isContextualized
whether the referenced Bayesian network has been "contextualized"

References SamplingInference(), _samplingBN_, and isContextualized.

Here is the call graph for this function:

Member Function Documentation

◆ _setAllMarginalTargets_()

template<typename GUM_SCALAR>
void gum::MarginalTargetedInference< GUM_SCALAR >::_setAllMarginalTargets_ ( )
private, inherited

sets all the nodes of the Bayes net as targets

Definition at line 229 of file marginalTargetedInference_tpl.h.

229 {
230 _targets_.clear();
231 if (!this->hasNoModel_()) {
232 _targets_ = this->BN().dag().asNodeSet();
234 }
235 }
virtual const IBayesNet< GUM_SCALAR > & BN() const final
Returns a constant reference over the IBayesNet referenced by this class.
NodeSet _targets_
the set of marginal targets
virtual void onAllMarginalTargetsAdded_()=0
fired after all the nodes of the BN are added as marginal targets

References _targets_, gum::BayesNetInference< GUM_SCALAR >::BN(), gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_(), and onAllMarginalTargetsAdded_().

Referenced by onModelChanged_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ addAllTargets()

template<typename GUM_SCALAR>
void gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets ( )
final, virtual, inherited

adds all nodes as targets

Definition at line 144 of file marginalTargetedInference_tpl.h.

144 {
145 // check if the node belongs to the Bayesian network
146 if (this->hasNoModel_())
148 "No Bayes net has been assigned to the "
149 "inference algorithm");
150
151
152 setTargetedMode_(); // does nothing if already in targeted mode
153 for (const auto target: this->BN().dag()) {
154 if (!_targets_.contains(target)) {
155 _targets_.insert(target);
158 }
159 }
160 }
virtual void setState_(const StateOfInference state) final
set the state of the inference engine and call the notification onStateChanged_ when necessary (i....
<agrum/BN/inference/marginalTargetedInference.h>
virtual void onMarginalTargetAdded_(const NodeId id)=0
fired after a new marginal target is inserted
#define GUM_ERROR(type, msg)
Definition exceptions.h:72

References _targets_, gum::BayesNetInference< GUM_SCALAR >::BN(), GUM_ERROR, gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_(), onMarginalTargetAdded_(), gum::GraphicalModelInference< GUM_SCALAR >::OutdatedStructure, gum::GraphicalModelInference< GUM_SCALAR >::setState_(), and setTargetedMode_().

Here is the call graph for this function:

◆ addEvidence() [1/8]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( const std::string & nodeName,
const Idx val )
final, virtual, inherited

adds a new hard evidence on node named nodeName

Exceptions
UndefinedElement — if nodeName does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if nodeName already has an evidence

Definition at line 235 of file graphicalModelInference_tpl.h.

236 {
238 }
<agrum/base/graphicalModels/graphicalModel.h>
virtual void addEvidence(NodeId id, const Idx val) final
adds a new hard evidence on node id
virtual const GraphicalModel & model() const final
Returns a constant reference over the IBayesNet referenced by this class.

References addEvidence(), and model().

Here is the call graph for this function:

◆ addEvidence() [2/8]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( const std::string & nodeName,
const std::string & label )
final, virtual, inherited

adds a new hard evidence on node named nodeName

Exceptions
UndefinedElement — if nodeName does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if nodeName already has an evidence

Definition at line 249 of file graphicalModelInference_tpl.h.

250 {
251 const NodeId id = this->model().idFromName(nodeName);
252 addEvidence(id, this->model().variable(id)[label]);
253 }
virtual NodeId idFromName(const std::string &name) const =0
Getter by name.

References addEvidence(), and model().

Here is the call graph for this function:

◆ addEvidence() [3/8]

template<typename GUM_SCALAR>
void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( const std::string & nodeName,
const std::vector< GUM_SCALAR > & vals )
final, virtual, inherited

adds a new evidence on node named nodeName (might be soft or hard)

Exceptions
UndefinedElement — if id does not belong to the Bayesian network
InvalidArgument — if nodeName already has an evidence
FatalError — if vals=[0,0,...,0]
InvalidArgument — if the size of vals is different from the domain size of node nodeName

Definition at line 281 of file graphicalModelInference_tpl.h.

282 {
284 }

References addEvidence(), and model().

Here is the call graph for this function:

◆ addEvidence() [4/8]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( const Tensor< GUM_SCALAR > & pot)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement — if the tensor is defined over several nodes
UndefinedElement — if the node on which the tensor is defined does not belong to the Bayesian network
InvalidArgument — if the node of the tensor already has an evidence
FatalError — if pot=[0,0,...,0]

Definition at line 323 of file graphicalModelInference_tpl.h.

References addEvidence().

Here is the call graph for this function:

◆ addEvidence() [5/8]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( NodeId id,
const Idx val )
final, virtual, inherited

adds a new hard evidence on node id

Exceptions
UndefinedElement — if id does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if id already has an evidence

Definition at line 229 of file graphicalModelInference_tpl.h.

229 {
231 }
Tensor< GUM_SCALAR > _createHardEvidence_(NodeId id, Idx val) const
create the internal structure for a hard evidence

References _createHardEvidence_(), and addEvidence().

Referenced by addEvidence(), addEvidence(), addEvidence(), addEvidence(), addEvidence(), addEvidence(), addEvidence(), addListOfEvidence(), addSetOfEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedMRFInference< GUM_SCALAR >::evidenceJointImpact(), and gum::LoopySamplingInference< GUM_SCALAR, APPROX >::makeInference_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ addEvidence() [6/8]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( NodeId id,
const std::string & label )
final, virtual, inherited

adds a new hard evidence on node id

Exceptions
UndefinedElement — if id does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if id already has an evidence

Definition at line 242 of file graphicalModelInference_tpl.h.

243 {
244 addEvidence(id, this->model().variable(id)[label]);
245 }

References addEvidence(), and model().

Here is the call graph for this function:

◆ addEvidence() [7/8]

template<typename GUM_SCALAR>
void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( NodeId id,
const std::vector< GUM_SCALAR > & vals )
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement — if id does not belong to the Bayesian network
InvalidArgument — if id already has an evidence
FatalError — if vals=[0,0,...,0]
InvalidArgument — if the size of vals is different from the domain size of node id

Definition at line 257 of file graphicalModelInference_tpl.h.

258 {
259 // checks that the evidence is meaningful
260 if (_model_ == nullptr)
262 "No Bayes net has been assigned to the "
263 "inference algorithm");
264
265 if (!_model_->exists(id)) { GUM_ERROR(UndefinedElement, id << " is not a NodeId in the model") }
266
267 if (_model_->variable(id).domainSize() != vals.size()) {
269 "node " << _model_->variable(id)
270 << " and its evidence vector have different sizes.");
271 }
272
274 pot.add(_model_->variable(id));
275 pot.fillWith(vals);
277 }
const GraphicalModel * _model_
the Bayes net on which we perform inferences

References _model_, addEvidence(), and GUM_ERROR.

Here is the call graph for this function:

◆ addEvidence() [8/8]

template<typename GUM_SCALAR>
void gum::GraphicalModelInference< GUM_SCALAR >::addEvidence ( Tensor< GUM_SCALAR > && pot)
final, virtual, inherited

adds a new evidence on node id (might be soft or hard)

Exceptions
UndefinedElement — if the tensor is defined over several nodes
UndefinedElement — if the node on which the tensor is defined does not belong to the Bayesian network
InvalidArgument — if the node of the tensor already has an evidence
FatalError — if pot=[0,0,...,0]

Definition at line 288 of file graphicalModelInference_tpl.h.

288 {
289 // check if the tensor corresponds to an evidence
290 if (pot.nbrDim() != 1) { GUM_ERROR(InvalidArgument, pot << " is not mono-dimensional.") }
291 if (_model_ == nullptr)
293 "No Bayes net has been assigned to the "
294 "inference algorithm");
295
296 NodeId id = _model_->nodeId(pot.variable(0));
297
298 if (hasEvidence(id)) {
300 " node " << id << " already has an evidence. Please use chgEvidence().");
301 }
302
303 // check whether we have a hard evidence (and also check whether the
304 // tensor only contains 0 (in this case, this will automatically raise
305 // an exception) )
306 Idx val = 0;
308
309 // insert the evidence
311 if (is_hard_evidence) { // pot is deterministic
312 _hard_evidence_.insert(id, val);
313 _hard_evidence_nodes_.insert(id);
314 } else {
315 _soft_evidence_nodes_.insert(id);
316 }
319 }
NodeProperty< const Tensor< GUM_SCALAR > * > _evidence_
the set of evidence entered into the network
bool _isHardEvidence_(const Tensor< GUM_SCALAR > &pot, Idx &val) const
checks whether a tensor corresponds to a hard evidence or not
NodeSet _soft_evidence_nodes_
the set of nodes that received soft evidence
virtual bool hasEvidence() const final
indicates whether some node(s) have received evidence
virtual void onEvidenceAdded_(const NodeId id, bool isHardEvidence)=0
fired after a new evidence is inserted
NodeSet _hard_evidence_nodes_
the set of nodes that received hard evidence
NodeProperty< Idx > _hard_evidence_
assign to each node with a hard evidence the index of its observed value

References _evidence_, _hard_evidence_, _hard_evidence_nodes_, _isHardEvidence_(), _model_, _soft_evidence_nodes_, GUM_ERROR, hasEvidence(), onEvidenceAdded_(), OutdatedStructure, and setState_().

Here is the call graph for this function:

◆ addListOfEvidence()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::addListOfEvidence ( const List< const Tensor< GUM_SCALAR > * > & potlist)
finalvirtualinherited

adds a new list of evidence

Exceptions
UndefinedElement — if some tensor is defined over several nodes
UndefinedElement — if the node on which some tensor is defined does not belong to the Bayesian network
InvalidArgument — if the node of some tensor already has an evidence
FatalError — if pot=[0,0,...,0]

Definition at line 330 of file graphicalModelInference_tpl.h.

331 {
332 for (const auto pot: potlist)
334 }

References addEvidence().

Here is the call graph for this function:

◆ addSetOfEvidence()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::addSetOfEvidence ( const Set< const Tensor< GUM_SCALAR > * > & potset)
finalvirtualinherited

adds a new set of evidence

Exceptions
UndefinedElement — if some tensor is defined over several nodes
UndefinedElement — if the node on which some tensor is defined does not belong to the Bayesian network
InvalidArgument — if the node of some tensor already has an evidence
FatalError — if pot=[0,0,...,0]

Definition at line 338 of file graphicalModelInference_tpl.h.

339 {
340 for (const auto pot: potset)
342 }

References addEvidence().

Here is the call graph for this function:

◆ addTarget() [1/2]

template<typename GUM_SCALAR>
void gum::MarginalTargetedInference< GUM_SCALAR >::addTarget ( const std::string & nodeName)
finalvirtualinherited

Add a marginal target to the list of targets.

Exceptions
UndefinedElement — if target is not a NodeId in the Bayes net

Definition at line 164 of file marginalTargetedInference_tpl.h.

164 {
165 // check if the node belongs to the Bayesian network
166 if (this->hasNoModel_())
168 "No Bayes net has been assigned to the "
169 "inference algorithm");
170
172 }
virtual void addTarget(NodeId target) final
Add a marginal target to the list of targets.

References addTarget(), gum::BayesNetInference< GUM_SCALAR >::BN(), GUM_ERROR, and gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_().

Here is the call graph for this function:

◆ addTarget() [2/2]

template<typename GUM_SCALAR>
void gum::MarginalTargetedInference< GUM_SCALAR >::addTarget ( NodeId target)
finalvirtualinherited

Add a marginal target to the list of targets.

Exceptions
UndefinedElement — if target is not a NodeId in the Bayes net

Definition at line 122 of file marginalTargetedInference_tpl.h.

122 {
123 // check if the node belongs to the Bayesian network
124 if (this->hasNoModel_())
126 "No Bayes net has been assigned to the "
127 "inference algorithm");
128
129 if (!this->BN().dag().exists(target)) {
130 GUM_ERROR(UndefinedElement, target << " is not a NodeId in the bn")
131 }
132
133 setTargetedMode_(); // does nothing if already in targeted mode
134 // add the new target
135 if (!_targets_.contains(target)) {
136 _targets_.insert(target);
139 }
140 }

References _targets_, gum::BayesNetInference< GUM_SCALAR >::BN(), GUM_ERROR, gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_(), onMarginalTargetAdded_(), gum::GraphicalModelInference< GUM_SCALAR >::OutdatedStructure, gum::GraphicalModelInference< GUM_SCALAR >::setState_(), and setTargetedMode_().

Referenced by addTarget(), and evidenceImpact().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ addVarSample_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::addVarSample_ ( NodeId nod,
Instantiation * I )
protectedvirtual

adds a node to current instantiation

Parameters
nod — the node to add to the sample
I — the current sample

Generates a random value based on the BN's CPTs and adds the node to the Instantiation with that value.

Definition at line 195 of file samplingInference_tpl.h.

195 {
197
198 I->add(samplingBN().variable(nod));
200 }
const IBayesNet< GUM_SCALAR > & samplingBN()
get the BayesNet which is used to really perform the sampling

References gum::Instantiation::add(), gum::Instantiation::chgVal(), and samplingBN().

Referenced by gum::ImportanceSampling< GUM_SCALAR >::draw_(), gum::MonteCarloSampling< GUM_SCALAR >::draw_(), and gum::WeightedSampling< GUM_SCALAR >::draw_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ BN()

template<typename GUM_SCALAR>
INLINE const IBayesNet< GUM_SCALAR > & gum::BayesNetInference< GUM_SCALAR >::BN ( ) const
finalvirtualinherited

Returns a constant reference over the IBayesNet referenced by this class.

Exceptions
UndefinedElement — raised if no Bayes net has been assigned to the inference.

Definition at line 75 of file BayesNetInference_tpl.h.

75 {
76 return static_cast< const IBayesNet< GUM_SCALAR >& >(this->model());
77 }
<agrum/BN/inference/BayesNetInference.h>

References gum::GraphicalModelInference< GUM_SCALAR >::model().

Referenced by gum::LazyPropagation< GUM_SCALAR >::LazyPropagation(), gum::ShaferShenoyInference< GUM_SCALAR >::ShaferShenoyInference(), gum::VariableElimination< GUM_SCALAR >::VariableElimination(), gum::MarginalTargetedInference< GUM_SCALAR >::_setAllMarginalTargets_(), gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets(), gum::JointTargetedInference< GUM_SCALAR >::addJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), gum::SamplingInference< GUM_SCALAR >::contextualize(), gum::SamplingInference< GUM_SCALAR >::currentPosterior(), gum::SamplingInference< GUM_SCALAR >::currentPosterior(), gum::ImportanceSampling< GUM_SCALAR >::draw_(), gum::MonteCarloSampling< GUM_SCALAR >::draw_(), gum::WeightedSampling< GUM_SCALAR >::draw_(), gum::JointTargetedInference< GUM_SCALAR >::eraseJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::MarginalTargetedInference< GUM_SCALAR >::H(), gum::JointTargetedInference< GUM_SCALAR >::isJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::isTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::isTarget(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), gum::JointTargetedInference< GUM_SCALAR >::posterior(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), gum::SamplingInference< GUM_SCALAR >::posterior_(), gum::SamplingInference< GUM_SCALAR >::samplingBN(), and gum::Estimator< GUM_SCALAR >::setFromLBP().

Here is the call graph for this function:

◆ burnIn_()

template<typename GUM_SCALAR>
virtual Instantiation gum::SamplingInference< GUM_SCALAR >::burnIn_ ( )
protectedpure virtual

draws samples without updating the estimators

Implemented in gum::GibbsSampling< GUM_SCALAR >, gum::ImportanceSampling< GUM_SCALAR >, gum::MonteCarloSampling< GUM_SCALAR >, and gum::WeightedSampling< GUM_SCALAR >.

Referenced by loopApproxInference_().

Here is the caller graph for this function:

◆ chgEvidence() [1/7]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence ( const std::string & nodeName,
const Idx val )
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement — if nodeName does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if id does not already have an evidence

Definition at line 397 of file graphicalModelInference_tpl.h.

398 {
400 }
virtual void chgEvidence(NodeId id, const Idx val) final
change the value of an already existing hard evidence

References chgEvidence(), and model().

Here is the call graph for this function:

◆ chgEvidence() [2/7]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence ( const std::string & nodeName,
const std::string & label )
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement — if nodeName does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if id does not already have an evidence

Definition at line 411 of file graphicalModelInference_tpl.h.

412 {
413 NodeId id = this->model().idFromName(nodeName);
414 chgEvidence(id, this->model().variable(id)[label]);
415 }

References chgEvidence(), and model().

Here is the call graph for this function:

◆ chgEvidence() [3/7]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence ( const std::string & nodeName,
const std::vector< GUM_SCALAR > & vals )
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement — if nodeName does not belong to the Bayesian network
InvalidArgument — if the node does not already have an evidence
FatalError — if vals=[0,0,...,0]
InvalidArgument — if the size of vals is different from the domain size of node id

Definition at line 445 of file graphicalModelInference_tpl.h.

446 {
448 }

References chgEvidence(), and model().

Here is the call graph for this function:

◆ chgEvidence() [4/7]

template<typename GUM_SCALAR>
void gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence ( const Tensor< GUM_SCALAR > & pot)
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement — if the tensor is defined over several nodes
UndefinedElement — if the node on which the tensor is defined does not belong to the Bayesian network
InvalidArgument — if the node of the tensor does not already have an evidence
FatalError — if pot=[0,0,...,0]

Definition at line 452 of file graphicalModelInference_tpl.h.

452 {
453 // check if the tensor corresponds to an evidence
454 if (pot.nbrDim() != 1) {
455 GUM_ERROR(InvalidArgument, pot << " is not a mono-dimensional tensor.")
456 }
457 if (_model_ == nullptr)
459 "No Bayes net has been assigned to the "
460 "inference algorithm");
461
462 NodeId id = _model_->nodeId(pot.variable(0));
463
464 if (!hasEvidence(id)) {
465 GUM_ERROR(InvalidArgument, id << " has no evidence. Please use addEvidence().")
466 }
467
468 // check whether we have a hard evidence (and also check whether the
469 // tensor only contains 0 (in this case, this will automatically raise
470 // an exception) )
471 Idx val;
473
474 // modify the evidence already stored
477 for (I.setFirst(); !I.end(); I.inc()) {
478 localPot->set(I, pot[I]);
479 }
480
481 // the inference state will be different
482 // whether evidence change from Hard to Soft or not.
483 bool hasChangedSoftHard = false;
484
485 if (is_hard_evidence) {
486 if (!hasHardEvidence(id)) {
487 hasChangedSoftHard = true;
488 _hard_evidence_.insert(id, val);
489 _hard_evidence_nodes_.insert(id);
490 _soft_evidence_nodes_.erase(id);
491 } else {
493 }
494 } else {
495 if (hasHardEvidence(id)) { // evidence was hard
496 _hard_evidence_.erase(id);
497 _hard_evidence_nodes_.erase(id);
498 _soft_evidence_nodes_.insert(id);
499 hasChangedSoftHard = true;
500 }
501 }
502
503 if (hasChangedSoftHard) {
505 } else {
507 }
508
510 }
virtual bool hasHardEvidence(NodeId id) const final
indicates whether node id has received a hard evidence
virtual bool isInferenceOutdatedStructure() const noexcept final
returns whether the inference object is in a OutdatedStructure state
virtual void onEvidenceChanged_(const NodeId id, bool hasChangedSoftHard)=0
fired after an evidence is changed, in particular when its status (soft/hard) changes

References _evidence_, _hard_evidence_, _hard_evidence_nodes_, _isHardEvidence_(), _model_, _soft_evidence_nodes_, gum::Instantiation::end(), GUM_ERROR, hasEvidence(), hasHardEvidence(), gum::Instantiation::inc(), isInferenceOutdatedStructure(), onEvidenceChanged_(), OutdatedStructure, OutdatedTensors, gum::Instantiation::setFirst(), and setState_().

Here is the call graph for this function:

◆ chgEvidence() [5/7]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence ( NodeId id,
const Idx val )
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement — if id does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if id does not already have an evidence

Definition at line 391 of file graphicalModelInference_tpl.h.

391 {
393 }

References _createHardEvidence_(), and chgEvidence().

Referenced by chgEvidence(), chgEvidence(), chgEvidence(), chgEvidence(), chgEvidence(), chgEvidence(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), and gum::JointTargetedMRFInference< GUM_SCALAR >::evidenceJointImpact().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ chgEvidence() [6/7]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence ( NodeId id,
const std::string & label )
finalvirtualinherited

change the value of an already existing hard evidence

Exceptions
UndefinedElement — if id does not belong to the Bayesian network
InvalidArgument — if val is not a value for id
InvalidArgument — if id does not already have an evidence

Definition at line 404 of file graphicalModelInference_tpl.h.

405 {
406 chgEvidence(id, this->model().variable(id)[label]);
407 }

References chgEvidence(), and model().

Here is the call graph for this function:

◆ chgEvidence() [7/7]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence ( NodeId id,
const std::vector< GUM_SCALAR > & vals )
finalvirtualinherited

change the value of an already existing evidence (might be soft or hard)

Exceptions
UndefinedElement — if id does not belong to the Bayesian network
InvalidArgument — if the node does not already have an evidence
FatalError — if vals=[0,0,...,0]
InvalidArgument — if the size of vals is different from the domain size of node id

Definition at line 420 of file graphicalModelInference_tpl.h.

421 {
422 // check whether this corresponds to an evidence
423 if (_model_ == nullptr)
425 "No Bayes net has been assigned to the "
426 "inference algorithm");
427
428 if (!_model_->exists(id)) { GUM_ERROR(UndefinedElement, id << " is not a NodeId in the model") }
429
430 if (_model_->variable(id).domainSize() != vals.size()) {
432 "node " << _model_->variable(id) << " and its evidence have different sizes.");
433 }
434
435 // create the tensor corresponding to vals
437 pot.add(_model_->variable(id));
438 pot.fillWith(vals);
440 }

References _model_, chgEvidence(), and GUM_ERROR.

Here is the call graph for this function:

◆ clear()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::clear ( )
virtualinherited

clears all the data structures allocated for the last inference

Reimplemented in gum::ShaferShenoyLIMIDInference< GUM_SCALAR >.

Definition at line 155 of file graphicalModelInference_tpl.h.

155 {
158 }
virtual void eraseAllEvidence() final
removes all the evidence entered into the network

References eraseAllEvidence(), OutdatedStructure, and setState_().

Referenced by setModel_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ contextualize()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::contextualize ( )
virtual

Simplifying the Bayesian network with relevance reasoning to lighten the computational charge.

Sets the reference Bayesian network as a BayesNetFragment after having eliminated nodes that are idle for simulation and computation, such as barren or d-separated nodes. Eliminates the arcs from evidence nodes to their children, after setting new CPTs for them.

Definition at line 126 of file samplingInference_tpl.h.

126 {
127 // Finding Barren nodes
128
130 barr_nodes.setTargets(&this->targets());
131 barr_nodes.setEvidence(&this->hardEvidenceNodes());
132 const NodeSet& barren = barr_nodes.barrenNodes();
133
134 // creating BN fragment
136 for (const auto elmt: this->BN().dag().asNodeSet() - barren)
137 _samplingBN_->installNode(elmt);
138
139 // D-separated nodes
140
143 dsep.requisiteNodes(this->BN().dag(),
144 this->BN().nodes().asNodeSet(), // no target for approximateInference
145 this->hardEvidenceNodes(),
146 this->softEvidenceNodes(), // should be empty
147 requisite);
148 requisite += this->hardEvidenceNodes();
149
150 auto nonRequisite = this->BN().dag().asNodeSet() - requisite;
151
152 for (const auto elmt: nonRequisite)
153 _samplingBN_->uninstallNode(elmt);
154 for (const auto hard: this->hardEvidenceNodes()) {
156 I.add(this->BN().variable(hard));
157 I.chgVal(this->BN().variable(hard), this->hardEvidence()[hard]);
158
159 for (const auto& child: this->BN().children(hard)) {
160 _samplingBN_->installCPT(child, this->BN().cpt(child).extract(I));
161 }
162 }
163
164 this->isContextualized = true;
166 }
const NodeSet & softEvidenceNodes() const
returns the set of nodes with soft evidence
const NodeSet & hardEvidenceNodes() const
returns the set of nodes with hard evidence
const NodeProperty< Idx > & hardEvidence() const
indicate for each node with hard evidence which value it took
virtual const NodeSet & targets() const noexcept final
returns the list of marginal targets
virtual void onContextualize_(BayesNetFragment< GUM_SCALAR > *bn)
fired when Bayesian network is contextualized

References _samplingBN_, gum::Instantiation::add(), gum::BarrenNodesFinder::barrenNodes(), gum::BayesNetInference< GUM_SCALAR >::BN(), gum::Instantiation::chgVal(), gum::GraphicalModelInference< GUM_SCALAR >::hardEvidence(), gum::GraphicalModelInference< GUM_SCALAR >::hardEvidenceNodes(), isContextualized, onContextualize_(), gum::dSeparationAlgorithm::requisiteNodes(), gum::BarrenNodesFinder::setEvidence(), gum::BarrenNodesFinder::setTargets(), gum::GraphicalModelInference< GUM_SCALAR >::softEvidenceNodes(), and gum::MarginalTargetedInference< GUM_SCALAR >::targets().

Referenced by loopApproxInference_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ continueApproximationScheme()

INLINE bool gum::ApproximationScheme::continueApproximationScheme ( double error)
inherited

Update the scheme w.r.t the new error.

Test the stopping criterion that are enabled.

Parameters
error — the new error value.
Returns
false if state become != ApproximationSchemeSTATE::Continue
Exceptions
OperationNotAllowed — raised if state != ApproximationSchemeSTATE::Continue.

Definition at line 229 of file approximationScheme_inl.h.

229 {
230 // For coherence, we fix the time used in the method
231
232 double timer_step = timer_.step();
233
234 if (enabled_max_time_) {
235 if (timer_step > max_time_) {
237 return false;
238 }
239 }
240
241 if (!startOfPeriod()) { return true; }
242
244 GUM_ERROR(
245 OperationNotAllowed,
246 "state of the approximation scheme is not correct : " << messageApproximationScheme());
247 }
248
249 if (verbosity()) { history_.push_back(error); }
250
251 if (enabled_max_iter_) {
252 if (current_step_ > max_iter_) {
254 return false;
255 }
256 }
257
259 current_epsilon_ = error; // eps rate isEnabled needs it so affectation was
260 // moved from eps isEnabled below
261
262 if (enabled_eps_) {
263 if (current_epsilon_ <= eps_) {
265 return false;
266 }
267 }
268
269 if (last_epsilon_ >= 0.) {
270 if (current_epsilon_ > .0) {
271 // ! current_epsilon_ can be 0. AND epsilon
272 // isEnabled can be disabled !
274 }
275 // limit with current eps ---> 0 is | 1 - ( last_eps / 0 ) | --->
276 // infinity the else means a return false if we isEnabled the rate below,
277 // as we would have returned false if epsilon isEnabled was enabled
278 else {
280 }
281
285 return false;
286 }
287 }
288 }
289
291 if (onProgress.hasListener()) {
293 }
294
295 return true;
296 } else {
297 return false;
298 }
299 }
Size current_step_
The current step.
double current_epsilon_
Current epsilon.
double last_epsilon_
Last epsilon value.
double eps_
Threshold for convergence.
bool enabled_max_time_
If true, the timeout is enabled.
Size max_iter_
The maximum iterations.
bool enabled_eps_
If true, the threshold convergence is enabled.
ApproximationSchemeSTATE current_state_
The current state.
double min_rate_eps_
Threshold for the epsilon rate.
std::vector< double > history_
The scheme history, used only if verbosity == true.
double current_rate_
Current rate.
ApproximationSchemeSTATE stateApproximationScheme() const override
Returns the approximation scheme state.
bool startOfPeriod() const
Returns true if we are at the beginning of a period (compute error is mandatory).
bool enabled_max_iter_
If true, the maximum iterations stopping criterion is enabled.
void stopScheme_(ApproximationSchemeSTATE new_state)
Stop the scheme given a new state.
bool verbosity() const override
Returns true if verbosity is enabled.
bool enabled_min_rate_eps_
If true, the minimal threshold for epsilon rate is enabled.
std::string messageApproximationScheme() const
Returns the approximation scheme message.
Signaler3< Size, double, double > onProgress
Progression, error and time.
#define GUM_EMIT3(signal, arg1, arg2, arg3)
Definition signaler3.h:61

References enabled_max_time_, and timer_.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::computeKL_(), gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), gum::SamplingInference< GUM_SCALAR >::loopApproxInference_(), gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInferenceByOrderedArcs_(), gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInferenceByRandomOrder_(), and gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInferenceNodeToNeighbours_().

Here is the caller graph for this function:

◆ currentPosterior() [1/2]

template<typename GUM_SCALAR>
const Tensor< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::currentPosterior ( const std::string & name)

Computes and returns the actual estimation of the posterior of a node by its name.

Returns
a const ref to the posterior probability of the node referred by name.
Parameters
name — the name of the node for which we need a posterior probability
Warning
for efficiency reasons, the tensor is returned by reference. In order to ensure that the tensor may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElementif node corresponding to name is not in the set of targets.
NotFoundif node corresponding to name is not in the BN.

Definition at line 116 of file samplingInference_tpl.h.

116 {
117 return currentPosterior(this->BN().idFromName(name));
118 }
const Tensor< GUM_SCALAR > & currentPosterior(NodeId id)
Computes and returns the actual estimation of the posterior of a node.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and currentPosterior().

Here is the call graph for this function:

◆ currentPosterior() [2/2]

template<typename GUM_SCALAR>
const Tensor< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::currentPosterior ( NodeId id)

Computes and returns the actual estimation of the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
id — the node for which we need a posterior probability
Warning
for efficiency reasons, the tensor is returned by reference. In order to ensure that the tensor may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElementif node is not in the set of targets.
NotFoundif node is not in the BN.

Definition at line 110 of file samplingInference_tpl.h.

110 {
111 return _estimator_.posterior(this->BN().variable(id));
112 }

References _estimator_, and gum::BayesNetInference< GUM_SCALAR >::BN().

Referenced by currentPosterior().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ currentTime()

INLINE double gum::ApproximationScheme::currentTime ( ) const
overridevirtualinherited

Returns the current running time in second.

Returns
Returns the current running time in second.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 136 of file approximationScheme_inl.h.

136{ return timer_.step(); }

References timer_.

◆ disableEpsilon()

INLINE void gum::ApproximationScheme::disableEpsilon ( )
overridevirtualinherited

Disable stopping criterion on epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 74 of file approximationScheme_inl.h.

74{ enabled_eps_ = false; }

References enabled_eps_.

Referenced by gum::learning::EMApproximationScheme::EMApproximationScheme(), and gum::learning::EMApproximationScheme::setMinEpsilonRate().

Here is the caller graph for this function:

◆ disableMaxIter()

INLINE void gum::ApproximationScheme::disableMaxIter ( )
overridevirtualinherited

Disable stopping criterion on max iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 115 of file approximationScheme_inl.h.

115{ enabled_max_iter_ = false; }

References enabled_max_iter_.

Referenced by gum::learning::GreedyHillClimbing::GreedyHillClimbing().

Here is the caller graph for this function:

◆ disableMaxTime()

INLINE void gum::ApproximationScheme::disableMaxTime ( )
overridevirtualinherited

Disable stopping criterion on timeout.

Returns
Disable stopping criterion on timeout.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 139 of file approximationScheme_inl.h.

139{ enabled_max_time_ = false; }

References enabled_max_time_.

Referenced by gum::learning::GreedyHillClimbing::GreedyHillClimbing().

Here is the caller graph for this function:

◆ disableMinEpsilonRate()

INLINE void gum::ApproximationScheme::disableMinEpsilonRate ( )
overridevirtualinherited

Disable stopping criterion on epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 95 of file approximationScheme_inl.h.

95{ enabled_min_rate_eps_ = false; }

References enabled_min_rate_eps_.

Referenced by gum::learning::GreedyHillClimbing::GreedyHillClimbing(), gum::GibbsBNdistance< GUM_SCALAR >::computeKL_(), and gum::learning::EMApproximationScheme::setEpsilon().

Here is the caller graph for this function:

◆ domainSizes()

template<typename GUM_SCALAR>
INLINE const NodeProperty< Size > & gum::GraphicalModelInference< GUM_SCALAR >::domainSizes ( ) const
finalvirtualinherited

get the domain sizes of the random variables of the model

Definition at line 173 of file graphicalModelInference_tpl.h.

173 {
174 return _domain_sizes_;
175 }
NodeProperty< Size > _domain_sizes_
the domain sizes of the random variables

References _domain_sizes_.

◆ draw_()

template<typename GUM_SCALAR>
virtual Instantiation gum::SamplingInference< GUM_SCALAR >::draw_ ( GUM_SCALAR * w,
Instantiation prev )
protectedpure virtual

draws a sample in the Bayesian network given a previous one

Parameters
w — the weight of the sample being generated
prev — the previous sample generated

Implemented in gum::GibbsSampling< GUM_SCALAR >, gum::ImportanceSampling< GUM_SCALAR >, gum::MonteCarloSampling< GUM_SCALAR >, and gum::WeightedSampling< GUM_SCALAR >.

Referenced by loopApproxInference_().

Here is the caller graph for this function:

◆ enableEpsilon()

INLINE void gum::ApproximationScheme::enableEpsilon ( )
overridevirtualinherited

Enable stopping criterion on epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 77 of file approximationScheme_inl.h.

77{ enabled_eps_ = true; }

References enabled_eps_.

◆ enableMaxIter()

INLINE void gum::ApproximationScheme::enableMaxIter ( )
overridevirtualinherited

Enable stopping criterion on max iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 118 of file approximationScheme_inl.h.

118{ enabled_max_iter_ = true; }

References enabled_max_iter_.

◆ enableMaxTime()

INLINE void gum::ApproximationScheme::enableMaxTime ( )
overridevirtualinherited

Enable stopping criterion on timeout.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 142 of file approximationScheme_inl.h.

142{ enabled_max_time_ = true; }

References enabled_max_time_.

◆ enableMinEpsilonRate()

INLINE void gum::ApproximationScheme::enableMinEpsilonRate ( )
overridevirtualinherited

Enable stopping criterion on epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 98 of file approximationScheme_inl.h.

98{ enabled_min_rate_eps_ = true; }

References enabled_min_rate_eps_.

Referenced by gum::learning::EMApproximationScheme::EMApproximationScheme(), and gum::GibbsBNdistance< GUM_SCALAR >::computeKL_().

Here is the caller graph for this function:

◆ epsilon()

INLINE double gum::ApproximationScheme::epsilon ( ) const
overridevirtualinherited

Returns the value of epsilon.

Returns
Returns the value of epsilon.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 71 of file approximationScheme_inl.h.

71{ return eps_; }

References eps_.

Referenced by gum::ImportanceSampling< GUM_SCALAR >::onContextualize_(), and gum::ImportanceSampling< GUM_SCALAR >::unsharpenBN_().

Here is the caller graph for this function:

◆ eraseAllEvidence()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::eraseAllEvidence ( )
finalvirtualinherited

removes all the evidence entered into the network

Definition at line 540 of file graphicalModelInference_tpl.h.

540 {
541 bool has_hard_evidence = !_hard_evidence_.empty();
543
544 for (const auto& pair: _evidence_) {
545 if (pair.second != nullptr) { delete (pair.second); }
546 }
547
548 _evidence_.clear();
549 _hard_evidence_.clear();
550 _hard_evidence_nodes_.clear();
551 _soft_evidence_nodes_.clear();
552
553 if (has_hard_evidence) {
555 } else {
557 }
558 }
virtual void onAllEvidenceErased_(bool contains_hard_evidence)=0
fired before all the evidence are erased

References _evidence_, _hard_evidence_, _hard_evidence_nodes_, _soft_evidence_nodes_, isInferenceOutdatedStructure(), onAllEvidenceErased_(), OutdatedStructure, OutdatedTensors, and setState_().

Referenced by clear(), gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedMRFInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), and gum::JointTargetedMRFInference< GUM_SCALAR >::jointMutualInformation().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ eraseAllTargets()

template<typename GUM_SCALAR>
INLINE void gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets ( )
virtualinherited

Clear all previously defined targets.

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 111 of file marginalTargetedInference_tpl.h.

111 {
113
114 _targets_.clear();
115 setTargetedMode_(); // does nothing if already in targeted mode
116
118 }
virtual void onAllMarginalTargetsErased_()=0
fired before a all marginal targets are removed

References _targets_, onAllMarginalTargetsErased_(), gum::GraphicalModelInference< GUM_SCALAR >::OutdatedStructure, gum::GraphicalModelInference< GUM_SCALAR >::setState_(), and setTargetedMode_().

Referenced by gum::JointTargetedInference< GUM_SCALAR >::eraseAllMarginalTargets(), and evidenceImpact().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ eraseEvidence() [1/2]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::eraseEvidence ( const std::string & nodeName)
finalvirtualinherited

removes the evidence, if any, corresponding to the node of name nodeName

Definition at line 534 of file graphicalModelInference_tpl.h.

534 {
536 }
virtual void eraseEvidence(NodeId id) final
removes the evidence, if any, corresponding to node id

References eraseEvidence(), and model().

Here is the call graph for this function:

◆ eraseEvidence() [2/2]

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::eraseEvidence ( NodeId id)
finalvirtualinherited

removes the evidence, if any, corresponding to node id

Definition at line 514 of file graphicalModelInference_tpl.h.

514 {
515 if (hasEvidence(id)) {
516 if (hasHardEvidence(id)) {
517 onEvidenceErased_(id, true);
518 _hard_evidence_.erase(id);
519 _hard_evidence_nodes_.erase(id);
521 } else {
522 onEvidenceErased_(id, false);
523 _soft_evidence_nodes_.erase(id);
525 }
526
527 delete (_evidence_[id]);
528 _evidence_.erase(id);
529 }
530 }
virtual void onEvidenceErased_(const NodeId id, bool isHardEvidence)=0
fired before an evidence is removed

References _evidence_, _hard_evidence_, _hard_evidence_nodes_, _soft_evidence_nodes_, hasEvidence(), hasHardEvidence(), isInferenceOutdatedStructure(), onEvidenceErased_(), OutdatedStructure, OutdatedTensors, and setState_().

Referenced by eraseEvidence().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ eraseTarget() [1/2]

template<typename GUM_SCALAR>
void gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget ( const std::string & nodeName)
finalvirtualinherited

removes an existing (marginal) target

Warning
If the target does not exist, the method does nothing. In particular, it does not raise any exception.

Definition at line 199 of file marginalTargetedInference_tpl.h.

199 {
200 // check if the node belongs to the Bayesian network
201 if (this->hasNoModel_())
203 "No Bayes net has been assigned to the "
204 "inference algorithm");
205
207 }
virtual void eraseTarget(NodeId target) final
removes an existing (marginal) target

References gum::BayesNetInference< GUM_SCALAR >::BN(), eraseTarget(), GUM_ERROR, and gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_().

Here is the call graph for this function:

◆ eraseTarget() [2/2]

template<typename GUM_SCALAR>
void gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget ( NodeId target)
finalvirtualinherited

removes an existing (marginal) target

Warning
If the target does not exist, the method does nothing. In particular, it does not raise any exception.

Definition at line 176 of file marginalTargetedInference_tpl.h.

176 {
177 // check if the node belongs to the Bayesian network
178 if (this->hasNoModel_())
180 "No Bayes net has been assigned to the "
181 "inference algorithm");
182
183 if (!this->BN().dag().exists(target)) {
184 GUM_ERROR(UndefinedElement, target << " is not a NodeId in the bn")
185 }
186
187
188 if (_targets_.contains(target)) {
189 _targeted_mode_ = true; // we do not use setTargetedMode_ because we do not
190 // want to clear the targets
192 _targets_.erase(target);
194 }
195 }
virtual void onMarginalTargetErased_(const NodeId id)=0
fired before a marginal target is removed
bool _targeted_mode_
whether the actual targets are default

References _targeted_mode_, _targets_, gum::BayesNetInference< GUM_SCALAR >::BN(), GUM_ERROR, gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_(), onMarginalTargetErased_(), gum::GraphicalModelInference< GUM_SCALAR >::OutdatedStructure, and gum::GraphicalModelInference< GUM_SCALAR >::setState_().

Referenced by eraseTarget().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ evidence()

template<typename GUM_SCALAR>
INLINE const NodeProperty< const Tensor< GUM_SCALAR > * > & gum::GraphicalModelInference< GUM_SCALAR >::evidence ( ) const
inherited

returns the set of evidence

Definition at line 587 of file graphicalModelInference_tpl.h.

587 {
588 return _evidence_;
589 }

References _evidence_.

Referenced by gum::ImportanceSampling< GUM_SCALAR >::onContextualize_(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::MarginalTargetedMRFInference< GUM_SCALAR >::posterior().

Here is the caller graph for this function:

◆ evidenceImpact() [1/2]

template<typename GUM_SCALAR>
Tensor< GUM_SCALAR > gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact ( const std::string & target,
const std::vector< std::string > & evs )
inherited

Create a gum::Tensor for P(target|evs) (for all instantiations of target and evs).

Warning
If some evs are d-separated, they are not included in the Tensor
Parameters
target — the name of the target variable
evs — the names of the observed variables
Returns
a Tensor

Definition at line 319 of file marginalTargetedInference_tpl.h.

321 {
322 const auto& bn = this->BN();
323 return evidenceImpact(bn.idFromName(target), bn.nodeset(evs));
324 }
Tensor< GUM_SCALAR > evidenceImpact(NodeId target, const NodeSet &evs)
Create a gum::Tensor for P(target|evs) (for all instantiations of target and evs).

References gum::BayesNetInference< GUM_SCALAR >::BN(), and evidenceImpact().

Here is the call graph for this function:

◆ evidenceImpact() [2/2]

template<typename GUM_SCALAR>
Tensor< GUM_SCALAR > gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact ( NodeId target,
const NodeSet & evs )
inherited

Create a gum::Tensor for P(target|evs) (for all instantiations of target and evs).

Warning
If some evs are d-separated, they are not included in the Tensor
Parameters
target — the nodeId of the targeted variable
evs — the set of nodeIds of the observed variables
Returns
a Tensor

Definition at line 280 of file marginalTargetedInference_tpl.h.

281 {
282 const auto& vtarget = this->BN().variable(target);
283
284 if (evs.contains(target)) {
286 "Target <" << vtarget.name() << "> (" << target << ") can not be in evs (" << evs
287 << ").");
288 }
289 auto condset = this->BN().minimalCondSet(target, evs);
290
292 this->eraseAllTargets();
293 this->eraseAllEvidence();
294 res.add(this->BN().variable(target));
295 this->addTarget(target);
296 for (const auto& n: condset) {
297 res.add(this->BN().variable(n));
298 this->addEvidence(n, 0);
299 }
300
302 for (inst.setFirst(); !inst.end(); inst.incNotVar(vtarget)) {
303 // inferring
304 for (const auto& n: condset)
305 this->chgEvidence(n, inst.val(this->BN().variable(n)));
306 this->makeInference();
307 // populate res
308 const auto& pot = this->posterior(target);
309 for (inst.setFirstVar(vtarget); !inst.end(); inst.incVar(vtarget)) {
310 res.set(inst, pot[inst]);
311 }
312 inst.setFirstVar(vtarget); // remove inst.end() flag
313 }
314
315 return res;
316 }
virtual void makeInference() final
perform the heavy computations needed to compute the targets' posteriors
virtual const Tensor< GUM_SCALAR > & posterior(NodeId node)
Computes and returns the posterior of a node.
virtual void eraseAllTargets()
Clear all previously defined targets.

References gum::GraphicalModelInference< GUM_SCALAR >::addEvidence(), addTarget(), gum::BayesNetInference< GUM_SCALAR >::BN(), gum::GraphicalModelInference< GUM_SCALAR >::chgEvidence(), gum::Set< Key >::contains(), gum::Instantiation::end(), gum::GraphicalModelInference< GUM_SCALAR >::eraseAllEvidence(), eraseAllTargets(), GUM_ERROR, gum::Instantiation::incNotVar(), gum::Instantiation::incVar(), gum::GraphicalModelInference< GUM_SCALAR >::makeInference(), posterior(), gum::Instantiation::setFirst(), gum::Instantiation::setFirstVar(), and gum::Instantiation::val().

Referenced by evidenceImpact().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ H() [1/2]

template<typename GUM_SCALAR>
INLINE GUM_SCALAR gum::MarginalTargetedInference< GUM_SCALAR >::H ( const std::string & nodeName)
finalvirtualinherited

Entropy: computes Shannon's entropy of a node given the observation.

See also
http://en.wikipedia.org/wiki/Information_entropy

Definition at line 275 of file marginalTargetedInference_tpl.h.

275 {
276 return H(this->BN().idFromName(nodeName));
277 }
virtual GUM_SCALAR H(NodeId X) final
Entropy: computes Shannon's entropy of a node given the observation.

References gum::BayesNetInference< GUM_SCALAR >::BN(), and H().

Here is the call graph for this function:

◆ H() [2/2]

template<typename GUM_SCALAR>
INLINE GUM_SCALAR gum::MarginalTargetedInference< GUM_SCALAR >::H ( NodeId X)
finalvirtualinherited

Entropy: computes Shannon's entropy of a node given the observation.

See also
http://en.wikipedia.org/wiki/Information_entropy

Definition at line 267 of file marginalTargetedInference_tpl.h.

267 {
268 return posterior(X).entropy();
269 }

References posterior().

Referenced by H().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ hardEvidence()

template<typename GUM_SCALAR>
INLINE const NodeProperty< Idx > & gum::GraphicalModelInference< GUM_SCALAR >::hardEvidence ( ) const
inherited

indicates for each node with hard evidence which value it took

Definition at line 580 of file graphicalModelInference_tpl.h.

580 {
581 return _hard_evidence_;
582 }

References _hard_evidence_.

Referenced by gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::SamplingInference< GUM_SCALAR >::contextualize(), gum::ImportanceSampling< GUM_SCALAR >::draw_(), and gum::WeightedSampling< GUM_SCALAR >::draw_().

Here is the caller graph for this function:

◆ hardEvidenceNodes()

template<typename GUM_SCALAR>
INLINE const NodeSet & gum::GraphicalModelInference< GUM_SCALAR >::hardEvidenceNodes ( ) const
inherited

◆ hasEvidence() [1/3]

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::hasEvidence ( ) const
finalvirtualinherited

indicates whether some node(s) have received evidence

Definition at line 346 of file graphicalModelInference_tpl.h.

346 {
347 return !_evidence_.empty();
348 }

References _evidence_.

Referenced by addEvidence(), chgEvidence(), eraseEvidence(), and hasEvidence().

Here is the caller graph for this function:

◆ hasEvidence() [2/3]

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::hasEvidence ( const std::string & nodeName) const
finalvirtualinherited

indicates whether node id has received an evidence

Definition at line 371 of file graphicalModelInference_tpl.h.

371 {
372 return hasEvidence(this->model().idFromName(nodeName));
373 }

References hasEvidence(), and model().

Here is the call graph for this function:

◆ hasEvidence() [3/3]

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::hasEvidence ( NodeId id) const
finalvirtualinherited

indicates whether node id has received an evidence

Definition at line 352 of file graphicalModelInference_tpl.h.

352 {
353 return _evidence_.exists(id);
354 }

References _evidence_.

◆ hasHardEvidence() [1/2]

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::hasHardEvidence ( const std::string & nodeName) const
finalvirtualinherited

indicates whether node id has received a hard evidence

Definition at line 378 of file graphicalModelInference_tpl.h.

378 {
379 return hasHardEvidence(this->model().idFromName(nodeName));
380 }

References hasHardEvidence(), and model().

Here is the call graph for this function:

◆ hasHardEvidence() [2/2]

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::hasHardEvidence ( NodeId id) const
finalvirtualinherited

indicates whether node id has received a hard evidence

Definition at line 358 of file graphicalModelInference_tpl.h.

358 {
359 return _hard_evidence_nodes_.exists(id);
360 }

References _hard_evidence_nodes_.

Referenced by chgEvidence(), gum::ImportanceSampling< GUM_SCALAR >::draw_(), eraseEvidence(), hasHardEvidence(), and gum::JointTargetedMRFInference< GUM_SCALAR >::jointPosterior().

Here is the caller graph for this function:

◆ hasNoModel_()

template<typename GUM_SCALAR>
bool gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_ ( ) const
inlineprotectedinherited

Definition at line 542 of file graphicalModelInference.h.

542{ return _model_ == nullptr; };

References _model_.

Referenced by gum::EvidenceInference< GUM_SCALAR >::EvidenceInference(), gum::EvidenceMRFInference< GUM_SCALAR >::EvidenceMRFInference(), gum::JointTargetedInference< GUM_SCALAR >::JointTargetedInference(), gum::JointTargetedMRFInference< GUM_SCALAR >::JointTargetedMRFInference(), gum::MarginalTargetedInference< GUM_SCALAR >::MarginalTargetedInference(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::MarginalTargetedMRFInference(), _computeDomainSizes_(), gum::MarginalTargetedInference< GUM_SCALAR >::_setAllMarginalTargets_(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::_setAllMarginalTargets_(), gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::addAllTargets(), gum::JointTargetedInference< GUM_SCALAR >::addJointTarget(), gum::JointTargetedMRFInference< GUM_SCALAR >::addJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::addTarget(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::addTarget(), gum::JointTargetedInference< GUM_SCALAR >::eraseJointTarget(), gum::JointTargetedMRFInference< GUM_SCALAR >::eraseJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::eraseTarget(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::eraseTarget(), gum::JointTargetedInference< GUM_SCALAR >::isJointTarget(), gum::JointTargetedMRFInference< GUM_SCALAR >::isJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::isTarget(), and gum::MarginalTargetedMRFInference< GUM_SCALAR >::isTarget().

Here is the caller graph for this function:

◆ hasSoftEvidence() [1/2]

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::hasSoftEvidence ( const std::string & nodeName) const
finalvirtualinherited

indicates whether node id has received a soft evidence

Definition at line 385 of file graphicalModelInference_tpl.h.

385 {
386 return hasSoftEvidence(this->model().idFromName(nodeName));
387 }
virtual bool hasSoftEvidence(NodeId id) const final
indicates whether node id has received a soft evidence

References hasSoftEvidence(), and model().

Here is the call graph for this function:

◆ hasSoftEvidence() [2/2]

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::hasSoftEvidence ( NodeId id) const
finalvirtualinherited

indicates whether node id has received a soft evidence

Definition at line 364 of file graphicalModelInference_tpl.h.

364 {
365 return _soft_evidence_nodes_.exists(id);
366 }

References _soft_evidence_nodes_.

Referenced by hasSoftEvidence().

Here is the caller graph for this function:

◆ history()

INLINE const std::vector< double > & gum::ApproximationScheme::history ( ) const
overridevirtualinherited

Returns the scheme history.

Returns
Returns the scheme history.
Exceptions
OperationNotAllowed — raised if the scheme did not perform or if verbosity is set to false.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 178 of file approximationScheme_inl.h.

178 {
180 GUM_ERROR(OperationNotAllowed, "state of the approximation scheme is undefined")
181 }
182
183 if (!verbosity()) GUM_ERROR(OperationNotAllowed, "No history when verbosity=false")
184
185 return history_;
186 }

References GUM_ERROR, stateApproximationScheme(), and gum::IApproximationSchemeConfiguration::Undefined.

Here is the call graph for this function:

◆ initApproximationScheme()

INLINE void gum::ApproximationScheme::initApproximationScheme ( )
inherited

Initialise the scheme.

Definition at line 189 of file approximationScheme_inl.h.

189 {
191 current_step_ = 0;
193 history_.clear();
194 timer_.reset();
195 }

References ApproximationScheme(), gum::IApproximationSchemeConfiguration::Continue, current_epsilon_, current_rate_, current_state_, current_step_, and initApproximationScheme().

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::computeKL_(), initApproximationScheme(), gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), gum::SamplingInference< GUM_SCALAR >::loopApproxInference_(), gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInference(), and gum::SamplingInference< GUM_SCALAR >::onStateChanged_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ isEnabledEpsilon()

INLINE bool gum::ApproximationScheme::isEnabledEpsilon ( ) const
overridevirtualinherited

Returns true if stopping criterion on epsilon is enabled, false otherwise.

Returns
Returns true if stopping criterion on epsilon is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 81 of file approximationScheme_inl.h.

81{ return enabled_eps_; }

References enabled_eps_.

◆ isEnabledMaxIter()

INLINE bool gum::ApproximationScheme::isEnabledMaxIter ( ) const
overridevirtualinherited

Returns true if stopping criterion on max iterations is enabled, false otherwise.

Returns
Returns true if stopping criterion on max iterations is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 122 of file approximationScheme_inl.h.

122{ return enabled_max_iter_; }

References enabled_max_iter_.

◆ isEnabledMaxTime()

INLINE bool gum::ApproximationScheme::isEnabledMaxTime ( ) const
overridevirtualinherited

Returns true if stopping criterion on timeout is enabled, false otherwise.

Returns
Returns true if stopping criterion on timeout is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 146 of file approximationScheme_inl.h.

146{ return enabled_max_time_; }

References enabled_max_time_.

◆ isEnabledMinEpsilonRate()

INLINE bool gum::ApproximationScheme::isEnabledMinEpsilonRate ( ) const
overridevirtualinherited

Returns true if stopping criterion on epsilon rate is enabled, false otherwise.

Returns
Returns true if stopping criterion on epsilon rate is enabled, false otherwise.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 102 of file approximationScheme_inl.h.

102{ return enabled_min_rate_eps_; }

References enabled_min_rate_eps_.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::computeKL_().

Here is the caller graph for this function:

◆ isInferenceDone()

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::isInferenceDone ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a InferenceDone state

The inference object is in a done state when the posteriors can be retrieved without performing a new inference, i.e., all the heavy computations have already been performed. Typically, in a junction tree algorithm, this corresponds to a situation in which all the messages needed in the JT have been computed and sent.

Definition at line 104 of file graphicalModelInference_tpl.h.

104 {
106 }
StateOfInference _state_
the current state of the inference (outdated/ready/done)

References _state_, and Done.

Referenced by gum::JointTargetedInference< GUM_SCALAR >::jointPosterior(), gum::JointTargetedMRFInference< GUM_SCALAR >::jointPosterior(), makeInference(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::posterior(), and prepareInference().

Here is the caller graph for this function:

◆ isInferenceOutdatedStructure()

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::isInferenceOutdatedStructure ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a OutdatedStructure state

Definition at line 92 of file graphicalModelInference_tpl.h.

92 {
94 }

References _state_, and OutdatedStructure.

Referenced by chgEvidence(), eraseAllEvidence(), and eraseEvidence().

Here is the caller graph for this function:

◆ isInferenceOutdatedTensors()

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::isInferenceOutdatedTensors ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a OutdatedTensor state

Definition at line 98 of file graphicalModelInference_tpl.h.

98 {
100 }

References _state_, and OutdatedTensors.

◆ isInferenceReady()

template<typename GUM_SCALAR>
INLINE bool gum::GraphicalModelInference< GUM_SCALAR >::isInferenceReady ( ) const
finalvirtualnoexceptinherited

returns whether the inference object is in a ready state

Definition at line 86 of file graphicalModelInference_tpl.h.

References _state_, and ReadyForInference.

Referenced by makeInference(), gum::SamplingInference< GUM_SCALAR >::onStateChanged_(), and prepareInference().

Here is the caller graph for this function:

◆ isInTargetMode()

template<typename GUM_SCALAR>
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isInTargetMode ( ) const
finalvirtualnoexceptinherited

indicates whether the inference is in a target mode

Definition at line 223 of file marginalTargetedInference_tpl.h.

223 {
224 return _targeted_mode_;
225 }

References _targeted_mode_.

◆ isTarget() [1/2]

template<typename GUM_SCALAR>
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isTarget ( const std::string & nodeName) const
finalvirtualinherited

return true if variable is a (marginal) target

Definition at line 105 of file marginalTargetedInference_tpl.h.

105 {
106 return isTarget(this->BN().idFromName(nodeName));
107 }
virtual bool isTarget(NodeId node) const final
return true if variable is a (marginal) target

References gum::BayesNetInference< GUM_SCALAR >::BN(), and isTarget().

Here is the call graph for this function:

◆ isTarget() [2/2]

template<typename GUM_SCALAR>
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isTarget ( NodeId node) const
finalvirtualinherited

return true if variable is a (marginal) target

Definition at line 90 of file marginalTargetedInference_tpl.h.

90 {
91 // check that the variable belongs to the bn
92 if (this->hasNoModel_())
94 "No Bayes net has been assigned to the "
95 "inference algorithm");
96 if (!this->BN().dag().exists(node)) {
97 GUM_ERROR(UndefinedElement, node << " is not a NodeId in the bn")
98 }
99
100 return _targets_.contains(node);
101 }

References _targets_, gum::BayesNetInference< GUM_SCALAR >::BN(), GUM_ERROR, and gum::GraphicalModelInference< GUM_SCALAR >::hasNoModel_().

Referenced by isTarget(), gum::JointTargetedInference< GUM_SCALAR >::posterior(), and posterior().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ isTargetedMode_()

template<typename GUM_SCALAR>
INLINE bool gum::MarginalTargetedInference< GUM_SCALAR >::isTargetedMode_ ( ) const
protectedinherited

Definition at line 327 of file marginalTargetedInference_tpl.h.

327 {
328 return _targeted_mode_;
329 }

References _targeted_mode_.

◆ loopApproxInference_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::loopApproxInference_ ( )
protected

Definition at line 175 of file samplingInference_tpl.h.

175 {
176 //@todo This should be in _prepareInference_
177 if (!isContextualized) { this->contextualize(); }
178
181 GUM_SCALAR w = .0; //
182
183 // Burn in
184 Ip = this->burnIn_();
185 do {
186 Ip = this->draw_(&w, Ip);
187 _estimator_.update(Ip, w);
189 } while (this->continueApproximationScheme(_estimator_.confidence()));
190
191 this->isSetEstimator = false;
192 }
void updateApproximationScheme(unsigned int incr=1)
Update the scheme w.r.t the new error and increment steps.
void initApproximationScheme()
Initialise the scheme.
bool continueApproximationScheme(double error)
Update the scheme w.r.t the new error.
virtual void contextualize()
Simplifying the Bayesian network with relevance reasoning to lighten the computational charge.
virtual Instantiation draw_(GUM_SCALAR *w, Instantiation prev)=0
draws a sample in the Bayesian network given a previous one
virtual Instantiation burnIn_()=0
draws samples without updating the estimators
bool isSetEstimator
whether the Estimator object has been initialized

References _estimator_, burnIn_(), contextualize(), gum::ApproximationScheme::continueApproximationScheme(), draw_(), gum::ApproximationScheme::initApproximationScheme(), isContextualized, isSetEstimator, and gum::ApproximationScheme::updateApproximationScheme().

Referenced by makeInference_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ makeInference()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::makeInference ( )
finalvirtualinherited

perform the heavy computations needed to compute the targets' posteriors

In a Junction tree propagation scheme, for instance, the heavy computations are those of the messages sent in the JT. This is precisely what makeInference should compute. Later, the computations of the posteriors can be done "lightly" by multiplying and projecting those messages.

Definition at line 638 of file graphicalModelInference_tpl.h.

638 {
639 if (isInferenceDone()) { return; }
640
642
644
646 }
virtual void prepareInference() final
prepare the internal inference structures for the next inference
virtual bool isInferenceReady() const noexcept final
returns whether the inference object is in a ready state
virtual void makeInference_()=0
called when the inference has to be performed effectively
virtual bool isInferenceDone() const noexcept final
returns whether the inference object is in a InferenceDone state

References Done, isInferenceDone(), isInferenceReady(), makeInference_(), prepareInference(), and setState_().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::evidenceImpact(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::evidenceImpact(), gum::JointTargetedInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedMRFInference< GUM_SCALAR >::evidenceJointImpact(), gum::JointTargetedInference< GUM_SCALAR >::jointMutualInformation(), gum::JointTargetedMRFInference< GUM_SCALAR >::jointMutualInformation(), gum::JointTargetedInference< GUM_SCALAR >::jointPosterior(), gum::JointTargetedMRFInference< GUM_SCALAR >::jointPosterior(), gum::LoopySamplingInference< GUM_SCALAR, APPROX >::makeInference_(), gum::MarginalTargetedInference< GUM_SCALAR >::posterior(), and gum::MarginalTargetedMRFInference< GUM_SCALAR >::posterior().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ makeInference_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::makeInference_ ( )
overrideprotectedvirtual

makes the inference by generating samples

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 169 of file samplingInference_tpl.h.

169 {
172 }
virtual void setEstimatorFromBN_()
Initializes the estimators object linked to the simulation.

References isSetEstimator, loopApproxInference_(), and setEstimatorFromBN_().

Here is the call graph for this function:

◆ maxIter()

INLINE Size gum::ApproximationScheme::maxIter ( ) const
overridevirtualinherited

Returns the criterion on number of iterations.

Returns
Returns the criterion on number of iterations.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 112 of file approximationScheme_inl.h.

112{ return max_iter_; }

References max_iter_.

◆ maxTime()

INLINE double gum::ApproximationScheme::maxTime ( ) const
overridevirtualinherited

Returns the timeout (in seconds).

Returns
Returns the timeout (in seconds).

Implements gum::IApproximationSchemeConfiguration.

Definition at line 133 of file approximationScheme_inl.h.

133{ return max_time_; }

References max_time_.

◆ messageApproximationScheme()

INLINE std::string gum::IApproximationSchemeConfiguration::messageApproximationScheme ( ) const
inherited

Returns the approximation scheme message.

Returns
Returns the approximation scheme message.

Definition at line 59 of file IApproximationSchemeConfiguration_inl.h.

59 {
60 std::stringstream s;
61
62 switch (stateApproximationScheme()) {
63 case ApproximationSchemeSTATE::Continue : s << "in progress"; break;
64
65 case ApproximationSchemeSTATE::Epsilon : s << "stopped with epsilon=" << epsilon(); break;
66
67 case ApproximationSchemeSTATE::Rate : s << "stopped with rate=" << minEpsilonRate(); break;
68
69 case ApproximationSchemeSTATE::Limit : s << "stopped with max iteration=" << maxIter(); break;
70
71 case ApproximationSchemeSTATE::TimeLimit : s << "stopped with timeout=" << maxTime(); break;
72
73 case ApproximationSchemeSTATE::Stopped : s << "stopped on request"; break;
74
75 case ApproximationSchemeSTATE::Undefined : s << "undefined state"; break;
76 };
77
78 return s.str();
79 }
virtual double epsilon() const =0
Returns the value of epsilon.
virtual ApproximationSchemeSTATE stateApproximationScheme() const =0
Returns the approximation scheme state.
virtual double minEpsilonRate() const =0
Returns the value of the minimal epsilon rate.
virtual Size maxIter() const =0
Returns the criterion on number of iterations.
virtual double maxTime() const =0
Returns the timeout (in seconds).

References Continue, Epsilon, epsilon(), Limit, maxIter(), maxTime(), minEpsilonRate(), Rate, stateApproximationScheme(), Stopped, TimeLimit, and Undefined.

Referenced by gum::credal::InferenceEngine< GUM_SCALAR >::getApproximationSchemeMsg(), and gum::credal::MultipleInferenceEngine< GUM_SCALAR, LazyPropagation< GUM_SCALAR > >::stateApproximationScheme().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ minEpsilonRate()

INLINE double gum::ApproximationScheme::minEpsilonRate ( ) const
overridevirtualinherited

Returns the value of the minimal epsilon rate.

Returns
Returns the value of the minimal epsilon rate.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 92 of file approximationScheme_inl.h.

92{ return min_rate_eps_; }

References min_rate_eps_.

◆ model()

template<typename GUM_SCALAR>
INLINE const GraphicalModel & gum::GraphicalModelInference< GUM_SCALAR >::model ( ) const
finalvirtualinherited

Returns a constant reference over the IBayesNet referenced by this class.

Exceptions
UndefinedElement — raised if no Bayes net has been assigned to the inference.

Definition at line 126 of file graphicalModelInference_tpl.h.

126 {
127 if (_model_ == nullptr)
129 "No Bayes net has been assigned to "
130 "the inference algorithm.");
131 return *_model_;
132 }

References _model_, and GUM_ERROR.

Referenced by GraphicalModelInference(), addEvidence(), addEvidence(), addEvidence(), addEvidence(), gum::BayesNetInference< GUM_SCALAR >::BN(), chgEvidence(), chgEvidence(), chgEvidence(), chgEvidence(), eraseEvidence(), hasEvidence(), hasHardEvidence(), hasSoftEvidence(), gum::MRFInference< GUM_SCALAR >::MRF(), onModelChanged_(), gum::ShaferShenoyLIMIDInference< GUM_SCALAR >::onModelChanged_(), setModel_(), and setModelDuringConstruction_().

Here is the caller graph for this function:

◆ nbrEvidence()

template<typename GUM_SCALAR>
INLINE Size gum::GraphicalModelInference< GUM_SCALAR >::nbrEvidence ( ) const
finalvirtualinherited

returns the number of evidence entered into the Bayesian network

Definition at line 562 of file graphicalModelInference_tpl.h.

562 {
563 return _evidence_.size();
564 }

References _evidence_.

◆ nbrHardEvidence()

template<typename GUM_SCALAR>
INLINE Size gum::GraphicalModelInference< GUM_SCALAR >::nbrHardEvidence ( ) const
finalvirtualinherited

returns the number of hard evidence entered into the Bayesian network

Definition at line 568 of file graphicalModelInference_tpl.h.

568 {
569 return _hard_evidence_nodes_.size();
570 }

References _hard_evidence_nodes_.

◆ nbrIterations()

INLINE Size gum::ApproximationScheme::nbrIterations ( ) const
overridevirtualinherited

Returns the number of iterations.

Returns
Returns the number of iterations.
Exceptions
OperationNotAllowedRaised if the scheme did not perform.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 169 of file approximationScheme_inl.h.

169 {
171 GUM_ERROR(OperationNotAllowed, "state of the approximation scheme is undefined")
172 }
173
174 return current_step_;
175 }

References current_step_, GUM_ERROR, stateApproximationScheme(), and gum::IApproximationSchemeConfiguration::Undefined.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::computeKL_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ nbrSoftEvidence()

template<typename GUM_SCALAR>
INLINE Size gum::GraphicalModelInference< GUM_SCALAR >::nbrSoftEvidence ( ) const
finalvirtualinherited

returns the number of soft evidence entered into the Bayesian network

Definition at line 574 of file graphicalModelInference_tpl.h.

574 {
575 return _soft_evidence_nodes_.size();
576 }

References _soft_evidence_nodes_.

◆ nbrTargets()

template<typename GUM_SCALAR>
INLINE Size gum::MarginalTargetedInference< GUM_SCALAR >::nbrTargets ( ) const
finalvirtualnoexceptinherited

returns the number of marginal targets

Definition at line 217 of file marginalTargetedInference_tpl.h.

217 {
218 return _targets_.size();
219 }

References _targets_.

◆ onAllEvidenceErased_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onAllEvidenceErased_ ( bool contains_hard_evidence)
overrideprotectedvirtual

fired before all the evidence are erased

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 216 of file samplingInference_tpl.h.

216{}

◆ onAllMarginalTargetsAdded_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onAllMarginalTargetsAdded_ ( )
overrideprotectedvirtual

fired after all the nodes of the BN are added as marginal targets

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 242 of file samplingInference_tpl.h.

242{}

◆ onAllMarginalTargetsErased_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onAllMarginalTargetsErased_ ( )
overrideprotectedvirtual

fired before all marginal targets are removed

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 245 of file samplingInference_tpl.h.

245{}

◆ onContextualize_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onContextualize_ ( BayesNetFragment< GUM_SCALAR > * bn)
protectedvirtual

fired when Bayesian network is contextualized

Parameters
bnthe contextualized BayesNetFragment
targetsinference target variables
hardEvNodeshard evidence nodes
hardEvhard evidences values

Reimplemented in gum::ImportanceSampling< GUM_SCALAR >.

Definition at line 203 of file samplingInference_tpl.h.

203{}

Referenced by contextualize().

Here is the caller graph for this function:

◆ onEvidenceAdded_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onEvidenceAdded_ ( const NodeId id,
bool isHardEvidence )
overrideprotectedvirtual

fired after a new evidence is inserted

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 206 of file samplingInference_tpl.h.

206 {
207 if (!isHardEvidence) {
208 GUM_ERROR(FatalError, "Approximated inference only accept hard evidence")
209 }
210 }

References GUM_ERROR.

◆ onEvidenceChanged_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onEvidenceChanged_ ( const NodeId id,
bool hasChangedSoftHard )
overrideprotectedvirtual

fired after an evidence is changed, in particular when its status (soft/hard) changes

Parameters
nodeIdthe node of the changed evidence
hasChangedSoftHardtrue if the evidence has changed from Soft to Hard or from Hard to Soft

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 219 of file samplingInference_tpl.h.

220 {
221 if (hasChangedSoftHard) {
222 GUM_ERROR(FatalError, "Approximated inference only accept hard evidence")
223 }
224 }

References GUM_ERROR.

◆ onEvidenceErased_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onEvidenceErased_ ( const NodeId id,
bool isHardEvidence )
overrideprotectedvirtual

fired before an evidence is removed

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 213 of file samplingInference_tpl.h.

213{}

◆ onMarginalTargetAdded_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onMarginalTargetAdded_ ( const NodeId id)
overrideprotectedvirtual

fired after a new marginal target is inserted

Parameters
idThe target variable's id.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 236 of file samplingInference_tpl.h.

236{}

◆ onMarginalTargetErased_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onMarginalTargetErased_ ( const NodeId id)
overrideprotectedvirtual

fired before a marginal target is removed

Parameters
idThe target variable's id.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 239 of file samplingInference_tpl.h.

239{}

◆ onModelChanged_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onModelChanged_ ( const GraphicalModel * bn)
overrideprotectedvirtual

fired after a new Bayes net has been assigned to the engine

Reimplemented from gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 227 of file samplingInference_tpl.h.

227{}

◆ onStateChanged_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::onStateChanged_ ( )
overrideprotectedvirtual

fired when the state is changed

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 248 of file samplingInference_tpl.h.

248 {
249 if (this->isInferenceReady()) {
250 _estimator_.clear();
252 }
253 }

References _estimator_, gum::ApproximationScheme::initApproximationScheme(), and gum::GraphicalModelInference< GUM_SCALAR >::isInferenceReady().

Here is the call graph for this function:

◆ periodSize()

INLINE Size gum::ApproximationScheme::periodSize ( ) const
overridevirtualinherited

Returns the period size.

Returns
Returns the period size.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 155 of file approximationScheme_inl.h.

155{ return period_size_; }
Size period_size_
Checking criteria frequency.

References period_size_.

◆ posterior() [1/2]

template<typename GUM_SCALAR>
const Tensor< GUM_SCALAR > & gum::MarginalTargetedInference< GUM_SCALAR >::posterior ( const std::string & nodeName)
virtualinherited

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
nodeName the name of the node for which we need a posterior probability
Warning
for efficiency reasons, the tensor is stored into the inference engine and is returned by reference. In order to ensure that the tensor may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
prepareInference and makeInference may be applied if needed by the posterior method.
Exceptions
UndefinedElementif node is not in the set of targets

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 259 of file marginalTargetedInference_tpl.h.

259 {
260 return posterior(this->BN().idFromName(nodeName));
261 }

References gum::BayesNetInference< GUM_SCALAR >::BN(), and posterior().

Here is the call graph for this function:

◆ posterior() [2/2]

template<typename GUM_SCALAR>
const Tensor< GUM_SCALAR > & gum::MarginalTargetedInference< GUM_SCALAR >::posterior ( NodeId node)
virtualinherited

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
nodethe node for which we need a posterior probability
Warning
for efficiency reasons, the tensor is stored into the inference engine and is returned by reference. In order to ensure that the tensor may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
prepareInference and makeInference may be applied if needed by the posterior method.
Exceptions
UndefinedElementif node is not in the set of targets

Reimplemented in gum::JointTargetedInference< GUM_SCALAR >.

Definition at line 243 of file marginalTargetedInference_tpl.h.

243 {
244 if (this->hardEvidenceNodes().contains(node)) { return *(this->evidence()[node]); }
245
246 if (!isTarget(node)) {
247 // throws UndefinedElement if var is not a target
248 GUM_ERROR(UndefinedElement, node << " is not a target node")
249 }
250
251 if (!this->isInferenceDone()) { this->makeInference(); }
252
253 return posterior_(node);
254 }
const NodeProperty< const Tensor< GUM_SCALAR > * > & evidence() const
returns the set of evidence
virtual const Tensor< GUM_SCALAR > & posterior_(NodeId id)=0
asks derived classes for the posterior of a given variable

References gum::GraphicalModelInference< GUM_SCALAR >::evidence(), GUM_ERROR, gum::GraphicalModelInference< GUM_SCALAR >::hardEvidenceNodes(), gum::GraphicalModelInference< GUM_SCALAR >::isInferenceDone(), isTarget(), gum::GraphicalModelInference< GUM_SCALAR >::makeInference(), and posterior_().

Referenced by evidenceImpact(), H(), gum::JointTargetedInference< GUM_SCALAR >::posterior(), posterior(), and gum::Estimator< GUM_SCALAR >::setFromLBP().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ posterior_()

template<typename GUM_SCALAR>
const Tensor< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::posterior_ ( NodeId id)
overridevirtual

Computes and returns the posterior of a node.

Returns
a const ref to the posterior probability of the node.
Parameters
idthe node for which we need a posterior probability
Warning
for efficiency reasons, the tensor is returned by reference. In order to ensure that the tensor may still exist even if the Inference object is destroyed, the user has to copy it explicitly.
Exceptions
UndefinedElementif node is not in the set of targets.
NotFoundif node is not in the BN.

Implements gum::MarginalTargetedInference< GUM_SCALAR >.

Definition at line 121 of file samplingInference_tpl.h.

121 {
122 return _estimator_.posterior(this->BN().variable(id));
123 }

References _estimator_, and gum::BayesNetInference< GUM_SCALAR >::BN().

Here is the call graph for this function:

◆ prepareInference()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::prepareInference ( )
finalvirtualinherited

prepare the internal inference structures for the next inference

Definition at line 622 of file graphicalModelInference_tpl.h.

622 {
623 if (isInferenceReady() || isInferenceDone()) { return; }
624
625 if (_model_ == nullptr)
627 "No model been assigned to the "
628 "inference algorithm");
629
632
634 }
virtual void updateOutdatedTensors_()=0
prepares inference when the latter is in OutdatedTensors state
virtual void updateOutdatedStructure_()=0
prepares inference when the latter is in OutdatedStructure state

References _model_, _state_, GUM_ERROR, isInferenceDone(), isInferenceReady(), OutdatedStructure, ReadyForInference, setState_(), updateOutdatedStructure_(), and updateOutdatedTensors_().

Referenced by makeInference(), and gum::SamplingInference< GUM_SCALAR >::samplingBN().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ remainingBurnIn()

INLINE Size gum::ApproximationScheme::remainingBurnIn ( ) const
inherited

Returns the remaining burn in.

Returns
Returns the remaining burn in.

Definition at line 212 of file approximationScheme_inl.h.

212 {
213 if (burn_in_ > current_step_) {
214 return burn_in_ - current_step_;
215 } else {
216 return 0;
217 }
218 }
Size burn_in_
Number of iterations before checking stopping criteria.

References burn_in_, and current_step_.

◆ samplingBN()

template<typename GUM_SCALAR>
INLINE const IBayesNet< GUM_SCALAR > & gum::SamplingInference< GUM_SCALAR >::samplingBN ( )

get the BayesNet which is used to really perform the sampling

Definition at line 89 of file samplingInference_tpl.h.

89 {
90 this->prepareInference();
91 if (_samplingBN_ == nullptr) return this->BN();
92 else return *_samplingBN_;
93 }

References _samplingBN_, gum::BayesNetInference< GUM_SCALAR >::BN(), and gum::GraphicalModelInference< GUM_SCALAR >::prepareInference().

Referenced by addVarSample_(), gum::ImportanceSampling< GUM_SCALAR >::draw_(), and setEstimatorFromBN_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ setBN()

template<typename GUM_SCALAR>
void gum::BayesNetInference< GUM_SCALAR >::setBN ( const IBayesNet< GUM_SCALAR > * bn)
virtualinherited

assigns a new BN to the inference engine

Assigns a new BN to the BayesNetInference engine and sends messages to the descendants of BayesNetInference to inform them that the BN has changed.

Warning
By default, all the nodes of the Bayes net are targets.
note that, by aGrUM's rule, the bn is not copied into the inference engine but only referenced.

Definition at line 81 of file BayesNetInference_tpl.h.

81 {
82 this->setModel_(bn);
83 }
void setModel_(const GraphicalModel *model)

References gum::GraphicalModelInference< GUM_SCALAR >::setModel_().

Here is the call graph for this function:

◆ setEpsilon()

INLINE void gum::ApproximationScheme::setEpsilon ( double eps)
overridevirtualinherited

Given that we approximate f(t), stopping criterion on |f(t+1)-f(t)|.

If the criterion was disabled it will be enabled.

Parameters
epsThe new epsilon value.
Exceptions
OutOfBoundsRaised if eps < 0.

Implements gum::IApproximationSchemeConfiguration.

Reimplemented in gum::learning::EMApproximationScheme.

Definition at line 63 of file approximationScheme_inl.h.

63 {
64 if (eps < 0.) { GUM_ERROR(OutOfBounds, "eps should be >=0") }
65
66 eps_ = eps;
67 enabled_eps_ = true;
68 }

References enabled_eps_, eps_, and GUM_ERROR.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::learning::GreedyHillClimbing::GreedyHillClimbing(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::EMApproximationScheme::setEpsilon().

Here is the caller graph for this function:

◆ setEstimatorFromBN_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::setEstimatorFromBN_ ( )
virtual

Initializes the estimators object linked to the simulation.

Initializes the estimator object by creating a hashtable between non evidence nodes and a 0-filled tensor which will approximate the node's posterior

Definition at line 96 of file samplingInference_tpl.h.

96 {
97 _estimator_.setFromBN(&samplingBN(), this->hardEvidenceNodes());
98 this->isSetEstimator = true;
99 }

References _estimator_, gum::GraphicalModelInference< GUM_SCALAR >::hardEvidenceNodes(), isSetEstimator, and samplingBN().

Referenced by makeInference_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ setEstimatorFromLBP_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::setEstimatorFromLBP_ ( LoopyBeliefPropagation< GUM_SCALAR > * lbp,
GUM_SCALAR virtualLBPSize )
virtual

Initializes the estimators object linked to the simulation.

Parameters
lbpa LoopyBeliefPropagation object
virtualLBPSizethe size of the equivalent sampling by LBP

Initializes the estimator object by creating a hashtable between non evidence nodes and the current approximation of the node's posterior obtained by running LoopyBeliefPropagation algorithm

Definition at line 102 of file samplingInference_tpl.h.

104 {
105 _estimator_.setFromLBP(lbp, this->hardEvidenceNodes(), virtualLBPSize);
106 this->isSetEstimator = true;
107 }

References _estimator_, gum::GraphicalModelInference< GUM_SCALAR >::hardEvidenceNodes(), and isSetEstimator.

Here is the call graph for this function:

◆ setMaxIter()

INLINE void gum::ApproximationScheme::setMaxIter ( Size max)
overridevirtualinherited

Stopping criterion on number of iterations.

If the criterion was disabled it will be enabled.

Parameters
maxThe maximum number of iterations.
Exceptions
OutOfBoundsRaised if max <= 1.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 105 of file approximationScheme_inl.h.

105 {
106 if (max < 1) { GUM_ERROR(OutOfBounds, "max should be >=1") }
107 max_iter_ = max;
108 enabled_max_iter_ = true;
109 }

References enabled_max_iter_, GUM_ERROR, and max_iter_.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), and gum::SamplingInference< GUM_SCALAR >::SamplingInference().

Here is the caller graph for this function:

◆ setMaxTime()

INLINE void gum::ApproximationScheme::setMaxTime ( double timeout)
overridevirtualinherited

Stopping criterion on timeout.

If the criterion was disabled it will be enabled.

Parameters
timeoutThe timeout value in seconds.
Exceptions
OutOfBoundsRaised if timeout <= 0.0.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 126 of file approximationScheme_inl.h.

126 {
127 if (timeout <= 0.) { GUM_ERROR(OutOfBounds, "timeout should be >0.") }
128 max_time_ = timeout;
129 enabled_max_time_ = true;
130 }

References enabled_max_time_, GUM_ERROR, and max_time_.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), and gum::SamplingInference< GUM_SCALAR >::SamplingInference().

Here is the caller graph for this function:

◆ setMinEpsilonRate()

INLINE void gum::ApproximationScheme::setMinEpsilonRate ( double rate)
overridevirtualinherited

Given that we approximate f(t), stopping criterion on d/dt(|f(t+1)-f(t)|).

If the criterion was disabled it will be enabled

Parameters
rateThe minimal epsilon rate.
Exceptions
OutOfBoundsif rate<0

Implements gum::IApproximationSchemeConfiguration.

Reimplemented in gum::learning::EMApproximationScheme.

Definition at line 84 of file approximationScheme_inl.h.

84 {
85 if (rate < 0) { GUM_ERROR(OutOfBounds, "rate should be >=0") }
86
87 min_rate_eps_ = rate;
89 }

References enabled_min_rate_eps_, GUM_ERROR, and min_rate_eps_.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsSampling< GUM_SCALAR >::GibbsSampling(), gum::SamplingInference< GUM_SCALAR >::SamplingInference(), and gum::learning::EMApproximationScheme::setMinEpsilonRate().

Here is the caller graph for this function:

◆ setModel_()

template<typename GUM_SCALAR>
void gum::GraphicalModelInference< GUM_SCALAR >::setModel_ ( const GraphicalModel * model)
protectedinherited

Definition at line 136 of file graphicalModelInference_tpl.h.

136 {
137 clear();
138 _model_ = model;
142 }
void _computeDomainSizes_()
computes the domain sizes of the random variables
virtual void onModelChanged_(const GraphicalModel *model)=0
fired after a new Bayes net has been assigned to the engine
virtual void clear()
clears all the data structures allocated for the last inference

References _computeDomainSizes_(), _model_, clear(), model(), onModelChanged_(), OutdatedStructure, and setState_().

Referenced by gum::BayesNetInference< GUM_SCALAR >::setBN(), and gum::MRFInference< GUM_SCALAR >::setMRF().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ setModelDuringConstruction_()

template<typename GUM_SCALAR>
void gum::GraphicalModelInference< GUM_SCALAR >::setModelDuringConstruction_ ( const GraphicalModel * model)
protectedinherited

assigns a model during the inference engine construction

Definition at line 146 of file graphicalModelInference_tpl.h.

References _computeDomainSizes_(), _model_, model(), OutdatedStructure, and setState_().

Referenced by gum::BayesNetInference< GUM_SCALAR >::_setBayesNetDuringConstruction_(), and gum::MRFInference< GUM_SCALAR >::_setMRFDuringConstruction_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ setOutdatedStructureState_()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::setOutdatedStructureState_ ( )
protectedinherited

put the inference into an outdated model structure state

OutdatedStructure: in this state, the inference is fully unprepared to be applied because some events changed the "logical" structure of the model: for instance a node received a hard evidence, which implies that its outgoing arcs can be removed from the model, hence involving a structural change in the model. As a consequence, the (incremental) inference (probably) needs a significant amount of preparation to be ready for the next inference. In a Lazy propagation, for instance, this step amounts to compute a new join tree, hence a new structure in which inference will be applied. Note that classes that inherit from graphicalModelInference may be smarter than graphicalModelInference and may, in some situations, find out that their data structures are still ok for inference and, therefore, only resort to perform the actions related to the OutdatedTensors state. As an example, consider a LazyPropagation inference in Bayes Net A->B->C->D->E in which C has received hard evidence e_C and E is the only target. In this case, A and B are not needed for inference, the only tensors that matter are P(D|e_C) and P(E|D). So the smallest join tree needed for inference contains only one clique DE. Now, adding new evidence e_A on A has no impact on E given hard evidence e_C. In this case, LazyPropagation can be smart and not update its join tree.

Definition at line 609 of file graphicalModelInference_tpl.h.

References OutdatedStructure, and setState_().

Referenced by makeInference_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ setOutdatedTensorsState_()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::setOutdatedTensorsState_ ( )
protectedinherited

puts the inference into an OutdatedTensors state if it is not already in an OutdatedStructure state

OutdatedTensors: in this state, the structure of the model remains unchanged, only some tensors stored in it have changed. Therefore, the inference probably just needs to invalidate some already computed tensors to be ready. Only a light amount of preparation is needed to be able to perform inference.

Definition at line 616 of file graphicalModelInference_tpl.h.

References OutdatedTensors, and setState_().

Referenced by makeInference_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ setPeriodSize()

INLINE void gum::ApproximationScheme::setPeriodSize ( Size p)
overridevirtualinherited

Sets how many samples are drawn between two checks of the stopping criteria.

Parameters
pThe new period value.
Exceptions
OutOfBoundsRaised if p < 1.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 149 of file approximationScheme_inl.h.

149 {
150 if (p < 1) { GUM_ERROR(OutOfBounds, "p should be >=1") }
151
152 period_size_ = p;
153 }

References GUM_ERROR, and period_size_.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), and gum::SamplingInference< GUM_SCALAR >::SamplingInference().

Here is the caller graph for this function:

◆ setState_()

template<typename GUM_SCALAR>
INLINE void gum::GraphicalModelInference< GUM_SCALAR >::setState_ ( const StateOfInference state)
finalprotectedvirtualinherited

set the state of the inference engine and call the notification onStateChanged_ when necessary (i.e. when the state has effectively changed).

Definition at line 117 of file graphicalModelInference_tpl.h.

117 {
118 if (_state_ != state) {
119 _state_ = state;
121 }
122 }
virtual void onStateChanged_()=0
fired when the state is changed
virtual StateOfInference state() const noexcept final
returns the state of the inference engine

References _state_, onStateChanged_(), and state().

Referenced by gum::MarginalTargetedInference< GUM_SCALAR >::addAllTargets(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::addAllTargets(), addEvidence(), gum::JointTargetedInference< GUM_SCALAR >::addJointTarget(), gum::JointTargetedMRFInference< GUM_SCALAR >::addJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::addTarget(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::addTarget(), chgEvidence(), clear(), eraseAllEvidence(), gum::JointTargetedInference< GUM_SCALAR >::eraseAllJointTargets(), gum::JointTargetedMRFInference< GUM_SCALAR >::eraseAllJointTargets(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseAllTargets(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::eraseAllTargets(), eraseEvidence(), gum::JointTargetedInference< GUM_SCALAR >::eraseJointTarget(), gum::JointTargetedMRFInference< GUM_SCALAR >::eraseJointTarget(), gum::MarginalTargetedInference< GUM_SCALAR >::eraseTarget(), gum::MarginalTargetedMRFInference< GUM_SCALAR >::eraseTarget(), makeInference(), prepareInference(), setModel_(), setModelDuringConstruction_(), setOutdatedStructureState_(), and setOutdatedTensorsState_().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ setTargetedMode_()

template<typename GUM_SCALAR>
INLINE void gum::MarginalTargetedInference< GUM_SCALAR >::setTargetedMode_ ( )
protectedinherited

Definition at line 332 of file marginalTargetedInference_tpl.h.

332 {
333 if (!_targeted_mode_) {
334 _targets_.clear();
335 _targeted_mode_ = true;
336 }
337 }

References _targeted_mode_, and _targets_.

Referenced by addAllTargets(), gum::JointTargetedInference< GUM_SCALAR >::addJointTarget(), addTarget(), and eraseAllTargets().

Here is the caller graph for this function:

◆ setVerbosity()

INLINE void gum::ApproximationScheme::setVerbosity ( bool v)
overridevirtualinherited

Set the verbosity on (true) or off (false).

Parameters
vIf true, then verbosity is turned on.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 158 of file approximationScheme_inl.h.

158{ verbosity_ = v; }
bool verbosity_
If true, verbosity is enabled.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), gum::GibbsBNdistance< GUM_SCALAR >::GibbsBNdistance(), and gum::SamplingInference< GUM_SCALAR >::SamplingInference().

Here is the caller graph for this function:

◆ softEvidenceNodes()

template<typename GUM_SCALAR>
INLINE const NodeSet & gum::GraphicalModelInference< GUM_SCALAR >::softEvidenceNodes ( ) const
inherited

returns the set of nodes with soft evidence

the set of nodes that received soft evidence

Definition at line 593 of file graphicalModelInference_tpl.h.

593 {
595 }

References _soft_evidence_nodes_.

Referenced by gum::SamplingInference< GUM_SCALAR >::contextualize().

Here is the caller graph for this function:

◆ startOfPeriod()

INLINE bool gum::ApproximationScheme::startOfPeriod ( ) const
inherited

Returns true if we are at the beginning of a period (compute error is mandatory).

Returns
Returns true if we are at the beginning of a period (compute error is mandatory).

Definition at line 199 of file approximationScheme_inl.h.

199 {
200 if (current_step_ < burn_in_) { return false; }
201
202 if (period_size_ == 1) { return true; }
203
204 return ((current_step_ - burn_in_) % period_size_ == 0);
205 }

References burn_in_, and current_step_.

◆ state()

template<typename GUM_SCALAR>
INLINE GraphicalModelInference< GUM_SCALAR >::StateOfInference gum::GraphicalModelInference< GUM_SCALAR >::state ( ) const
finalvirtualnoexceptinherited

returns the state of the inference engine

Definition at line 111 of file graphicalModelInference_tpl.h.

111 {
112 return _state_;
113 }

References _state_.

Referenced by setState_().

Here is the caller graph for this function:

◆ stateApproximationScheme()

INLINE IApproximationSchemeConfiguration::ApproximationSchemeSTATE gum::ApproximationScheme::stateApproximationScheme ( ) const
overridevirtualinherited

Returns the approximation scheme state.

Returns
Returns the approximation scheme state.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 164 of file approximationScheme_inl.h.

164 {
165 return current_state_;
166 }

References current_state_.

Referenced by history(), and nbrIterations().

Here is the caller graph for this function:

◆ stopApproximationScheme()

INLINE void gum::ApproximationScheme::stopApproximationScheme ( )
inherited

Stop the approximation scheme.

Definition at line 221 of file approximationScheme_inl.h.

Referenced by gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), and gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInferenceNodeToNeighbours_().

Here is the caller graph for this function:

◆ stopScheme_()

INLINE void gum::ApproximationScheme::stopScheme_ ( ApproximationSchemeSTATE new_state)
privateinherited

Stop the scheme given a new state.

Parameters
new_stateThe scheme new state.

Definition at line 301 of file approximationScheme_inl.h.

301 {
302 if (new_state == ApproximationSchemeSTATE::Continue) { return; }
303
304 if (new_state == ApproximationSchemeSTATE::Undefined) { return; }
305
306 current_state_ = new_state;
307 timer_.pause();
308
309 if (onStop.hasListener()) { GUM_EMIT1(onStop, messageApproximationScheme()); }
310 }
Signaler1< const std::string & > onStop
Criteria messageApproximationScheme.
#define GUM_EMIT1(signal, arg1)
Definition signaler1.h:61

References gum::IApproximationSchemeConfiguration::Continue, current_state_, and gum::IApproximationSchemeConfiguration::Undefined.

Referenced by gum::credal::MultipleInferenceEngine< GUM_SCALAR, LazyPropagation< GUM_SCALAR > >::disableMaxIter(), gum::credal::MultipleInferenceEngine< GUM_SCALAR, LazyPropagation< GUM_SCALAR > >::disableMaxTime(), gum::credal::MultipleInferenceEngine< GUM_SCALAR, LazyPropagation< GUM_SCALAR > >::isEnabledMaxIter(), gum::credal::MultipleInferenceEngine< GUM_SCALAR, LazyPropagation< GUM_SCALAR > >::maxTime(), and gum::credal::MultipleInferenceEngine< GUM_SCALAR, LazyPropagation< GUM_SCALAR > >::setPeriodSize().

Here is the caller graph for this function:

◆ targets()

◆ updateApproximationScheme()

INLINE void gum::ApproximationScheme::updateApproximationScheme ( unsigned int incr = 1)
inherited

Update the scheme w.r.t the new error and increment steps.

Parameters
incrThe new increment steps.

Definition at line 208 of file approximationScheme_inl.h.

208 {
209 current_step_ += incr;
210 }

References current_step_.

Referenced by gum::GibbsBNdistance< GUM_SCALAR >::computeKL_(), gum::learning::GreedyHillClimbing::learnStructure(), gum::learning::LocalSearchWithTabuList::learnStructure(), gum::SamplingInference< GUM_SCALAR >::loopApproxInference_(), gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInferenceByOrderedArcs_(), gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInferenceByRandomOrder_(), and gum::credal::CNLoopyPropagation< GUM_SCALAR >::makeInferenceNodeToNeighbours_().

Here is the caller graph for this function:

◆ updateOutdatedStructure_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::updateOutdatedStructure_ ( )
overrideprotectedvirtual

prepares inference when the latter is in OutdatedStructure state

Note that the values of evidence are not necessarily known and can be changed between updateOutdatedStructure_ and makeInference_.

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 230 of file samplingInference_tpl.h.

230{}

◆ updateOutdatedTensors_()

template<typename GUM_SCALAR>
void gum::SamplingInference< GUM_SCALAR >::updateOutdatedTensors_ ( )
overrideprotectedvirtual

prepares inference when the latter is in OutdatedTensors state

Note that the values of evidence are not necessarily known and can be changed between updateOutdatedTensors_ and makeInference_.

Implements gum::GraphicalModelInference< GUM_SCALAR >.

Definition at line 233 of file samplingInference_tpl.h.

233{}

◆ verbosity()

INLINE bool gum::ApproximationScheme::verbosity ( ) const
overridevirtualinherited

Returns true if verbosity is enabled.

Returns
Returns true if verbosity is enabled.

Implements gum::IApproximationSchemeConfiguration.

Definition at line 160 of file approximationScheme_inl.h.

160{ return verbosity_; }

References verbosity_.

Referenced by ApproximationScheme(), and gum::learning::EMApproximationScheme::EMApproximationScheme().

Here is the caller graph for this function:

Member Data Documentation

◆ _estimator_

template<typename GUM_SCALAR>
Estimator< GUM_SCALAR > gum::SamplingInference< GUM_SCALAR >::_estimator_
protected

Estimator object designed to approximate target posteriors.

Definition at line 194 of file samplingInference.h.

Referenced by SamplingInference(), currentPosterior(), loopApproxInference_(), onStateChanged_(), posterior_(), setEstimatorFromBN_(), and setEstimatorFromLBP_().

◆ _samplingBN_

template<typename GUM_SCALAR>
BayesNetFragment< GUM_SCALAR >* gum::SamplingInference< GUM_SCALAR >::_samplingBN_
private

◆ _targeted_mode_

template<typename GUM_SCALAR>
bool gum::MarginalTargetedInference< GUM_SCALAR >::_targeted_mode_
privateinherited

whether the actual targets are default

Definition at line 260 of file marginalTargetedInference.h.

Referenced by MarginalTargetedInference(), eraseTarget(), isInTargetMode(), isTargetedMode_(), onModelChanged_(), and setTargetedMode_().

◆ _targets_

template<typename GUM_SCALAR>
NodeSet gum::MarginalTargetedInference< GUM_SCALAR >::_targets_
privateinherited

◆ burn_in_

Size gum::ApproximationScheme::burn_in_
protectedinherited

◆ current_epsilon_

double gum::ApproximationScheme::current_epsilon_
protectedinherited

Current epsilon.

Definition at line 378 of file approximationScheme.h.

Referenced by initApproximationScheme().

◆ current_rate_

double gum::ApproximationScheme::current_rate_
protectedinherited

Current rate.

Definition at line 384 of file approximationScheme.h.

Referenced by initApproximationScheme().

◆ current_state_

ApproximationSchemeSTATE gum::ApproximationScheme::current_state_
protectedinherited

The current state.

Definition at line 393 of file approximationScheme.h.

Referenced by ApproximationScheme(), initApproximationScheme(), stateApproximationScheme(), and stopScheme_().

◆ current_step_

◆ enabled_eps_

bool gum::ApproximationScheme::enabled_eps_
protectedinherited

If true, the threshold convergence is enabled.

Definition at line 402 of file approximationScheme.h.

Referenced by ApproximationScheme(), disableEpsilon(), enableEpsilon(), isEnabledEpsilon(), and setEpsilon().

◆ enabled_max_iter_

bool gum::ApproximationScheme::enabled_max_iter_
protectedinherited

If true, the maximum iterations stopping criterion is enabled.

Definition at line 420 of file approximationScheme.h.

Referenced by ApproximationScheme(), disableMaxIter(), enableMaxIter(), isEnabledMaxIter(), and setMaxIter().

◆ enabled_max_time_

bool gum::ApproximationScheme::enabled_max_time_
protectedinherited

If true, the timeout is enabled.

Definition at line 414 of file approximationScheme.h.

Referenced by ApproximationScheme(), continueApproximationScheme(), disableMaxTime(), enableMaxTime(), isEnabledMaxTime(), and setMaxTime().

◆ enabled_min_rate_eps_

bool gum::ApproximationScheme::enabled_min_rate_eps_
protectedinherited

If true, the minimal threshold for epsilon rate is enabled.

Definition at line 408 of file approximationScheme.h.

Referenced by ApproximationScheme(), disableMinEpsilonRate(), enableMinEpsilonRate(), isEnabledMinEpsilonRate(), and setMinEpsilonRate().

◆ eps_

double gum::ApproximationScheme::eps_
protectedinherited

Threshold for convergence.

Definition at line 399 of file approximationScheme.h.

Referenced by ApproximationScheme(), epsilon(), and setEpsilon().

◆ history_

std::vector< double > gum::ApproximationScheme::history_
protectedinherited

The scheme history, used only if verbosity == true.

Definition at line 396 of file approximationScheme.h.

◆ isContextualized

template<typename GUM_SCALAR>
bool gum::SamplingInference< GUM_SCALAR >::isContextualized = false
protected

whether the referenced Bayesian network has been "contextualized"

Definition at line 200 of file samplingInference.h.

Referenced by ~SamplingInference(), contextualize(), and loopApproxInference_().

◆ isSetEstimator

template<typename GUM_SCALAR>
bool gum::SamplingInference< GUM_SCALAR >::isSetEstimator = false
protected

whether the Estimator object has been initialized

Definition at line 197 of file samplingInference.h.

Referenced by loopApproxInference_(), makeInference_(), setEstimatorFromBN_(), and setEstimatorFromLBP_().

◆ last_epsilon_

double gum::ApproximationScheme::last_epsilon_
protectedinherited

Last epsilon value.

Definition at line 381 of file approximationScheme.h.

◆ max_iter_

Size gum::ApproximationScheme::max_iter_
protectedinherited

The maximum iterations.

Definition at line 417 of file approximationScheme.h.

Referenced by ApproximationScheme(), maxIter(), and setMaxIter().

◆ max_time_

double gum::ApproximationScheme::max_time_
protectedinherited

The timeout.

Definition at line 411 of file approximationScheme.h.

Referenced by ApproximationScheme(), maxTime(), and setMaxTime().

◆ min_rate_eps_

double gum::ApproximationScheme::min_rate_eps_
protectedinherited

Threshold for the epsilon rate.

Definition at line 405 of file approximationScheme.h.

Referenced by ApproximationScheme(), minEpsilonRate(), and setMinEpsilonRate().

◆ onProgress

◆ onStop

Signaler1< const std::string& > gum::IApproximationSchemeConfiguration::onStop
inherited

Criteria message (see messageApproximationScheme()).

Definition at line 83 of file IApproximationSchemeConfiguration.h.

Referenced by gum::learning::IBNLearner::distributeStop().

◆ period_size_

Size gum::ApproximationScheme::period_size_
protectedinherited

Checking criteria frequency.

Definition at line 426 of file approximationScheme.h.

Referenced by ApproximationScheme(), periodSize(), and setPeriodSize().

◆ timer_

◆ verbosity_

bool gum::ApproximationScheme::verbosity_
protectedinherited

If true, verbosity is enabled.

Definition at line 429 of file approximationScheme.h.

Referenced by ApproximationScheme(), and verbosity().


The documentation for this class was generated from the following files: