aGrUM 2.3.2
a C++ library for (probabilistic) graphical models
DAGCycleDetector.cpp
Go to the documentation of this file.
1/****************************************************************************
2 * This file is part of the aGrUM/pyAgrum library. *
3 * *
4 * Copyright (c) 2005-2025 by *
5 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
6 * - Christophe GONZALES(_at_AMU) *
7 * *
8 * The aGrUM/pyAgrum library is free software; you can redistribute it *
9 * and/or modify it under the terms of either : *
10 * *
11 * - the GNU Lesser General Public License as published by *
12 * the Free Software Foundation, either version 3 of the License, *
13 * or (at your option) any later version, *
14 * - the MIT license (MIT), *
15 * - or both in dual license, as here. *
16 * *
17 * (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) *
18 * *
19 * This aGrUM/pyAgrum library is distributed in the hope that it will be *
20 * useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
21 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS *
22 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
23 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, *
25 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR *
26 * OTHER DEALINGS IN THE SOFTWARE. *
27 * *
28 * See LICENCES for more details. *
29 * *
30 * SPDX-FileCopyrightText: Copyright 2005-2025 *
31 * - Pierre-Henri WUILLEMIN(_at_LIP6) *
32 * - Christophe GONZALES(_at_AMU) *
33 * SPDX-License-Identifier: LGPL-3.0-or-later OR MIT *
34 * *
35 * Contact : info_at_agrum_dot_org *
36 * homepage : http://agrum.gitlab.io *
37 * gitlab : https://gitlab.com/agrumery/agrum *
38 * *
39 ****************************************************************************/
40
41
54
57
#ifdef GUM_NO_INLINE
// NOTE(review): the "#  include <agrum/.../DAGCycleDetector_inl.h>" line normally
// guarded here was lost in extraction — restore it (confirm exact path against the repo)
#endif   // GUM_NO_INLINE
61
62namespace gum {
63
65 void DAGCycleDetector::setDAG(const DAG& dag) {
66 // sets the dag
67 _dag_ = dag;
68
69 // get the set of roots and leaves of the dag
70 List< NodeId > roots, leaves;
71 NodeProperty< Size > nb_parents, nb_children;
72
73 for (const auto node: dag) {
74 Size nb_ch = dag.children(node).size();
75 nb_children.insert(node, nb_ch);
76
77 if (!nb_ch) leaves.insert(node);
78
79 Size nb_pa = dag.parents(node).size();
80 nb_parents.insert(node, nb_pa);
81
82 if (!nb_pa) roots.insert(node);
83 }
84
85 // recompute the set of ancestors
86 NodeProperty< Size > empty_set;
87 _ancestors_.clear();
88
89 for (NodeId node: dag) {
90 _ancestors_.insert(node, empty_set);
91 }
92
93 while (!roots.empty()) {
94 // get a node and update the ancestors of its children
95 NodeId node = roots.front();
96 NodeProperty< Size > node_ancestors = _ancestors_[node];
97 node_ancestors.insert(node, 1);
98 const NodeSet& node_children = dag.children(node);
99 roots.popFront();
100
101 for (const auto ch: node_children) {
102 _addWeightedSet_(_ancestors_[ch], node_ancestors, 1);
103 --nb_parents[ch];
104
105 if (!nb_parents[ch]) { roots.insert(ch); }
106 }
107 }
108
109 // recompute the set of descendants
110 _descendants_.clear();
111
112 for (const auto node: dag) {
113 _descendants_.insert(node, empty_set);
114 }
115
116 while (!leaves.empty()) {
117 // get a node and update the descendants of its parents
118 NodeId node = leaves.front();
119 NodeProperty< Size > node_descendants = _descendants_[node];
120 node_descendants.insert(node, 1);
121 const NodeSet& node_parents = dag.parents(node);
122 leaves.popFront();
123
124 for (const auto pa: node_parents) {
125 _addWeightedSet_(_descendants_[pa], node_descendants, 1);
126 --nb_children[pa];
127
128 if (!nb_children[pa]) { leaves.insert(pa); }
129 }
130 }
131 }
132
134 bool DAGCycleDetector::hasCycleFromModifications(const std::vector< Change >& modifs) const {
135 // first, we separate deletions and insertions (reversals are cut into
136 // a deletion + an insertion) and we check that no insertion also exists
137 // as a deletion (if so, we remove both operations). In addition, if
138 // we try to add an arc (X,X) we return that it induces a cycle
139 HashTable< Arc, Size > deletions(Size(modifs.size()));
140 HashTable< Arc, Size > additions(Size(modifs.size()));
141
142 for (const auto& modif: modifs) {
143 Arc arc(modif.tail(), modif.head());
144
145 switch (modif.type()) {
147 if (deletions.exists(arc)) ++deletions[arc];
148 else deletions.insert(arc, 1);
149
150 break;
151
153 if (modif.tail() == modif.head()) return true;
154
155 if (additions.exists(arc)) ++additions[arc];
156 else additions.insert(arc, 1);
157
158 break;
159
161 if (deletions.exists(arc)) ++deletions[arc];
162 else deletions.insert(arc, 1);
163
164 arc = Arc(modif.head(), modif.tail());
165
166 if (additions.exists(arc)) ++additions[arc];
167 else additions.insert(arc, 1);
168
169 break;
170
171 default : GUM_ERROR(OperationNotAllowed, "undefined change type")
172 }
173 }
174
175 for (auto iter = additions.beginSafe(); // safe iterator needed here
176 iter != additions.endSafe();
177 ++iter) {
178 if (deletions.exists(iter.key())) {
179 Size& nb_del = deletions[iter.key()];
180 Size& nb_add = iter.val();
181
182 if (nb_del > nb_add) {
183 additions.erase(iter);
184 nb_del -= nb_add;
185 } else if (nb_del < nb_add) {
186 deletions.erase(iter.key());
187 nb_add -= nb_del;
188 } else {
189 deletions.erase(iter.key());
190 additions.erase(iter);
191 }
192 }
193 }
194
195 // get the set of nodes involved in the modifications
196 NodeSet extremities;
197
198 for (const auto& modif: modifs) {
199 extremities.insert(modif.tail());
200 extremities.insert(modif.head());
201 }
202
203 // we now retrieve the set of ancestors and descendants of all the
204 // extremities of the arcs involved in the modifications. We also
205 // keep track of all the children and parents of these nodes
206 NodeProperty< NodeProperty< Size > > ancestors, descendants;
207
208 for (const auto& modif: modifs) {
209 if (!ancestors.exists(modif.tail())) {
210 NodeProperty< Size >& anc = ancestors.insert(modif.tail(), NodeProperty< Size >()).second;
211 _restrictWeightedSet_(anc, _ancestors_[modif.tail()], extremities);
212
214 = descendants.insert(modif.tail(), NodeProperty< Size >()).second;
215 _restrictWeightedSet_(desc, _descendants_[modif.tail()], extremities);
216 }
217
218 if (!ancestors.exists(modif.head())) {
219 NodeProperty< Size >& anc = ancestors.insert(modif.head(), NodeProperty< Size >()).second;
220 _restrictWeightedSet_(anc, _ancestors_[modif.head()], extremities);
221
223 = descendants.insert(modif.head(), NodeProperty< Size >()).second;
224 _restrictWeightedSet_(desc, _descendants_[modif.head()], extremities);
225 }
226 }
227
228 // we apply all the suppressions:
229 // assume that the modif concerns arc (X,Y). Then, after removing
230 // arc (X,Y), the sets of descendants of the nodes T in
231 // ( X + ancestors (X) ) are updated by subtracting the number of paths
232 // leading to Y and its set of descendants. These numbers are equal to
233 // the numbers found in descendants[X] * the numbers of paths leading
234 // to X, i.e., descendants[T][X].
235 // By symmetry, the set of ancestors of nodes T in ( Y + descendants (Y) )
236 // are
237 // updated by subtracting the number of paths leading to X and its
238 // ancestors.
239 for (auto iter = deletions.cbegin(); iter != deletions.cend(); ++iter) {
240 const Arc& arc = iter.key();
241 const NodeId tail = arc.tail();
242 const NodeProperty< Size >& anc_tail = ancestors[tail];
243 const NodeId head = arc.head();
244 const NodeProperty< Size >& desc_head = descendants[head];
245
246 // update the set of descendants
247 NodeProperty< Size > set_to_del = desc_head;
248 set_to_del.insert(head, 1);
249 _delWeightedSet_(descendants[tail], set_to_del, 1);
250
251 for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
252 _delWeightedSet_(descendants[iter.key()], set_to_del, descendants[iter.key()][tail]);
253 }
254
255 // update the set of ancestors
256 set_to_del = anc_tail;
257 set_to_del.insert(tail, 1);
258 _delWeightedSet_(ancestors[head], set_to_del, 1);
259
260 for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
261 _delWeightedSet_(ancestors[iter.key()], set_to_del, ancestors[iter.key()][head]);
262 }
263 }
264
265 // now we apply all the additions of arcs (including the additions after
266 // arc reversals). After adding arc (X,Y), the set of ancestors T of Y and
267 // its
268 // descendants shall be updated by adding the number of paths leading to
269 // X and its ancestors, i.e., ancestors[X] * the number of paths leading
270 // to Y, i.e., ancestors[T][Y]. Similarly, the set of descendants of X and
271 // its
272 // ancestors should be updated by adding the number of paths leading to Y
273 // and
274 // its descendants. Finally, an arc (X,Y) can be added if and
275 // only if Y does not belong to the ancestor set of X
276 for (auto iter = additions.cbegin(); iter != additions.cend(); ++iter) {
277 const Arc& arc = iter.key();
278 NodeId tail = arc.tail();
279 NodeId head = arc.head();
280
281 const NodeProperty< Size >& anc_tail = ancestors[tail];
282
283 if (anc_tail.exists(head)) { return true; }
284
285 const NodeProperty< Size >& desc_head = descendants[head];
286
287 // update the set of ancestors
288 NodeProperty< Size > set_to_add = anc_tail;
289 set_to_add.insert(tail, 1);
290 _addWeightedSet_(ancestors[head], set_to_add, 1);
291
292 for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
293 _addWeightedSet_(ancestors[iter.key()], set_to_add, ancestors[iter.key()][head]);
294 }
295
296 // update the set of descendants
297 set_to_add = desc_head;
298 set_to_add.insert(head, 1);
299 _addWeightedSet_(descendants[tail], set_to_add, 1);
300
301 for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
302 _addWeightedSet_(descendants[iter.key()], set_to_add, descendants[iter.key()][tail]);
303 }
304 }
305
306 return false;
307 }
308
311 // check that the arc does not already exist
312 if (_dag_.existsArc(tail, head)) return;
313
314 // check that the arc would not create a cycle
315 if (hasCycleFromAddition(tail, head)) {
316 GUM_ERROR(InvalidDirectedCycle, "the arc would create a directed into a DAG")
317 }
318
319 _dag_.addArc(tail, head);
320
321 // now we apply the addition of the arc as done in method
322 // hasCycleFromModifications
323 const NodeProperty< Size >& anc_tail = _ancestors_[tail];
324 const NodeProperty< Size >& desc_head = _descendants_[head];
325
326 // update the set of ancestors
327 NodeProperty< Size > set_to_add = anc_tail;
328 set_to_add.insert(tail, 1);
329 _addWeightedSet_(_ancestors_[head], set_to_add, 1);
330
331 for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
332 _addWeightedSet_(_ancestors_[iter.key()], set_to_add, _ancestors_[iter.key()][head]);
333 }
334
335 // update the set of descendants
336 set_to_add = desc_head;
337 set_to_add.insert(head, 1);
338 _addWeightedSet_(_descendants_[tail], set_to_add, 1);
339
340 for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
341 _addWeightedSet_(_descendants_[iter.key()], set_to_add, _descendants_[iter.key()][tail]);
342 }
343 }
344
347 // check that the arc exists
348 if (!_dag_.existsArc(tail, head)) return;
349
350 _dag_.eraseArc(Arc(tail, head));
351
352 // we apply the deletion of the arc as done in method
353 // hasCycleFromModifications
354 const NodeProperty< Size >& anc_tail = _ancestors_[tail];
355 const NodeProperty< Size >& desc_head = _descendants_[head];
356
357 // update the set of descendants
358 NodeProperty< Size > set_to_del = desc_head;
359 set_to_del.insert(head, 1);
360 _delWeightedSet_(_descendants_[tail], set_to_del, 1);
361
362 for (auto iter = anc_tail.cbegin(); iter != anc_tail.cend(); ++iter) {
363 _delWeightedSet_(_descendants_[iter.key()], set_to_del, _descendants_[iter.key()][tail]);
364 }
365
366 // update the set of ancestors
367 set_to_del = anc_tail;
368 set_to_del.insert(tail, 1);
369 _delWeightedSet_(_ancestors_[head], set_to_del, 1);
370
371 for (auto iter = desc_head.cbegin(); iter != desc_head.cend(); ++iter) {
372 _delWeightedSet_(_ancestors_[iter.key()], set_to_del, _ancestors_[iter.key()][head]);
373 }
374 }
375
376} /* namespace gum */
A class for detecting directed cycles in DAGs when trying to apply many changes to the graph.
A class for detecting directed cycles in DAGs when trying to apply many changes to the graph.
const NodeSet & parents(NodeId id) const
returns the set of nodes with arc ingoing to a given node
NodeSet children(const NodeSet &ids) const
returns the set of children of a set of nodes
The base class for all directed edges.
GUM_NODISCARD NodeId head() const
returns the head of the arc
GUM_NODISCARD NodeId tail() const
returns the tail of the arc
bool hasCycleFromModifications(const std::vector< Change > &modifs) const
indicates whether a set of modifications would create a cycle
NodeProperty< NodeProperty< Size > > _descendants_
the set of descendants of each node in the dag
void _addWeightedSet_(NodeProperty< Size > &nodeset, const NodeProperty< Size > &set_to_add, Size multiplier) const
adds a weighted nodeset to another (weights are added)
void addArc(NodeId x, NodeId y)
adds a new arc to the current DAG
void setDAG(const DAG &dag)
sets the initial DAG from which changes shall be applied
NodeProperty< NodeProperty< Size > > _ancestors_
the set of ancestors of each node in the dag
void _delWeightedSet_(NodeProperty< Size > &nodeset, const NodeProperty< Size > &set_to_del, Size multiplier) const
removes a weighted nodeset from another (weights are subtracted)
void eraseArc(NodeId x, NodeId y)
removes an arc from the current DAG
DiGraph _dag_
the initial dag from which modifications are applied
void _restrictWeightedSet_(NodeProperty< Size > &result_set, const NodeProperty< Size > &set_to_restrict, const NodeSet &extrmities) const
put into a weighted nodeset the nodes of another weighted set that belong to a set of arc extremities
bool hasCycleFromAddition(NodeId x, NodeId y) const noexcept
indicates whether an arc addition would create a cycle
Base class for dag.
Definition DAG.h:121
The class for generic Hash Tables.
Definition hashTable.h:637
const iterator_safe & endSafe() noexcept
Returns the safe iterator pointing to the end of the hashtable.
void erase(const Key &key)
Removes a given element from the hash table.
bool exists(const Key &key) const
Checks whether there exists an element with a given key in the hashtable.
const_iterator cbegin() const
Returns an unsafe const_iterator pointing to the beginning of the hashtable.
const const_iterator & cend() const noexcept
Returns the unsafe const_iterator pointing to the end of the hashtable.
value_type & insert(const Key &key, const Val &val)
Adds a new element (actually a copy of this element) into the hash table.
iterator_safe beginSafe()
Returns the safe iterator pointing to the beginning of the hashtable.
Exception : existence of a directed cycle in a graph.
Generic doubly linked lists.
Definition list.h:379
Val & front() const
Returns a reference to first element of a list, if any.
Definition list_tpl.h:1703
bool empty() const noexcept
Returns a boolean indicating whether the chained list is empty.
Definition list_tpl.h:1831
void popFront()
Removes the first element of a List, if any.
Definition list_tpl.h:1825
Val & insert(const Val &val)
Inserts a new element at the end of the chained list (alias of pushBack).
Definition list_tpl.h:1515
Exception : operation not allowed.
Size size() const noexcept
Returns the number of elements in the set.
Definition set_tpl.h:636
void insert(const Key &k)
Inserts a new element into the set.
Definition set_tpl.h:539
#define GUM_ERROR(type, msg)
Definition exceptions.h:72
std::size_t Size
In aGrUM, hashed values are unsigned long int.
Definition types.h:74
Size NodeId
Type for node ids.
HashTable< NodeId, VAL > NodeProperty
Property on graph elements.
Set< NodeId > NodeSet
Some typedefs and defines for shortcuts ...
gum is the global namespace for all aGrUM entities
Definition agrum.h:46
Header file of gum::Sequence, a class for storing (ordered) sequences of objects.