"Fossies" - the Fresh Open Source Software Archive 
Member "dune-typetree-2.8.0/dune/typetree/accumulate_static.hh" (31 Aug 2021, 25970 Bytes) of package /linux/misc/dune/dune-typetree-2.8.0.tar.gz:
As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) C and C++ source code syntax highlighting (style:
standard) with prefixed line numbers and
code folding option.
Alternatively you can view or download the uninterpreted source code file here.
For more information about "accumulate_static.hh" see the
Fossies "Dox" file reference documentation.
1 // -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 // vi: set et ts=4 sw=2 sts=2:
3
4 #ifndef DUNE_TYPETREE_ACCUMULATE_STATIC_HH
5 #define DUNE_TYPETREE_ACCUMULATE_STATIC_HH
6
7 #include <dune/common/typetraits.hh>
8 #include <dune/typetree/nodeinterface.hh>
9 #include <dune/typetree/nodetags.hh>
10 #include <dune/typetree/treepath.hh>
11
12
13 namespace Dune {
14 namespace TypeTree {
15
16 /** \addtogroup Tree Traversal
17 * \ingroup TypeTree
18 * \{
19 */
20
//! Statically combine two values of type result_type using ||.
/**
 * Binary compile-time reduction operator: reduce<r1,r2>::result is r1 || r2.
 * \tparam result_type integral type of the operands and the result.
 */
template<typename result_type>
struct or_
{
  template<result_type r1, result_type r2>
  struct reduce
  {
    // constexpr members are implicitly inline since C++17, so no out-of-line
    // definition is required if the member is odr-used (unlike static const).
    static constexpr result_type result = r1 || r2;
  };
};
31
//! Statically combine two values of type result_type using &&.
/**
 * Binary compile-time reduction operator: reduce<r1,r2>::result is r1 && r2.
 * \tparam result_type integral type of the operands and the result.
 */
template<typename result_type>
struct and_
{
  template<result_type r1, result_type r2>
  struct reduce
  {
    // constexpr members are implicitly inline since C++17, so no out-of-line
    // definition is required if the member is odr-used (unlike static const).
    static constexpr result_type result = r1 && r2;
  };
};
42
//! Statically combine two values of type result_type using +.
/**
 * Binary compile-time reduction operator: reduce<r1,r2>::result is r1 + r2.
 * \tparam result_type integral type of the operands and the result.
 */
template<typename result_type>
struct plus
{
  template<result_type r1, result_type r2>
  struct reduce
  {
    // constexpr members are implicitly inline since C++17, so no out-of-line
    // definition is required if the member is odr-used (unlike static const).
    static constexpr result_type result = r1 + r2;
  };
};
53
//! Statically combine two values of type result_type using -.
/**
 * Binary compile-time reduction operator: reduce<r1,r2>::result is r1 - r2.
 * \tparam result_type integral type of the operands and the result.
 */
template<typename result_type>
struct minus
{
  template<result_type r1, result_type r2>
  struct reduce
  {
    // constexpr members are implicitly inline since C++17, so no out-of-line
    // definition is required if the member is odr-used (unlike static const).
    static constexpr result_type result = r1 - r2;
  };
};
64
//! Statically combine two values of type result_type using *.
/**
 * Binary compile-time reduction operator: reduce<r1,r2>::result is r1 * r2.
 * \tparam result_type integral type of the operands and the result.
 */
template<typename result_type>
struct multiply
{
  template<result_type r1, result_type r2>
  struct reduce
  {
    // constexpr members are implicitly inline since C++17, so no out-of-line
    // definition is required if the member is odr-used (unlike static const).
    static constexpr result_type result = r1 * r2;
  };
};
75
//! Statically combine two values of type result_type by returning their minimum.
/**
 * Binary compile-time reduction operator: reduce<r1,r2>::result is min(r1,r2).
 * \tparam result_type integral type of the operands and the result.
 */
template<typename result_type>
struct min
{
  template<result_type r1, result_type r2>
  struct reduce
  {
    // constexpr members are implicitly inline since C++17, so no out-of-line
    // definition is required if the member is odr-used (unlike static const).
    static constexpr result_type result = r1 < r2 ? r1 : r2;
  };
};
86
//! Statically combine two values of type result_type by returning their maximum.
/**
 * Binary compile-time reduction operator: reduce<r1,r2>::result is max(r1,r2).
 * \tparam result_type integral type of the operands and the result.
 */
template<typename result_type>
struct max
{
  template<result_type r1, result_type r2>
  struct reduce
  {
    // constexpr members are implicitly inline since C++17, so no out-of-line
    // definition is required if the member is odr-used (unlike static const).
    static constexpr result_type result = r1 > r2 ? r1 : r2;
  };
};
97
98
99 namespace {
100
101 // implementation of the traversal algorithm
102
//! Helper that decides whether to apply the per-node calculation to the
//! current node. Default case (doVisit == false): skip the node and pass the
//! accumulated value through unchanged.
template<typename Node, typename Functor, typename Reduction, typename Functor::result_type current_value, typename TreePath, bool doVisit>
struct accumulate_node_helper
{

  typedef typename Functor::result_type result_type;

  //! The node is skipped, so the incoming accumulated value is the result.
  static const result_type result = current_value;

};
113
//! Helper that decides whether to apply the per-node calculation to the
//! current node. Specialization for doVisit == true: combine the accumulated
//! value with the functor's per-node result using the given Reduction.
template<typename Node, typename Functor, typename Reduction, typename Functor::result_type current_value, typename TreePath>
struct accumulate_node_helper<Node,Functor,Reduction,current_value,TreePath,true>
{

  typedef typename Functor::result_type result_type;

  //! result = reduce(current_value, Functor::visit<Node,TreePath>::result)
  static const result_type result = Reduction::template reduce<current_value,Functor::template visit<Node,TreePath>::result>::result;

};
124
//! Per node type algorithm struct. Prototype only; specialized below for each
//! node tag (leaf, power, composite).
template<typename Tree, typename Functor, typename Reduction, typename ParentChildReduction, typename Functor::result_type current_value, typename TreePath, typename Tag>
struct accumulate_value;
128
//! Leaf node specialization: there are no children, so the result is just the
//! (possibly skipped) per-node calculation, combined via the sibling Reduction.
template<typename LeafNode, typename Functor, typename Reduction, typename ParentChildReduction, typename Functor::result_type current_value, typename TreePath>
struct accumulate_value<LeafNode,Functor,Reduction,ParentChildReduction,current_value,TreePath,LeafNodeTag>
{

  typedef typename Functor::result_type result_type;

  // Functor::doVisit decides whether this leaf contributes at all.
  static const result_type result =

    accumulate_node_helper<LeafNode,Functor,Reduction,current_value,TreePath,Functor::template doVisit<LeafNode,TreePath>::value>::result;

};
141
//! Iteration over children of a composite node: fold the value over child i,
//! then recurse with i+1 until the end-of-iteration specialization (i == n).
template<typename Node, typename Functor, typename Reduction, typename ParentChildReduction, typename Functor::result_type current_value, typename TreePath, std::size_t i, std::size_t n>
struct accumulate_over_children
{

  typedef typename Functor::result_type result_type;

  // tree path of the i-th child (parent path with i appended)
  typedef decltype(push_back(TreePath{},index_constant<i>{})) child_tree_path;

  typedef typename Node::template Child<i>::Type child;

  // value accumulated over the subtree rooted at child i
  static const result_type child_result = accumulate_value<child,Functor,Reduction,ParentChildReduction,current_value,child_tree_path,NodeTag<child>>::result;

  // continue the fold with the next child
  static const result_type result = accumulate_over_children<Node,Functor,Reduction,ParentChildReduction,child_result,TreePath,i+1,n>::result;

};
158
159 //! end of iteration.
160 template<typename Node, typename Functor, typename Reduction, typename ParentChildReduction, typename Functor::result_type current_value, typename TreePath, std::size_t n>
161 struct accumulate_over_children<Node,Functor,Reduction,ParentChildReduction,current_value,TreePath,n,n>
162 {
163
164 typedef typename Functor::result_type result_type;
165
166 static const result_type result = current_value;
167
168 };
169
//! Generic composite node implementation. We are doing the calculation at
//! compile time and thus have to use static iteration for the PowerNode as well.
template<typename Node, typename Functor, typename Reduction, typename ParentChildReduction, typename Functor::result_type current_value, typename TreePath>
struct accumulate_value_generic_composite_node
{

  typedef typename Functor::result_type result_type;

  // first fold the value over all children, using the sibling Reduction ...
  static const result_type child_result = accumulate_over_children<Node,Functor,Reduction,ParentChildReduction,current_value,TreePath,0,StaticDegree<Node>::value>::result;

  // ... then combine the children's result with this node's own contribution,
  // using the ParentChildReduction (if doVisit selects this node at all)
  static const result_type result =
    accumulate_node_helper<Node,Functor,ParentChildReduction,child_result,TreePath,Functor::template doVisit<Node,TreePath>::value>::result;


};
185
//! PowerNode specialization: delegates to the generic composite implementation.
template<typename PowerNode, typename Functor, typename Reduction, typename ParentChildReduction, typename Functor::result_type current_value, typename TreePath>
struct accumulate_value<PowerNode,Functor,Reduction,ParentChildReduction,current_value,TreePath,PowerNodeTag>
  : public accumulate_value_generic_composite_node<PowerNode,Functor,Reduction,ParentChildReduction,current_value,TreePath>
{};
191
//! CompositeNode specialization: delegates to the generic composite implementation.
template<typename CompositeNode, typename Functor, typename Reduction, typename ParentChildReduction, typename Functor::result_type current_value, typename TreePath>
struct accumulate_value<CompositeNode,Functor,Reduction,ParentChildReduction,current_value,TreePath,CompositeNodeTag>
  : public accumulate_value_generic_composite_node<CompositeNode,Functor,Reduction,ParentChildReduction,current_value,TreePath>
{};
197
198 } // anonymous namespace
199
//! Statically accumulate a value over the nodes of a TypeTree.
/**
 * This struct implements an algorithm for iterating over a tree and
 * calculating an accumulated value at compile time.
 *
 * \tparam Tree The tree to iterate over.
 * \tparam Functor The compile-time functor used for visiting each node.
 *
 * This functor must implement the following interface:
 *
 * \code
 * struct AccumulationFunctor
 * {
 *
 *   // The result type of the overall computation.
 *   typedef ... result_type;
 *
 *   // Decide whether to include the given node in the calculation
 *   // or to skip it.
 *   template<typename Node, typename TreePath>
 *   struct doVisit
 *   {
 *     static const bool value = true;
 *   };
 *
 *   // Calculate the per-node result.
 *   template<typename Node, typename TreePath>
 *   struct visit
 *   {
 *     static const result_type result = ...;
 *   };
 *
 * };
 * \endcode
 *
 * \tparam Reduction The reduction operator used to accumulate the per-node
 *                   results.
 *
 * The reduction operator must implement the following interface:
 *
 * \code
 * template<typename result_type>
 * struct ReductionOperator
 * {
 *
 *   // combine two per-node results
 *   template<result_type r1, result_type r2>
 *   struct reduce
 *   {
 *     static const result_type result = ...;
 *   };
 *
 * };
 * \endcode
 *
 * \tparam startValue The starting value fed into the initial accumulation step.
 * \tparam ParentChildReduction The reduction operator used to combine the
 *                              accumulated result of a node's children with
 *                              the node's own per-node result. Defaults to
 *                              Reduction.
 */
template<typename Tree, typename Functor, typename Reduction, typename Functor::result_type startValue, typename ParentChildReduction = Reduction>
struct AccumulateValue
{

  //! The result type of the computation.
  typedef typename Functor::result_type result_type;

  //! The accumulated result of the computation.
  static const result_type result = accumulate_value<Tree,Functor,Reduction,ParentChildReduction,startValue,HybridTreePath<>,NodeTag<Tree>>::result;

};
268
//! Tag selecting a type reduction algorithm that visits the tree in
//! postorder and performs a flat reduction over the resulting type list
//! (the accumulation restarts from the policy's start_type whenever the
//! traversal descends to a node's first child).
struct flattened_reduction;

//! Tag selecting a type reduction algorithm that visits the tree in
//! postorder and performs a bottom-up reduction over the resulting type list
//! (the type accumulated within a subtree is always carried on unchanged).
struct bottom_up_reduction;
276
277 namespace {
278
279 // implementation of the traversal algorithm
280
//! Helper that decides whether to apply the per-node type calculation to the
//! current node. It deliberately takes the reduction as a separate parameter
//! instead of the whole Policy, because it is invoked with different
//! reductions (sibling vs. parent-child).
//! Default case (doVisit == false): pass the current type through unchanged.
template<typename Node, typename Functor, typename Reduction, typename current_type, typename TreePath, bool doVisit>
struct accumulate_type_node_helper
{

  typedef current_type type;

};
290
//! Helper that decides whether to apply the per-node type calculation to the
//! current node. Specialization for doVisit == true: reduce the current type
//! with the functor's per-node result type.
template<typename Node, typename Functor, typename Reduction, typename current_type, typename TreePath>
struct accumulate_type_node_helper<Node,Functor,Reduction,current_type,TreePath,true>
{

  // type = Reduction::reduce<current_type, Functor::visit<Node,TreePath>::type>::type
  typedef typename Reduction::template reduce<
    current_type,
    typename Functor::template visit<
      Node,
      TreePath
      >::type
    >::type type;

};
305
//! Per node type algorithm struct. Prototype only; specialized below for each
//! node tag (leaf, power, composite).
template<typename Tree, typename Policy, typename current_type, typename TreePath, typename Tag>
struct accumulate_type;
309
//! Leaf node specialization: no children, so the result is the (possibly
//! skipped) per-node type calculation, combined via the sibling reduction.
template<typename LeafNode, typename Policy, typename current_type, typename TreePath>
struct accumulate_type<LeafNode,Policy,current_type,TreePath,LeafNodeTag>
{

  typedef typename accumulate_type_node_helper<
    LeafNode,
    typename Policy::functor,
    typename Policy::sibling_reduction,
    current_type,
    TreePath,
    Policy::functor::template doVisit<
      LeafNode,
      TreePath>::value
    >::type type;

};
327
328
//! Switch for propagation of the current type down the tree, based on the
//! reduction strategy specified in the policy (flattened_reduction or
//! bottom_up_reduction). Evaluated each time the traversal descends to a child.
template<typename current_type, typename tree_path, typename start_type, typename reduction_strategy>
struct propagate_type_down_tree;
333
334 //! Always continue reduction with the current result type
335 template<typename current_type, typename tree_path, typename start_type>
336 struct propagate_type_down_tree<
337 current_type,
338 tree_path,
339 start_type,
340 bottom_up_reduction
341 >
342 {
343 typedef current_type type;
344 };
345
346 //! When descending to a new node, do not propagate current result type
347 template<typename current_type, typename tree_path, typename start_type>
348 struct propagate_type_down_tree<
349 current_type,
350 tree_path,
351 start_type,
352 flattened_reduction
353 >
354 {
355 typedef typename std::conditional<
356 TreePathBack<tree_path>::value == 0,
357 start_type,
358 current_type
359 >::type type;
360 };
361
362
//! Iteration over children of a composite node: accumulate the type over the
//! subtree of child i, then recurse with i+1 until i == n.
template<typename Node, typename Policy, typename current_type, typename TreePath, std::size_t i, std::size_t n>
struct accumulate_type_over_children
{

  // tree path of the i-th child (parent path with i appended)
  typedef decltype(push_back(TreePath{},index_constant<i>{})) child_tree_path;

  typedef typename Node::template Child<i>::Type child;

  // type accumulated over the subtree rooted at child i; the incoming type is
  // first filtered through the policy's reduction strategy
  typedef typename accumulate_type<
    child,
    Policy,
    // apply reduction choice (flat / hierarchic)
    typename propagate_type_down_tree<
      current_type,
      child_tree_path,
      typename Policy::start_type,
      typename Policy::reduction_strategy
      >::type,
    child_tree_path,
    NodeTag<child>
    >::type child_result_type;

  // continue the fold with the next child
  typedef typename accumulate_type_over_children<
    Node,
    Policy,
    child_result_type,
    TreePath,
    i+1,
    n
    >::type type;

};
396
397 //! end of iteration.
398 template<typename Node, typename Policy, typename current_type, typename TreePath, std::size_t n>
399 struct accumulate_type_over_children<Node,Policy,current_type,TreePath,n,n>
400 {
401
402 typedef current_type type;
403
404 };
405
406
//! Generic composite node implementation. We are doing the calculation at
//! compile time and thus have to use static iteration for the PowerNode as well.
template<typename Node, typename Policy, typename current_type, typename TreePath>
struct accumulate_type_generic_composite_node
{

  // first fold the type over all children (sibling reduction) ...
  typedef typename accumulate_type_over_children<
    Node,
    Policy,
    current_type,
    TreePath,
    0,
    StaticDegree<Node>::value
    >::type children_result_type;

  // ... then combine the children's result with this node's own contribution,
  // using the parent-child reduction (if doVisit selects this node at all)
  typedef typename accumulate_type_node_helper<
    Node,
    typename Policy::functor,
    typename Policy::parent_child_reduction,
    children_result_type,
    TreePath,
    Policy::functor::template doVisit<
      Node,
      TreePath
      >::value
    >::type type;

};
435
//! PowerNode specialization: delegates to the generic composite implementation.
template<typename PowerNode, typename Policy, typename current_type, typename TreePath>
struct accumulate_type<PowerNode,Policy,current_type,TreePath,PowerNodeTag>
  : public accumulate_type_generic_composite_node<PowerNode,Policy,current_type,TreePath>
{};
441
//! CompositeNode specialization: delegates to the generic composite implementation.
template<typename CompositeNode, typename Policy, typename current_type, typename TreePath>
struct accumulate_type<CompositeNode,Policy,current_type,TreePath,CompositeNodeTag>
  : public accumulate_type_generic_composite_node<CompositeNode,Policy,current_type,TreePath>
{};
447
448 } // anonymous namespace
449
450
/**
 * Policy for controlling the static type accumulation algorithm AccumulateType.
 * See the documentation of nested types for further information.
 *
 * \tparam Functor              The compile-time per-node visitor (see functor).
 * \tparam Reduction            The reduction operator applied between the
 *                              results of sibling nodes (see sibling_reduction).
 * \tparam StartType            The start type fed into the initial accumulation step.
 * \tparam ParentChildReduction The reduction operator applied between a parent
 *                              node and the accumulated result of its children.
 *                              Defaults to Reduction.
 * \tparam ReductionAlgorithm   The traversal strategy, either flattened_reduction
 *                              (default) or bottom_up_reduction.
 */
template<
  typename Functor,
  typename Reduction,
  typename StartType,
  typename ParentChildReduction = Reduction,
  typename ReductionAlgorithm = flattened_reduction
  >
struct TypeAccumulationPolicy
{

  /**
   * The compile-time functor used for visiting each node.
   *
   * This functor must implement the following interface:
   *
   * \code
   * struct AccumulationFunctor
   * {
   *
   *   // Decide whether to include the given node in the calculation
   *   // or to skip it.
   *   template<typename Node, typename TreePath>
   *   struct doVisit
   *   {
   *     static const bool value = true;
   *   };
   *
   *   // Calculate the per-node result.
   *   template<typename Node, typename TreePath>
   *   struct visit
   *   {
   *     typedef ... type;
   *   };
   *
   * };
   * \endcode
   */
  typedef Functor functor;

  /**
   * The reduction operator used to accumulate the per-node results of sibling nodes.
   *
   * The reduction operator must implement the following interface:
   *
   * \code
   * struct ReductionOperator
   * {
   *
   *   // combine two per-node results
   *   template<typename T1, typename T2>
   *   struct reduce
   *   {
   *     typedef ... type;
   *   };
   *
   * };
   * \endcode
   */
  typedef Reduction sibling_reduction;

  /**
   * The reduction operator used to combine the accumulated result of all
   * children of a node with the result of the parent node.
   *
   * This operator has the same interface as sibling_reduction.
   */
  typedef ParentChildReduction parent_child_reduction;

  /**
   * The initial result type.
   * This type will be fed as first operand to the reduction operators
   * when doing the first accumulation (and there is no calculated result
   * to accumulate with yet).
   */
  typedef StartType start_type;

  /**
   * The strategy for performing the type reduction with regard to the tree structure.
   * Valid values are flattened_reduction and bottom_up_reduction.
   */
  typedef ReductionAlgorithm reduction_strategy;
};
540
541
542 //! Statically accumulate a type over the nodes of a TypeTree.
543 /**
544 * This struct implements an algorithm for iterating over a tree and
545 * calculating an accumulated type at compile time.
546 *
547 * \tparam Tree The tree to iterate over.
548 * \tparam Policy Model of TypeAccumulationPolicy controlling the behavior
549 * of the algorithm.
550 */
551 template<typename Tree, typename Policy>
552 struct AccumulateType
553 {
554
555 //! The accumulated result of the computation.
556 typedef typename accumulate_type<
557 Tree,
558 Policy,
559 typename Policy::start_type,
560 HybridTreePath<>,
561 NodeTag<Tree>
562 >::type type;
563
564 };
565
566
567
568
569
570 /***************************************************/
571
572 namespace Experimental {
573 namespace Impl {
574
//! This is the specialization overload doing leaf traversal.
/**
 * Terminates the recursion: a leaf node has no children, so the visitor's
 * leaf() callback is invoked with the node, its tree path and the accumulated
 * value, and its return value becomes the result for this subtree.
 */
template<class T, class TreePath, class V, class U,
  std::enable_if_t<std::decay_t<T>::isLeaf, int> = 0>
auto hybridApplyToTree(T&& tree, TreePath treePath, V&& visitor, U&& current_val)
{
  return visitor.leaf(tree, treePath, std::forward<U>(current_val));
}
582
//! This is the general overload doing child traversal.
/**
 * Visits the node via visitor.pre(), folds the accumulated value over all
 * children (recursing into each), and finishes with visitor.post().
 * Depending on the tree's capabilities and the visitor's stated preference,
 * the children are iterated either with static indices (index_sequence) or
 * with a dynamic run-time loop.
 */
template<class T, class TreePath, class V, class U,
  std::enable_if_t<not std::decay_t<T>::isLeaf, int> = 0>
auto hybridApplyToTree(T&& tree, TreePath treePath, V&& visitor, U&& current_val)
{
  using Tree = std::remove_reference_t<T>;
  using Visitor = std::remove_reference_t<V>;
  auto pre_val = visitor.pre(tree, treePath, std::forward<U>(current_val));

  // check which type of traversal is supported by the tree
  using allowDynamicTraversal = Dune::Std::is_detected<Detail::DynamicTraversalConcept,Tree>;
  using allowStaticTraversal = Dune::Std::is_detected<Detail::StaticTraversalConcept,Tree>;

  // the tree must support either dynamic or static traversal
  static_assert(allowDynamicTraversal::value || allowStaticTraversal::value);

  // the visitor may specify preferred dynamic traversal
  using preferDynamicTraversal = std::bool_constant<Visitor::treePathType == TreePathType::dynamic>;

  // declare rule that applies visitor and current value to a child i. Returns next value
  auto apply_i = [&](auto&& value, const auto& i){
    auto&& child = tree.child(i);
    using Child = std::decay_t<decltype(child)>;

    auto val_before = visitor.beforeChild(tree, child, treePath, i, std::move(value));

    // visits between children: in() is called before every child except the first
    auto val_in = Hybrid::ifElse(
      Hybrid::equals(i,Indices::_0),
      [&](auto id){return std::move(val_before);},
      [&](auto id){return visitor.in(tree, treePath, std::move(val_before));}
    );

    // recurse into the child only if the visitor's VisitChild filter allows it
    constexpr bool visitChild = Visitor::template VisitChild<Tree,Child,TreePath>::value;
    auto val_visit = [&](){
      if constexpr (visitChild) {
        auto childTreePath = Dune::TypeTree::push_back(treePath, i);
        return hybridApplyToTree(child, childTreePath, visitor, std::move(val_in));
      }
      else
        return std::move(val_in);
    }();

    return visitor.afterChild(tree, child, treePath, i, std::move(val_visit));
  };

  // apply visitor to children
  auto in_val = [&](){
    if constexpr (allowStaticTraversal::value && not preferDynamicTraversal::value) {
      // get list of static indices
      auto indices = std::make_index_sequence<Tree::degree()>{};

      // unfold apply_i left to right
      return unpackIntegerSequence([&](auto... i) {
        /**
         * This one-liner is hard to digest:
         * * We need to call apply_i(pre_val,i) and pipe the result
         *   into the next call of apply_i on the next i. Such result may
         *   have any type so a simple loop or `Hybrid::forEach` does not
         *   do the trick.
         * * The left_fold function will apply the lambda consuming the
         *   first two arguments `r0=apply_i(pre_val,0)`, the result
         *   will be used as left argument of the next evaluation of the
         *   lambda `r1=apply_i(r0,1)` and so on.
         * * In other words, it pipes the binary operator to the left and
         *   this line essentially becomes `pre_val (apply_i) ... (apply_i) i`
         *   where the *binary operator* `apply_i` is unfolded from left
         *   to right according to c++17 fold expression rules.
         *   (we do not use c++17 fold expressions because gcc-7 fails to
         *   deduce the lambdas types here).
         * * Additionally, the direction of the fold here is important
         *   because it will evaluate indices from 0 to (degree-1).
         */
        return left_fold(std::move(apply_i),std::move(pre_val), i...);
      }, indices);

    } else {
      // unfold first child to get type
      auto i_val = apply_i(std::move(pre_val),std::size_t{0});
      // dynamically loop rest of the children to accumulate remaining values
      for(std::size_t i = 1; i < tree.degree(); i++)
        i_val = apply_i(i_val,i);
      return i_val;
    }
  }();

  return visitor.post(tree, treePath, in_val);
}
671
672 }
673
674 /**
675 * @brief Apply hybrid visitor to TypeTree.
676 * @details This function applies the given hybrid visitor to the
677 * tree in a depth-first traversal and returns the accumulated value.
678 * This method is able to accumulate/transform different types on both dynamic and
679 * static trees as long as they match on consecutive calls of the visitor.
680 * On calls between dynamic childen, the carried type should be
681 * consistent. On static children types may differ as long as they are expected
682 * on the next visited node.
683 *
684 * @note Tree may be const or non-const
685 * @note If the carried type is always the same, consider applyToTree
686 * which is less demanding on the compiler.
687 *
688 * @note The visitor must implement the interface laid out by DefaultHybridVisitor (most easily achieved by
689 * inheriting from it) and specify the required type of tree traversal preference (static or dynamic) by
690 * inheriting from either StaticTraversal or DynamicTraversal.
691 *
692 * @param tree The tree the visitor will be applied to.
693 * @param visitor The hybrid visitor to apply to the tree.
694 * @param init Initial value to pass to the visitor
695 * @return auto Result of the last call on the visitor
696 */
697 template<typename Tree, typename Visitor, typename Init>
698 auto hybridApplyToTree(Tree&& tree, Visitor&& visitor, Init&& init)
699 {
700 return Impl::hybridApplyToTree(tree, hybridTreePath(), visitor, init);
701 }
702
703 } // namespace Experimental
704
705 //! \} group Tree Traversal
706 } // namespace TypeTree
707 } //namespace Dune
708
709 #endif // DUNE_TYPETREE_ACCUMULATE_STATIC_HH