1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
use std::sync::{
    atomic::{
        AtomicBool,
        Ordering,
    },
    Arc,
    Mutex,
    MutexGuard,
};

use freya_native_core::NodeId;
use itertools::sorted;
use rustc_hash::{
    FxHashMap,
    FxHashSet,
};
use torin::prelude::Area;

use crate::Layers;

/// Shared, thread-safe registry of nodes pending repaint: maps each dirty
/// [`NodeId`] to the [`DirtyTarget`] describing how far its invalidation spreads.
/// Cloning is cheap (it clones the inner `Arc`), so this can be handed to
/// multiple owners that all observe the same dirty set.
#[derive(Clone, Default)]
pub struct CompositorDirtyNodes(Arc<Mutex<FxHashMap<NodeId, DirtyTarget>>>);

impl CompositorDirtyNodes {
    /// Mark `node_id` as needing a repaint, affecting only the node itself.
    pub fn invalidate(&self, node_id: NodeId) {
        self.invalidate_with_target(node_id, DirtyTarget::Itself);
    }

    /// Mark `node_id` as needing a repaint with an explicit [`DirtyTarget`].
    /// A later call for the same node overwrites the previously stored target.
    pub fn invalidate_with_target(&self, node_id: NodeId, target: DirtyTarget) {
        self.0
            .lock()
            .unwrap()
            .insert(node_id, target);
    }

    /// Lock the dirty-node map and hand back the guard so the caller can
    /// inspect or drain the pending entries.
    pub fn get(&self) -> MutexGuard<FxHashMap<NodeId, DirtyTarget>> {
        self.0.lock().unwrap()
    }
}

/// Decides, per frame, which layers/nodes actually need to be redrawn.
#[derive(Default)]
pub struct Compositor {
    // One-shot flag: when set (via `reset`), the next `run` repaints every
    // node regardless of the dirty set, then clears the flag.
    full_render: Arc<AtomicBool>,
}

/// How far an invalidation spreads from a dirty node.
// `Debug` added so the variant can appear in logs/diagnostics; `Eq` is free
// given the derived `PartialEq` on a fieldless enum.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DirtyTarget {
    /// Only the node itself must be repainted.
    Itself,
    /// The node and all of its nested children must be repainted.
    ItselfAndNested,
}

impl Compositor {
    /// Compute which nodes must be repainted this frame.
    ///
    /// Drains the pending `dirty_nodes`, expands the dirty set through
    /// `get_affected` (following children when the target is
    /// [`DirtyTarget::ItselfAndNested`]), then walks every layer twice to
    /// collect the nodes whose areas are invalidated or overlap the
    /// accumulated dirty area.
    ///
    /// Returns the layers that need re-rendering and the unioned dirty area
    /// (`None` when nothing needs painting). Consumes the one-shot
    /// full-render flag set by [`Compositor::reset`].
    pub fn run(
        &self,
        dirty_nodes: &CompositorDirtyNodes,
        get_affected: impl Fn(NodeId, bool) -> Vec<NodeId>,
        get_area: impl Fn(NodeId) -> Option<Area>,
        layers: &Layers,
    ) -> (Layers, Option<Area>) {
        let mut dirty_nodes = dirty_nodes.get();
        // Seed the invalidated set with every initially-dirty node and take
        // ownership of the pending entries as a work queue.
        let (mut invalidated_nodes, mut dirty_nodes) = {
            (
                FxHashSet::from_iter(dirty_nodes.keys().copied()),
                dirty_nodes.drain().collect::<Vec<(NodeId, DirtyTarget)>>(),
            )
        };

        // Propagate invalidation to affected nodes.
        while let Some((node_id, target)) = dirty_nodes.pop() {
            let traverse_children = target == DirtyTarget::ItselfAndNested;
            for affected_id in get_affected(node_id, traverse_children) {
                // Mark at push time: `insert` returning true means the node is
                // newly invalidated, so it is queued exactly once instead of
                // being re-queued (and re-expanded) every time it shows up in
                // another node's affected list.
                if invalidated_nodes.insert(affected_id) {
                    dirty_nodes.push((affected_id, DirtyTarget::ItselfAndNested));
                }
            }
        }

        let rendering_layers = Layers::default();
        let mut dirty_area: Option<Area> = None;

        let full_render = self.full_render.load(Ordering::Relaxed);

        let mut run_check = |layer: i16, nodes: &[NodeId]| {
            for node_id in nodes {
                // Nodes without a known area cannot be painted or measured.
                let Some(area) = get_area(*node_id) else {
                    continue;
                };
                let is_invalidated = full_render || invalidated_nodes.contains(node_id);
                let is_area_invalidated = dirty_area
                    .map(|dirty_area| dirty_area.intersects(&area))
                    .unwrap_or_default();

                if is_invalidated || is_area_invalidated {
                    rendering_layers.insert_node_in_layer(*node_id, layer);
                    // Only directly-invalidated nodes grow the dirty area;
                    // nodes merely overlapping it are repainted but do not
                    // expand it further.
                    if is_invalidated {
                        if let Some(dirty_area) = &mut dirty_area {
                            *dirty_area = dirty_area.union(&area);
                        } else {
                            dirty_area = Some(area)
                        }
                    }
                }
            }
        };

        // Two passes: the first (bottom-to-top) grows the dirty area, the
        // second (top-to-bottom) catches nodes that overlap an area only
        // discovered after they were first visited.
        for (layer, nodes) in sorted(layers.layers().iter()) {
            run_check(*layer, nodes);
        }

        for (layer, nodes) in sorted(layers.layers().iter()).rev() {
            run_check(*layer, nodes);
        }

        // The full-render flag is one-shot: clear it after this run.
        self.full_render.store(false, Ordering::Relaxed);

        (rendering_layers, dirty_area)
    }

    /// Request a full repaint on the next call to [`Compositor::run`].
    pub fn reset(&self) {
        self.full_render.store(true, Ordering::Relaxed)
    }
}