package com.thealgorithms.tree;

/**
 * Heavy-Light Decomposition (HLD) implementation in Java.
 *
 * HLD is used to efficiently handle path queries on trees, such as maximum, sum, or updates.
 * It decomposes the tree into heavy and light chains; any path between two nodes crosses
 * O(log N) chains, so a path query costs O(log^2 N) when each chain segment is answered
 * by a segment tree.
 *
 * Wikipedia Reference: https://en.wikipedia.org/wiki/Heavy-light_decomposition
 *
 * Author: Nithin U.
 * Github: https://github.com/NithinU2802
 */

import java.util.ArrayList;
import java.util.List;
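
// Example usage (illustrative sketch; the 5-node tree, root, and values below are
// assumptions made for demonstration and are not part of this class):
//
//   int[] values = {0, 10, 20, 30, 40, 50};             // values[i] is the value of node i
//   HeavyLightDecomposition hld = new HeavyLightDecomposition(5);
//   hld.addEdge(1, 2);
//   hld.addEdge(1, 3);
//   hld.addEdge(2, 4);
//   hld.addEdge(2, 5);
//   hld.initialize(1, values);                          // root the tree at node 1
//   int maxOnPath = hld.queryMaxInPath(4, 3);           // maximum value on the path 4 -> 3 (40 here)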

public class HeavyLightDecomposition {
    private List<Integer>[] tree;  // adjacency list of the tree
    private int[] parent;          // parent[i] = parent of node i
    private int[] depth;           // depth[i] = depth of node i below the root
    private int[] subtreeSize;     // subtreeSize[i] = size of the subtree rooted at i
    private int[] chainHead;       // chainHead[i] = topmost node of the chain containing i
    private int[] position;        // position[i] = index of node i in decomposition order
    private int[] nodeValue;       // node values laid out by decomposition position
    private int[] segmentTree;     // segment tree over positions, storing range maxima
    private int positionIndex;     // next free position assigned during decomposition

    public int getPosition(int index) {
        return position[index];
    }

    public int getPositionIndex() {
        return positionIndex;
    }

    @SuppressWarnings("unchecked")
    public HeavyLightDecomposition(int n) {
        tree = new ArrayList[n + 1]; // Causes "unchecked or unsafe operations" warning
        parent = new int[n + 1];
        depth = new int[n + 1];
        subtreeSize = new int[n + 1];
        chainHead = new int[n + 1];
        position = new int[n + 1];
        nodeValue = new int[n + 1];
        segmentTree = new int[4 * (n + 1)];

        for (int i = 0; i <= n; i++) {
            tree[i] = new ArrayList<>();
            chainHead[i] = -1;
        }
        positionIndex = 0;
    }

    /**
     * Adds an edge to the tree.
     */
    public void addEdge(int u, int v) {
        tree[u].add(v);
        tree[v].add(u);
    }

    /**
     * First DFS to calculate subtree sizes and determine heavy children.
     */
    private void dfsSize(int node, int parentNode) {
        parent[node] = parentNode;
        subtreeSize[node] = 1;

        for (int child : tree[node]) {
            if (child != parentNode) {
                depth[child] = depth[node] + 1;
                dfsSize(child, node);
                subtreeSize[node] += subtreeSize[child];
            }
        }
    }

    /**
     * Second DFS to perform Heavy-Light Decomposition.
     */
    private void decompose(int node, int head) {
        chainHead[node] = head;
        position[node] = positionIndex++;

        // Pick the child with the largest subtree as the heavy child.
        int heavyChild = -1;
        int maxSubtreeSize = -1;

        for (int child : tree[node]) {
            if (child != parent[node] && subtreeSize[child] > maxSubtreeSize) {
                heavyChild = child;
                maxSubtreeSize = subtreeSize[child];
            }
        }

        // The heavy child continues the current chain.
        if (heavyChild != -1) {
            decompose(heavyChild, head);
        }

        // Each light child starts a new chain with itself as the head.
        for (int child : tree[node]) {
            if (child != parent[node] && child != heavyChild) {
                decompose(child, child);
            }
        }
    }

    /**
     * Builds a Segment Tree to handle path queries efficiently.
     */
    private void buildSegmentTree(int node, int start, int end) {
        if (start == end) {
            segmentTree[node] = nodeValue[start];
            return;
        }

        int mid = (start + end) / 2;
        buildSegmentTree(2 * node, start, mid);
        buildSegmentTree(2 * node + 1, mid + 1, end);

        segmentTree[node] = Math.max(segmentTree[2 * node], segmentTree[2 * node + 1]);
    }

    /**
     * Updates a node's value in the Segment Tree.
     */
    public void updateSegmentTree(int node, int start, int end, int index, int value) {
        if (start == end) {
            segmentTree[node] = value;
            return;
        }

        int mid = (start + end) / 2;
        if (index <= mid) {
            updateSegmentTree(2 * node, start, mid, index, value);
        } else {
            updateSegmentTree(2 * node + 1, mid + 1, end, index, value);
        }

        segmentTree[node] = Math.max(segmentTree[2 * node], segmentTree[2 * node + 1]);
    }

    /**
     * Queries the Segment Tree for the maximum value in a given range.
     */
    public int querySegmentTree(int node, int start, int end, int left, int right) {
        if (left > end || right < start) {
            return Integer.MIN_VALUE;
        }

        if (left <= start && end <= right) {
            return segmentTree[node];
        }

        int mid = (start + end) / 2;
        int leftQuery = querySegmentTree(2 * node, start, mid, left, right);
        int rightQuery = querySegmentTree(2 * node + 1, mid + 1, end, left, right);

        return Math.max(leftQuery, rightQuery);
    }
155
+
156
+ /**
157
+ * Queries the maximum value in the path from node u to node v.
158
+ */
159
+ public int queryMaxInPath (int u , int v ) {
160
+ int result = Integer .MIN_VALUE ;
161
+
162
+ while (chainHead [u ] != chainHead [v ]) {
163
+ if (depth [chainHead [u ]] < depth [chainHead [v ]]) {
164
+ int temp = u ;
165
+ u = v ;
166
+ v = temp ;
167
+ }
168
+
169
+ result = Math .max (result , querySegmentTree (1 , 0 , positionIndex - 1 , position [chainHead [u ]], position [u ]));
170
+ u = parent [chainHead [u ]];
171
+ }
172
+
173
+ if (depth [u ] > depth [v ]) {
174
+ int temp = u ;
175
+ u = v ;
176
+ v = temp ;
177
+ }
178
+
179
+ result = Math .max (result , querySegmentTree (1 , 0 , positionIndex - 1 , position [u ], position [v ]));
180
+ return result ;
181
+ }

    /**
     * Initializes the HLD structure and Segment Tree.
     * values[i] is interpreted as the initial value of node i.
     */
    public void initialize(int root, int[] values) {
        dfsSize(root, -1);
        decompose(root, root);
        // Remap node values into decomposition order so each chain occupies a contiguous range.
        for (int i = 0; i < values.length; i++) {
            nodeValue[position[i]] = values[i];
        }
        buildSegmentTree(1, 0, positionIndex - 1);
    }

}