Heavy-Light Decomposition
Master heavy-light decomposition of trees, along with path-query and subtree-operation optimization techniques
Overview
Heavy-light decomposition is a powerful technique for handling tree path queries and modifications. By decomposing the tree into heavy chains joined by light edges, it transforms tree operations into sequence operations, supporting O(log²n) path queries and modifications, and O(log n) LCA queries.
Core Content
1. Heavy-Light Decomposition Basics
// Heavy-light decomposition implementation
#include <iostream>
#include <vector>
using namespace std;
// Heavy-light decomposition of a rooted tree.
//
// Linearizes the tree so that every heavy chain and every subtree occupies a
// contiguous interval of positions, enabling path and subtree operations to
// be answered with a sequence structure (e.g. a segment tree).
class HeavyLightDecomposition {
private:
    int n, timer;
    std::vector<std::vector<int>> adj;
    std::vector<int> parent, depth, heavy, head, pos, subtreeSize;

    // First pass: computes parent, depth, subtree sizes and the heavy child
    // of every node. Implemented iteratively so that degenerate (chain-
    // shaped) trees cannot overflow the call stack with O(n) recursion.
    void dfs1(int root) {
        parent[root] = -1;
        depth[root] = 0;
        std::vector<int> order;  // vertices in DFS discovery (preorder)
        order.reserve(n);
        std::vector<int> stack;
        stack.push_back(root);
        while (!stack.empty()) {
            int v = stack.back();
            stack.pop_back();
            order.push_back(v);
            for (int u : adj[v]) {
                if (u != parent[v]) {
                    parent[u] = v;
                    depth[u] = depth[v] + 1;
                    stack.push_back(u);
                }
            }
        }
        // Subtree sizes accumulate bottom-up: every child appears after its
        // parent in `order`, so a reverse sweep sees children first.
        for (int v : order) subtreeSize[v] = 1;
        for (int i = static_cast<int>(order.size()) - 1; i > 0; --i) {
            subtreeSize[parent[order[i]]] += subtreeSize[order[i]];
        }
        // Heavy child: the first child (in adjacency order) whose subtree is
        // strictly larger than every child seen before it; -1 for leaves.
        for (int v : order) {
            int maxSize = 0;
            for (int u : adj[v]) {
                if (u != parent[v] && subtreeSize[u] > maxSize) {
                    maxSize = subtreeSize[u];
                    heavy[v] = u;
                }
            }
        }
    }

    // Second pass: assigns chain heads and sequence positions in preorder,
    // always descending into the heavy child first so that each heavy chain
    // (and each subtree) is contiguous in `pos`. Also iterative.
    void dfs2(int root) {
        head[root] = root;
        std::vector<int> stack;
        stack.push_back(root);
        while (!stack.empty()) {
            int v = stack.back();
            stack.pop_back();
            pos[v] = timer++;
            // Light children each start a new chain; pushed in reverse
            // adjacency order so they are later popped in adjacency order.
            for (auto it = adj[v].rbegin(); it != adj[v].rend(); ++it) {
                int u = *it;
                if (u != parent[v] && u != heavy[v]) {
                    head[u] = u;
                    stack.push_back(u);
                }
            }
            // Heavy child is pushed last, hence visited immediately after v,
            // extending the current chain.
            if (heavy[v] != -1) {
                head[heavy[v]] = head[v];
                stack.push_back(heavy[v]);
            }
        }
    }
public:
    HeavyLightDecomposition(int size) : n(size), timer(0) {
        adj.resize(n);
        parent.resize(n);
        depth.resize(n);
        heavy.assign(n, -1);
        head.resize(n);
        pos.resize(n);
        subtreeSize.resize(n);
    }

    // Registers an undirected edge of the tree.
    void addEdge(int u, int v) {
        adj[u].push_back(v);
        adj[v].push_back(u);
    }

    // Preprocesses the tree rooted at `root`. Call after all edges were
    // added and before any query. Safe to call more than once (e.g. to
    // re-root): the position counter and heavy children are reset each time.
    void build(int root = 0) {
        timer = 0;            // fixes stale positions on repeated builds
        heavy.assign(n, -1);  // likewise reset heavy children
        dfs1(root);
        dfs2(root);
    }

    // Decomposes the u-v path into O(log n) inclusive position intervals.
    // Their union covers exactly the nodes of the path, each node once; the
    // intervals are not ordered along the path.
    std::vector<std::pair<int, int>> getPathIntervals(int u, int v) {
        std::vector<std::pair<int, int>> result;
        while (head[u] != head[v]) {
            // Climb from the chain whose head is deeper, taking the whole
            // chain prefix up to (and including) its head in one interval.
            if (depth[head[u]] > depth[head[v]]) {
                result.push_back({pos[head[u]], pos[u]});
                u = parent[head[u]];
            } else {
                result.push_back({pos[head[v]], pos[v]});
                v = parent[head[v]];
            }
        }
        // u and v now share a chain; positions along it are contiguous.
        if (pos[u] > pos[v]) std::swap(u, v);
        result.push_back({pos[u], pos[v]});
        return result;
    }

    // Lowest common ancestor of u and v via chain jumps, O(log n).
    int lca(int u, int v) {
        while (head[u] != head[v]) {
            if (depth[head[u]] > depth[head[v]]) {
                u = parent[head[u]];
            } else {
                v = parent[head[v]];
            }
        }
        return depth[u] < depth[v] ? u : v;
    }

    // Inclusive position interval covering exactly the subtree of v.
    std::pair<int, int> getSubtreeInterval(int v) {
        return {pos[v], pos[v] + subtreeSize[v] - 1};
    }

    // Position of v in the linearized sequence.
    int getPosition(int v) {
        return pos[v];
    }

    // Depth of v (the root has depth 0).
    int getDepth(int v) {
        return depth[v];
    }
};
// Path queries with segment tree
// Segment tree over n slots supporting range addition and range sum,
// with lazy propagation. All sums are kept in long long.
class SegmentTree {
private:
    std::vector<long long> sums, pending;  // node totals / postponed additions
    int n;

    // Forwards the postponed addition of `node` down to its two children.
    void pushDown(int node, int start, int mid, int end) {
        long long d = pending[node];
        if (d == 0) return;
        int left = 2 * node, right = 2 * node + 1;
        sums[left] += d * (mid - start + 1);
        pending[left] += d;
        sums[right] += d * (end - mid);
        pending[right] += d;
        pending[node] = 0;
    }

    // Adds `delta` to every element of [l, r] within node's range [start, end].
    void rangeAdd(int node, int start, int end, int l, int r, long long delta) {
        if (r < start || end < l) return;
        if (l <= start && end <= r) {
            // Fully covered: update this node's total now, postpone children.
            sums[node] += delta * static_cast<long long>(end - start + 1);
            pending[node] += delta;
            return;
        }
        int mid = start + (end - start) / 2;
        pushDown(node, start, mid, end);
        rangeAdd(2 * node, start, mid, l, r, delta);
        rangeAdd(2 * node + 1, mid + 1, end, l, r, delta);
        sums[node] = sums[2 * node] + sums[2 * node + 1];
    }

    // Sum over the intersection of [l, r] with node's range [start, end].
    long long rangeSum(int node, int start, int end, int l, int r) {
        if (r < start || end < l) return 0;
        if (l <= start && end <= r) return sums[node];
        int mid = start + (end - start) / 2;
        pushDown(node, start, mid, end);
        return rangeSum(2 * node, start, mid, l, r) +
               rangeSum(2 * node + 1, mid + 1, end, l, r);
    }
public:
    SegmentTree(int size) : n(size) {
        sums.assign(4 * n, 0);
        pending.assign(4 * n, 0);
    }

    // Adds `val` to every element of the inclusive range [l, r].
    void update(int l, int r, long long val) {
        rangeAdd(1, 0, n - 1, l, r, val);
    }

    // Returns the sum of the inclusive range [l, r].
    long long query(int l, int r) {
        return rangeSum(1, 0, n - 1, l, r);
    }
};
// Tree path operations
// Glue class: combines the heavy-light decomposition with a segment tree so
// that path and subtree operations become a handful of range operations.
class TreePathOperations {
private:
    HeavyLightDecomposition hld;
    SegmentTree seg;
public:
    TreePathOperations(int n) : hld(n), seg(n) {}

    // Registers an undirected tree edge.
    void addEdge(int u, int v) { hld.addEdge(u, v); }

    // Preprocesses the tree rooted at `root`; call before any operation.
    void build(int root = 0) { hld.build(root); }

    // Adds `val` to every node on the u-v path.
    void updatePath(int u, int v, long long val) {
        for (const auto& range : hld.getPathIntervals(u, v)) {
            seg.update(range.first, range.second, val);
        }
    }

    // Returns the sum of node values on the u-v path.
    long long queryPath(int u, int v) {
        long long total = 0;
        for (const auto& range : hld.getPathIntervals(u, v)) {
            total += seg.query(range.first, range.second);
        }
        return total;
    }

    // Adds `val` to every node in the subtree rooted at v.
    void updateSubtree(int v, long long val) {
        const auto range = hld.getSubtreeInterval(v);
        seg.update(range.first, range.second, val);
    }

    // Returns the sum of node values in the subtree rooted at v.
    long long querySubtree(int v) {
        const auto range = hld.getSubtreeInterval(v);
        return seg.query(range.first, range.second);
    }

    // Lowest common ancestor of u and v.
    int lca(int u, int v) { return hld.lca(u, v); }
};
int main() {
    // Build a small balanced test tree:
    //
    //          0
    //         / \
    //        1   2
    //       / \ / \
    //      3  4 5  6
    TreePathOperations tpo(7);
    tpo.addEdge(0, 1);
    tpo.addEdge(0, 2);
    tpo.addEdge(1, 3);
    tpo.addEdge(1, 4);
    tpo.addEdge(2, 5);
    tpo.addEdge(2, 6);
    tpo.build(0);

    // Path 3-1-0-2-5 has 5 nodes; adding 10 to each gives a path sum of 50.
    tpo.updatePath(3, 5, 10);
    std::cout << "Sum of path 3 to 5: "
              << tpo.queryPath(3, 5) << std::endl;

    // Subtree of 1 is {1, 3, 4}; after adding 5 to each, its sum is
    // (10+5) + (10+5) + 5 = 35.
    tpo.updateSubtree(1, 5);
    std::cout << "Sum of subtree 1: "
              << tpo.querySubtree(1) << std::endl;

    // Node 4 lies under 1 and node 6 under 2, so their LCA is the root 0.
    std::cout << "LCA of nodes 4 and 6: "
              << tpo.lca(4, 6) << std::endl;
    return 0;
}
Problem-Solving Tips
🎯 Decomposition Understanding
Understand heavy and light edges: a heavy edge joins a node to the child with the largest subtree, while every other child edge is light; maximal paths of heavy edges form the heavy chains.
⚡ Complexity Guarantee
Each node-to-root path crosses at most O(log n) light edges, guaranteeing logarithmic complexity of operations.
🔧 Implementation Points
Two DFS passes: first calculates heavy children, second assigns positions, combine with segment tree for efficient range operations.
🧮 Application Scenarios
Tree path queries/modifications, LCA queries, subtree operations - a universal technique for tree problems.