#include <bits/stdc++.h>
using namespace std;
// -------------------- Data Structure -------------------------
// For a given distance d (from some “root”) we store:
// cnt = number of nodes at that distance,
// cand = if cnt==1 then the candidate speciality from that unique node; otherwise, cand = -1.
// One slot of a distance distribution.
struct PairEntry {
int cnt;  // number of nodes at this distance
int cand; // speciality of the unique node when cnt == 1; -1 otherwise
};
// Combine two PairEntry values that occupy the same distance slot.
// Counts add; a candidate speciality survives only while the combined
// count is exactly one (no unique node exists otherwise).
inline PairEntry combinePair(const PairEntry &a, const PairEntry &b) {
    const int total = a.cnt + b.cnt;
    const int uniqueCand =
        (total == 1) ? (a.cnt == 1 ? a.cand : b.cand) : -1;
    return PairEntry{total, uniqueCand};
}
// Given two distributions A and B (vectors indexed by distance),
// return their element-wise combination via combinePair.
// If one vector is shorter, its missing slots are treated as {0, -1}.
// Fix: use size_t throughout instead of narrowing max(size_t,size_t)
// into an int and then comparing signed vs. unsigned in the loop.
vector<PairEntry> combineDist(const vector<PairEntry>& A, const vector<PairEntry>& B) {
    const size_t n = max(A.size(), B.size());
    vector<PairEntry> res(n, {0, -1});
    for (size_t i = 0; i < n; i++){
        const PairEntry a = (i < A.size() ? A[i] : PairEntry{0, -1});
        const PairEntry b = (i < B.size() ? B[i] : PairEntry{0, -1});
        res[i] = combinePair(a, b);
    }
    return res;
}
// Merge the distribution of a child into the parent's distribution in place.
// The child's slots are shifted by 1 because every child node is one edge
// further from the parent than from the child itself.
// Fix: keep all sizes/indices as size_t — the original compared
// base.size() (size_t) against an int and mixed signed/unsigned in the loop.
void mergeShift(vector<PairEntry>& base, const vector<PairEntry>& child) {
    const size_t shift = 1;
    const size_t newSize = max(base.size(), child.size() + shift);
    if (base.size() < newSize)
        base.resize(newSize, {0, -1});
    for (size_t i = 0; i < child.size(); i++){
        const size_t pos = i + shift;
        base[pos] = combinePair(base[pos], child[i]);
    }
}
// --------------------- Global Variables ---------------------
int N, M; // N = number of cities; M is read from input but never used in this file — TODO confirm its role
vector<int> C; // Speciality for each city (0-indexed)
vector<vector<int>> adj; // Tree: adj[u] holds neighbors of u.
// dp[x] stores the distance distribution for the subtree rooted at x:
// dp[x][d].cnt = number of subtree nodes at distance d from x, and when
// that count is exactly 1, dp[x][d].cand is that unique node's speciality
// (otherwise -1).
vector<vector<PairEntry>> dp;
// ans[x] will be the answer for city x (filled in by dfs_up).
vector<int> ans;
// --------------------- DFS 1: Down-DFS -------------------------
// Fill dp[x] for the subtree rooted at x.
// dp[x] starts with a single slot {1, C[x]} (x itself at distance 0);
// each child's distribution is then folded in, shifted one edge further.
void dfs_down(int x, int parent) {
    dp[x] = {{1, C[x]}};            // distance 0: the city x itself
    for (int nb : adj[x]) {
        if (nb == parent)
            continue;               // never walk back toward the root
        dfs_down(nb, x);
        mergeShift(dp[x], dp[nb]);  // child nodes sit one edge further from x
    }
}
// --------------------- DFS 2: Up–DFS (Re–rooting) -------------------------
// For node x, given an "up" distribution (from nodes not in x’s subtree),
// we compute the full distribution F[x] = combineDist(up, dp[x]).
// Then we count the number of distinct specialities coming from unique layers (d>=1).
// Afterwards, using a prefix–suffix technique we compute an up–distribution for each child.
void dfs_up(int x, int parent, const vector<PairEntry>& up) {
// Full distribution for x = parent's up + dp[x].
vector<PairEntry> full = combineDist(up, dp[x]);
// For every distance d>=1, if full[d].cnt==1 then that unique node (at distance d) provides its speciality.
// Count distinct specialities (if the same speciality appears in two layers, count it once).
unordered_set<int> uniqSpecs;
for (int d = 1; d < (int)full.size(); d++){
if(full[d].cnt == 1)
uniqSpecs.insert(full[d].cand);
}
ans[x] = uniqSpecs.size();
// Prepare the list of children (neighbors except parent)
vector<int> children;
for (int v : adj[x])
if(v != parent)
children.push_back(v);
int k = children.size();
// Build prefix and suffix arrays for the contributions from x’s children.
// For each child v, we consider its branch distribution: dp[v] shifted by 1.
vector< vector<PairEntry> > pref(k+1), suf(k+1);
pref[0].clear();
for (int i = 0; i < k; i++){
int v = children[i];
vector<PairEntry> branch;
if(dp[v].empty()){
branch.clear();
} else {
branch.resize(dp[v].size() + 1, {0, -1});
for (int j = 0; j < (int)dp[v].size(); j++){
branch[j+1] = dp[v][j];
}
}
pref[i+1] = combineDist(pref[i], branch);
}
suf[k].clear();
for (int i = k-1; i >= 0; i--){
int v = children[i];
vector<PairEntry> branch;
if(dp[v].empty()){
branch.clear();
} else {
branch.resize(dp[v].size() + 1, {0, -1});
for (int j = 0; j < (int)dp[v].size(); j++){
branch[j+1] = dp[v][j];
}
}
suf[i] = combineDist(branch, suf[i+1]);
}
// For each child, compute its up distribution and DFS.
for (int i = 0; i < k; i++){
int v = children[i];
// x’s own contribution (as a distribution: one node at distance 0).
vector<PairEntry> baseVec;
baseVec.push_back({1, C[x]});
// Siblings’ contribution: merge contributions from children before and after v.
vector<PairEntry> siblings = combineDist(pref[i], suf[i+1]);
// R = (x itself) + (siblings).
vector<PairEntry> R = combineDist(baseVec, siblings);
// Combine with parent's up–distribution.
vector<PairEntry> newUp = combineDist(up, R);
// Now shift newUp by 1 so that distances are measured from child v.
vector<PairEntry> shiftedNewUp;
if(newUp.empty()){
shiftedNewUp.clear();
} else {
shiftedNewUp.resize(newUp.size() + 1, {0, -1});
for (int j = 0; j < (int)newUp.size(); j++){
shiftedNewUp[j+1] = newUp[j];
}
}
dfs_up(v, x, shiftedNewUp);
}
}
// ------------------------- Main -------------------------
int main(){
ios::sync_with_stdio(false);
cin.tie(nullptr);
cin >> N >> M;
adj.assign(N, vector<int>());
dp.assign(N, vector<PairEntry>());
ans.assign(N, 0);
C.resize(N);
// Read the N-1 roads.
for (int i = 0; i < N-1; i++){
int u, v;
cin >> u >> v;
u--; v--; // convert to 0-indexed.
adj[u].push_back(v);
adj[v].push_back(u);
}
// Read the speciality for each city.
for (int i = 0; i < N; i++){
cin >> C[i];
}
// First DFS: compute the dp distribution for each subtree.
dfs_down(0, -1);
// Second DFS: re–rooting.
// For the root, there is no parent's up–distribution.
vector<PairEntry> up;
up.clear();
dfs_up(0, -1, up);
// Output the answer for each city.
for (int i = 0; i < N; i++){
cout << ans[i] << "\n";
}
return 0;
}
// (Removed: non-source residue — an online-judge results table fragment
//  ("# | Verdict | ... | Fetching results...") accidentally pasted after
//  main(); it is not C++ and prevented the file from compiling.)