import sys

def main():
    data = sys.stdin
    n = int(data.readline())
    adj = [[] for _ in range(n+1)]
    for _ in range(n-1):
        u, v = map(int, data.readline().split())
        adj[u].append(v)
        adj[v].append(u)
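    # Plan for the rest of main(), as implemented below: a post-order pass
    # computes the longest downward path from each vertex (dp1) and how many
    # deepest endpoints realise it (count1); a rerooting pass computes the
    # same quantities in the direction of the parent (dp2/count2); a final
    # scan over vertices of degree >= 3 combines the deepest directions into
    # f_max and counts the optimal configurations.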
    # dp1: max depth downwards, count1: number of leaves at that depth
    dp1 = [0]*(n+1)
    count1 = [0]*(n+1)
    parent = [0]*(n+1)
    # iterative post-order DFS for dp1
    stack = [(1, 0, 0)]
    while stack:
        u, p, state = stack.pop()
        if state == 0:
            parent[u] = p
            stack.append((u, p, 1))
            for v in adj[u]:
                if v == p: continue
                stack.append((v, u, 0))
        else:
            # compute dp1[u], count1[u]
            maxd = 0
            cnt = 1  # default leaf count
            first = True
            for v in adj[u]:
                if v == p: continue
                d = dp1[v] + 1
                if first:
                    maxd = d
                    cnt = count1[v]
                    first = False
                else:
                    if d > maxd:
                        maxd = d
                        cnt = count1[v]
                    elif d == maxd:
                        cnt += count1[v]
            dp1[u] = maxd
            count1[u] = cnt if not first else 1
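    # Hand-checked example: on the star with edges 1-2, 1-3, 1-4 rooted at 1,
    # dp1 = [_, 1, 0, 0, 0] and count1 = [_, 3, 1, 1, 1]: the centre sees
    # three deepest endpoints at depth 1, while each leaf has depth 0 with a
    # single endpoint (itself).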
    # dp2: max depth upwards (outside subtree), count2: count leaves
    dp2 = [0]*(n+1)
    count2 = [0]*(n+1)
    dp2[1] = 0
    count2[1] = 1
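    # Rerooting recurrence used by the pass below: for a child w of u,
    #   dp2[w] = 1 + max over u's other directions x != w of depth(u, x),
    # where depth(u, x) is dp2[u] when x is u's parent and dp1[x] + 1
    # otherwise; count2[w] sums the endpoint counts of the directions that
    # attain that maximum. Prefix/suffix maxima over adj[u] answer each
    # "exclude one neighbor" query in O(1), keeping the pass linear.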
    # iterative pre-order for dp2
    stack = [1]
    while stack:
        u = stack.pop()
        p = parent[u]
        L = adj[u]
        m = len(L)
        # build depth and leaf-count arrays for the neighbors;
        # we need them for all neighbors to compute prefix/suffix maxima
        if m > 1:
            d = [0]*m
            cnt_arr = [0]*m
            # fill d and cnt_arr
            for i, w in enumerate(L):
                if w == p:
                    d[i] = dp2[u]
                    cnt_arr[i] = count2[u]
                else:
                    d[i] = dp1[w] + 1
                    cnt_arr[i] = count1[w]
            # build prefix maxima
            prefix_d = [0]*m
            prefix_c = [0]*m
            prefix_d[0] = d[0]
            prefix_c[0] = cnt_arr[0]
            for i in range(1, m):
                if d[i] > prefix_d[i-1]:
                    prefix_d[i] = d[i]
                    prefix_c[i] = cnt_arr[i]
                elif d[i] == prefix_d[i-1]:
                    prefix_d[i] = prefix_d[i-1]
                    prefix_c[i] = prefix_c[i-1] + cnt_arr[i]
                else:
                    prefix_d[i] = prefix_d[i-1]
                    prefix_c[i] = prefix_c[i-1]
            # build suffix maxima
            suffix_d = [0]*m
            suffix_c = [0]*m
            suffix_d[m-1] = d[m-1]
            suffix_c[m-1] = cnt_arr[m-1]
            for i in range(m-2, -1, -1):
                if d[i] > suffix_d[i+1]:
                    suffix_d[i] = d[i]
                    suffix_c[i] = cnt_arr[i]
                elif d[i] == suffix_d[i+1]:
                    suffix_d[i] = suffix_d[i+1]
                    suffix_c[i] = suffix_c[i+1] + cnt_arr[i]
                else:
                    suffix_d[i] = suffix_d[i+1]
                    suffix_c[i] = suffix_c[i+1]
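            # To exclude neighbor index i, combine the prefix maxima over
            # [0, i-1] with the suffix maxima over [i+1, m-1]; when both
            # sides reach the same depth, their endpoint counts are added.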
            # now compute dp2 for children
            for i, w in enumerate(L):
                if w == p: continue
                # exclude index i
                if i == 0:
                    best_d = suffix_d[1]
                    best_c = suffix_c[1]
                elif i == m-1:
                    best_d = prefix_d[m-2]
                    best_c = prefix_c[m-2]
                else:
                    ld = prefix_d[i-1]
                    lc = prefix_c[i-1]
                    rd = suffix_d[i+1]
                    rc = suffix_c[i+1]
                    if ld > rd:
                        best_d = ld; best_c = lc
                    elif rd > ld:
                        best_d = rd; best_c = rc
                    else:
                        best_d = ld; best_c = lc + rc
                # (the m == 1 base case is handled in the else branch below; here m > 1)
                dp2[w] = best_d + 1
                count2[w] = best_c
                stack.append(w)
        else:
            # u has a single neighbor; if it is a child (not the parent), its upward depth is 1
            for w in L:
                if w == p: continue
                dp2[w] = 1
                count2[w] = 1
                stack.append(w)
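    # Hand-checked continuation of the star example: dp2 = [_, 0, 2, 2, 2]
    # and count2 = [_, 1, 2, 2, 2], since from any leaf the longest upward
    # path runs through the centre to one of the other two leaves
    # (length 2, two choices of endpoint).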
    # compute max difficulty f_max and collect oriented candidates
    f_max = 0
    oriented = []  # list of (u, v, D, H)
    # iterate through nodes
    for u in range(1, n+1):
        # need at least 3 neighbors to exclude one and pick two
        if len(adj[u]) < 3:
            continue
        # find top3 neighbor entries by depth
        ent3 = []  # list of (depth, neighbor)
        pu = parent[u]
        # manual top3 selection
        for w in adj[u]:
            # compute depth[u][w]
            if w == pu:
                dv = dp2[u]
            else:
                dv = dp1[w] + 1
            if len(ent3) < 3:
                ent3.append((dv, w))
                if len(ent3) == 3:
                    # sort descending
                    ent3.sort(reverse=True)
            else:
                # compare with the smallest entry ent3[2] (list kept sorted descending)
                if ent3[2][0] < dv:
                    # replace and restore descending order
                    ent3[2] = (dv, w)
                    # bubble up
                    if ent3[2][0] > ent3[1][0]:
                        ent3[1], ent3[2] = ent3[2], ent3[1]
                    if ent3[1][0] > ent3[0][0]:
                        ent3[0], ent3[1] = ent3[1], ent3[0]
        if len(ent3) < 3:
            continue
        # ent3 is sorted descending by depth
        ent3.sort(reverse=True)
        d0, v0 = ent3[0]
        d1, v1 = ent3[1]
        d2, v2 = ent3[2]
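        # Keeping only the three deepest directions suffices: after removing
        # any one neighbor v, the two deepest remaining directions are still
        # found among v0, v1, v2.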
        # now, for each neighbor v, exclude it and compute the candidate f
        for v in adj[u]:
            # H_candidate = depth[u][v]
            if v == pu:
                H = dp2[u]
            else:
                H = dp1[v] + 1
            # best two depths among the remaining directions (v excluded)
            bd1 = d1 if v == v0 else d0
            bd2 = d2 if (v == v0 or v == v1) else d1
            D = bd1 + bd2
            # ensure two valid directions remain
            if bd2 < 0:
                continue
            f = H * D
            if f > f_max:
                f_max = f
                oriented = [(u, v, D, H)]
            elif f == f_max:
                oriented.append((u, v, D, H))
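    # `oriented` now holds every (centre u, excluded direction v) pair that
    # attains f_max; the same underlying path can show up under several
    # excluded directions at the same centre, which the grouping below
    # compensates for (the division by k - 2).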
    # if no oriented candidates, only one path in chain
    if not oriented:
        print(0, 1)
        return
    # group oriented by u
    by_u = {}
    for u, v, D, H in oriented:
        if u not in by_u:
            by_u[u] = {'vs': [], 'D': D, 'H': H}
            # same D,H for u assumed
        by_u[u]['vs'].append(v)
    total_count = 0
    # for each u group, compute unique P_count
    for u, info in by_u.items():
        v_list = info['vs']
        k = len(v_list)
        # sum oriented P counts
        sum_pc = 0
        pu = parent[u]
        for v in v_list:
            # scan neighbors excluding v to compute P_count[u][v]
            best1_d = -1
            best1_branch = 0
            best1_sum = 0
            best1_sq = 0
            best2_d = -1
            best2_sum = 0
            for w in adj[u]:
                if w == v:
                    continue
                # depth from u via w
                if w == pu:
                    dv = dp2[u]
                    cv = count2[u]
                else:
                    dv = dp1[w] + 1
                    cv = count1[w]
                if dv > best1_d:
                    # shift to second
                    best2_d = best1_d
                    best2_sum = best1_sum
                    # new best1
                    best1_d = dv
                    best1_branch = 1
                    best1_sum = cv
                    best1_sq = cv*cv
                elif dv == best1_d:
                    best1_branch += 1
                    best1_sum += cv
                    best1_sq += cv*cv
                elif dv > best2_d:
                    best2_d = dv
                    best2_sum = cv
                elif dv == best2_d:
                    best2_sum += cv
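            # Pair counting used just below: if t >= 2 branches tie at the
            # best depth with endpoint counts summing to S and squared counts
            # summing to Q, the number of endpoint pairs drawn from two
            # different branches is (S*S - Q) // 2; with a unique best branch
            # it is paired with the second-best depth instead, giving
            # best1_sum * best2_sum pairs.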
            # compute P_count
            if best1_branch >= 2:
                # combinations across different branches
                pc = (best1_sum*best1_sum - best1_sq) // 2
            else:
                # only one best branch, pair with second
                pc = best1_sum * best2_sum
            sum_pc += pc
        # unique count for this u
        # each P counted (k-2) times
        if k > 2:
            unique = sum_pc // (k - 2)
        else:
            unique = 0
        total_count += unique
    print(f_max, total_count)
if __name__ == "__main__":
main()
if 'threading' in globals():
threading.Thread(target=main).start()
Compilation message (stdout)
Compiling 'road.py'...
=======
adding: __main__.pyc (deflated 45%)
=======