Submission #1167036

#               1167036
Username        thdh__
Problem         New Home (APIO18_new_home)
Language        C++20
Result          0 / 100
Execution time  5098 ms
Memory          142128 KiB
#include <bits/stdc++.h>
using namespace std;

// We will compress all coordinates (store locations, query locations,
// plus sentinel -2e8 and +2e8). Then build a segment tree on [0..m-1].
//
// Each update is for an interval (prv, nxt) belonging to some store type.
// The code "places" that interval in exactly one leaf, determined by
// comparing (prv + nxt)/2 to the node's 'midCoord'. At the leaf, we insert
// (prv, nxt) into two multisets: one tracking all "prv"s, and one tracking
// all "nxt"s. Then minv = smallest prv in that leaf, maxv = largest nxt.
//
// For queries, we do the same approach as original:
//   answer = max( l - queryMin(l, +∞),
//                 queryMax(-∞, l) - l )
// If not all k store types are open, answer = -1.
//
// This replicates the "bumbaclot" logic but uses an array-based segtree
// with coordinate compression.

static const long long NEG_INF = -200000001; // slightly < -2e8
static const long long POS_INF =  200000001; // slightly > 2e8

// -----------------------------------------------------------
// Segment tree node
struct Node {
    // index of children in seg[] array
    int leftChild = 0, rightChild = 0;

    // each internal node covers [start..end) in compressed indices
    // we store a "midCoord" = (vals[start] + vals[end-1]) / 2
    // used to decide whether an interval's midpoint belongs left or right
    long long midCoord = 0;

    // If leaf, we store a multiset of "prv" and a multiset of "nxt".
    // Then minv = minimum of all prv in this leaf,
    //      maxv = maximum of all nxt in this leaf.
    multiset<long long> mins;
    multiset<long long> maxs;

    // Aggregates from children:
    long long minv = +LLONG_MAX;
    long long maxv = -LLONG_MIN;

    // We'll set isLeaf if (end - start) == 1
    bool isLeaf = false;
};

// We'll have up to 4*m nodes if m is the number of distinct coordinates.
static const int MAXN = 800000;
Node seg[MAXN];    // The segment tree array
int segAlloc = 1;  // root = 1

// We'll store the compressed coords in vals[].
vector<long long> vals;

// -----------------------------------------------------------
// Build the segment tree in array form
// idx = current node index in seg[]
// range [start..end) of the compressed array
void buildTree(int idx, int start, int end) {
    if (end - start == 1) {
        // Leaf
        seg[idx].isLeaf = true;
        seg[idx].leftChild = seg[idx].rightChild = 0;
        seg[idx].mins.clear();
        seg[idx].maxs.clear();
        seg[idx].minv = LLONG_MAX;
        seg[idx].maxv = -LLONG_MAX;
        // midCoord not used in leaf
        return;
    }
    seg[idx].isLeaf = false;
    int mid = (start + end) >> 1;
    seg[idx].leftChild = ++segAlloc;
    buildTree(seg[idx].leftChild, start, mid);
    seg[idx].rightChild = ++segAlloc;
    buildTree(seg[idx].rightChild, mid, end);

    // midCoord is the midpoint in the original coordinate space:
    //   (vals[start] + vals[end-1]) / 2
    long long leftVal = vals[start], rightVal = vals[end - 1];
    seg[idx].midCoord = (leftVal + rightVal) >> 1;

    // minv, maxv aggregated from children
    seg[idx].minv = min(seg[seg[idx].leftChild].minv, seg[seg[idx].rightChild].minv);
    seg[idx].maxv = max(seg[seg[idx].leftChild].maxv, seg[seg[idx].rightChild].maxv);
}

// -----------------------------------------------------------
// Insert or erase an interval (prv, nxt) at node idx covering [start..end).
// We'll decide left or right by comparing midpoint = (prv + nxt)/2
// to seg[idx].midCoord.
//
// If it's a leaf, we insert/erase (prv,nxt) from that leaf's multisets.
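// A small worked example of the routing and of the answer formula from the
// header comment (illustrative numbers, not taken from any test case):
// suppose one type has consecutive stores at prv = 2 and nxt = 10, so the
// gap's midpoint is (2 + 10) / 2 = 6 and the pair (2, 10) is filed in
// whichever leaf the routing below selects for 6.
//   - A query at l = 3 lies left of the midpoint, so its distance to this
//     type is l - prv = 1; assuming that leaf sits at a coordinate >= 3,
//     this is exactly what l - queryMin(l, +∞) picks up.
//   - A query at l = 8 lies right of the midpoint, so its distance is
//     nxt - l = 2, picked up by queryMax(-∞, l) - l, assuming the leaf
//     sits at a coordinate <= 8.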
void update(int idx, int start, int end, long long prv, long long nxt, bool add) {
    if (seg[idx].isLeaf) {
        // Insert or erase from the leaf
        if (add) {
            seg[idx].mins.insert(prv);
            seg[idx].maxs.insert(nxt);
        } else {
            // guaranteed to exist, per problem logic
            seg[idx].mins.erase(seg[idx].mins.find(prv));
            seg[idx].maxs.erase(seg[idx].maxs.find(nxt));
        }
        // Recompute minv, maxv in this leaf
        if (seg[idx].mins.empty()) {
            seg[idx].minv = LLONG_MAX;
            seg[idx].maxv = -LLONG_MAX;
        } else {
            seg[idx].minv = *seg[idx].mins.begin();
            seg[idx].maxv = *seg[idx].maxs.rbegin();
        }
        return;
    }

    // Not a leaf => pick child
    long long midVal = seg[idx].midCoord;
    long long midPoint = (prv + nxt) >> 1; // integer division
    if (midPoint <= midVal) {
        // go left
        update(seg[idx].leftChild, start, (start+end)/2, prv, nxt, add);
    } else {
        // go right
        update(seg[idx].rightChild, (start+end)/2, end, prv, nxt, add);
    }

    // Recompute this node's minv, maxv from children
    seg[idx].minv = min(seg[seg[idx].leftChild].minv, seg[seg[idx].rightChild].minv);
    seg[idx].maxv = max(seg[seg[idx].leftChild].maxv, seg[seg[idx].rightChild].maxv);
}

// -----------------------------------------------------------
// Query the minimum "prv" in the subrange [L, R] of original coords
// We'll pass the node range in compressed indices [start..end).
// We'll skip nodes that do not intersect [L, R].
long long queryMin(int idx, int start, int end, long long L, long long R) {
    // If [vals[start], vals[end-1]] is entirely disjoint from [L,R], skip
    if (vals[end-1] < L || vals[start] > R) {
        return LLONG_MAX;
    }
    // If [vals[start], vals[end-1]] is fully inside [L,R], take this node's minv
    if (L <= vals[start] && vals[end-1] <= R) {
        return seg[idx].minv;
    }
    // If leaf, partially overlapping => just return its minv
    if (seg[idx].isLeaf) {
        return seg[idx].minv;
    }
    // Otherwise, split
    int mid = (start + end) >> 1;
    long long leftAns  = queryMin(seg[idx].leftChild,  start, mid, L, R);
    long long rightAns = queryMin(seg[idx].rightChild, mid,   end, L, R);
    return min(leftAns, rightAns);
}

// -----------------------------------------------------------
// Query the maximum "nxt" in the subrange [L, R] of original coords
long long queryMax(int idx, int start, int end, long long L, long long R) {
    if (vals[end-1] < L || vals[start] > R) {
        return -LLONG_MAX;
    }
    if (L <= vals[start] && vals[end-1] <= R) {
        return seg[idx].maxv;
    }
    if (seg[idx].isLeaf) {
        return seg[idx].maxv;
    }
    int mid = (start + end) >> 1;
    long long leftAns  = queryMax(seg[idx].leftChild,  start, mid, L, R);
    long long rightAns = queryMax(seg[idx].rightChild, mid,   end, L, R);
    return max(leftAns, rightAns);
}

// -----------------------------------------------------------
int main(){
    ios::sync_with_stdio(false);
    cin.tie(nullptr);

    int n, k, q;
    cin >> n >> k >> q;

    // We'll collect all events: open(0), close(1), query(2).
    // Also collect coordinates for compression: store locations, query locations.
    vector<array<long long,4>> events;
    events.reserve(n*2 + q);
    vals.reserve(n*2 + q + 4);

    // We also want sentinel -2e8, +2e8:
    vals.push_back(NEG_INF);
    vals.push_back(POS_INF);

    // read store info
    for (int i=0; i<n; i++){
        long long x, t, a, b;
        cin >> x >> t >> a >> b;
        t--; // zero-based type
        // open at time=a => event (a,0,x,t)
        events.push_back({a, 0, x, t});
        // close at time=b+1 => event (b+1,1,x,t)
        events.push_back({b+1, 1, x, t});
        // for compression
        vals.push_back(x);
    }

    vector<long long> ans(q, 0);

    // read queries
    for (int i=0; i<q; i++){
        long long l, y;
        cin >> l >> y;
        // event => (y,2,l,i)
        events.push_back({y, 2, l, i});
        vals.push_back(l);
    }

    // sort events by time, then by type
    sort(events.begin(), events.end(), [](auto &A, auto &B){
        if (A[0] != B[0]) return A[0] < B[0];
        return A[1] < B[1];
    });

    // coordinate compress
    sort(vals.begin(), vals.end());
    vals.erase(unique(vals.begin(), vals.end()), vals.end());
    int m = (int)vals.size();

    // build segment tree on [0..m)
    buildTree(1, 0, m);

    // track each type's open store locations in a balanced tree
    vector< multiset<long long> > storeOfType(k);
    vector<int> openCount(k, 0);
    int countTypesWithOpen = 0;

    // We also put sentinel MIN, MAX in each type's set from the start,
    // so that intervals (MIN, MAX) exist. But initially we do NOT add
    // that interval to the segment tree unless the type actually opens a store.
    // The original code does: s[t] = {MIN, MAX}.
    // Then the segment tree is updated with (MIN,MAX) once the type "exists."
    for(int t=0; t<k; t++){
        storeOfType[t].insert(NEG_INF);
        storeOfType[t].insert(POS_INF);
    }

    // A small helper function to add or remove the interval (prv,nxt)
    auto doUpdate = [&](long long prv, long long nxt, bool add){
        update(1, 0, m, prv, nxt, add);
    };

    for (auto &e : events) {
        long long time = e[0];
        int etype = (int)e[1];
        long long xOrL = e[2];
        int tOrQ = (int)e[3];

        if (etype == 0) {
            // open store of type t
            int t = tOrQ;
            auto &S = storeOfType[t];
            // find position of xOrL in that set
            // neighbors => prv < x < nxt
            auto it = S.lower_bound(xOrL);
            // 'it' is the first element >= xOrL
            long long nxt = *it;
            long long prv = *prev(it);
            // remove (prv, nxt)
            doUpdate(prv, nxt, false);
            // add (prv, xOrL) and (xOrL, nxt)
            doUpdate(prv, xOrL, true);
            doUpdate(xOrL, nxt, true);
            S.insert(xOrL);
            // if first real store for type t => openCount[t] was 0 => increment
            if (openCount[t] == 0) {
                countTypesWithOpen++;
            }
            openCount[t]++;
        } else if (etype == 1) {
            // close store
            int t = tOrQ;
            auto &S = storeOfType[t];
            // find xOrL in S
            auto it = S.find(xOrL);
            // neighbors => (prv, x), (x, nxt) exist, remove them, add (prv,nxt)
            long long prv = *prev(it);
            long long nxt = *next(it);
            doUpdate(prv, xOrL, false);
            doUpdate(xOrL, nxt, false);
            doUpdate(prv, nxt, true);
            S.erase(it);
            openCount[t]--;
            if (openCount[t] == 0) {
                countTypesWithOpen--;
            }
        } else {
            // query
            int qidx = tOrQ;
            // check if all k types are open
            if (countTypesWithOpen < k) {
                ans[qidx] = -1;
            } else {
                long long l = xOrL;
                // get min boundary in [l, +∞)
                long long mn = queryMin(1, 0, m, l, POS_INF);
                // get max boundary in [-∞, l]
                long long mx = queryMax(1, 0, m, NEG_INF, l);
                // answer = max( l - mn, mx - l )
                long long res = max(l - mn, mx - l);
                ans[qidx] = res;
            }
        }
    }

    // print answers
    for (auto &val : ans) {
        cout << val << "\n";
    }
    return 0;
}
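To make the input handling in main() concrete, here is a tiny hand-made instance in the format the program reads (first n, k, q; then one line "x t a b" per store; then one line "l y" per query). The numbers are invented for illustration, and the expected answers are worked out by hand from the task statement (the largest, over the k types, of the distance to the nearest open store of that type, or -1 if some type has no open store), not by running this submission:

    2 2 3
    3 1 1 10
    9 2 2 4
    5 3
    5 5
    3 2

In year 3 both types are open, so the query at location 5 sees distances 2 (type 1, store at 3) and 4 (type 2, store at 9), giving 4. In year 5 the type-2 store, open only in years 2..4, is gone, so the second query is -1. The third query, location 3 in year 2, gives distances 0 and 6, hence 6.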

Compilation message (stderr)

new_home.cpp:42:22: warning: integer overflow in expression of type 'long long int' results in '-9223372036854775808' [-Woverflow]
   42 |     long long maxv = -LLONG_MIN;
      |                      ^
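The warning is worth decoding: LLONG_MIN has no representable positive counterpart, so negating it overflows and, as the message shows, wraps back to -9223372036854775808, which happens to be the "very small" seed the initializer was presumably after. A standalone illustration (not part of the submission; the variable names are made up for the demo):

#include <climits>
#include <cstdio>

int main() {
    // Same expression the warning points at: -LLONG_MIN is not representable,
    // GCC reports -Woverflow and the value wraps back to LLONG_MIN.
    long long fromWarning = -LLONG_MIN;
    // An overflow-free way to seed a max-aggregate, matching what the rest of
    // the submission already uses in buildTree() and update():
    long long sentinel = -LLONG_MAX;
    std::printf("%lld %lld\n", fromWarning, sentinel);
    return 0;
}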
# | Verdict | Execution time | Memory | Grader output
Fetching results...