Submission #1311587

# | Time | Username | Problem | Language | Result | Execution time | Memory
1311587 | eyangch | Feast (NOI19_feast) | Pypy 3 | 100 / 100 | 361 ms | 106340 KiB
import sys

# Rebind input to the raw stdin readline for fast I/O
# (competitive-programming idiom; intentionally shadows the builtin).
input = sys.stdin.readline

# N: number of array elements; K: maximum number of subarrays allowed.
N, K = map(int, input().split())
# A: the N integers (may contain negatives).
A = list(map(int, input().split()))

def solve_opt(l, arr=None):
    """Lagrangian-relaxation helper (the "Aliens trick").

    Given a per-subarray penalty ``l`` (lambda), compute the best total
    score obtainable by choosing any number of disjoint, non-empty
    subarrays, where each chosen subarray contributes (its sum - l).

    Args:
        l: non-negative integer penalty charged per subarray used.
        arr: sequence of integers to solve over; defaults to the
            module-level input array ``A`` (backward-compatible).

    Returns:
        A tuple ``(score, -num_subarr)``: the maximum penalized score,
        tie-broken toward the fewest subarrays achieving it (the count is
        negated so that plain tuple ``max`` prefers fewer subarrays).
    """
    if arr is None:
        arr = A
    if not arr:
        # No elements: best is to take nothing.
        return (0, 0)
    # dp0: best (score, -count) with no subarray currently open at i.
    # dp1: best (score, -count) with a subarray open (ending at i).
    dp0 = (0, 0)
    dp1 = (arr[0] - l, -1)
    for i in range(1, len(arr)):
        tmp0 = max(dp0, dp1)  # stay closed, or close the open subarray
        tmp1 = max(
            (dp1[0] + arr[i], dp1[1]),          # extend the open subarray
            (dp0[0] + arr[i] - l, dp0[1] - 1),  # open a new subarray, pay l
        )
        dp0, dp1 = tmp0, tmp1
    # Max score overall, with the minimum subarray count for that score.
    return max(dp0, dp1)

# Binary search for the smallest penalty lambda at which the optimal
# unconstrained solution uses at most K subarrays.
low, high = 0, int(3e14)
while low < high:
    lam = (low + high) // 2
    used = -solve_opt(lam)[1]  # subarrays used at this penalty
    if used > K:
        low = lam + 1
    else:
        high = lam

# low is the optimal lambda; add K * lambda back to undo the penalty.
answer = solve_opt(low)[0] + K * low
print(answer)

Compilation message (stdout)

Compiling 'feast.py'...

=======
  adding: __main__.pyc (deflated 34%)

=======
# | Verdict | Execution time | Memory | Grader output
Fetching results...
# | Verdict | Execution time | Memory | Grader output
Fetching results...
# | Verdict | Execution time | Memory | Grader output
Fetching results...
# | Verdict | Execution time | Memory | Grader output
Fetching results...
# | Verdict | Execution time | Memory | Grader output
Fetching results...
# | Verdict | Execution time | Memory | Grader output
Fetching results...
# | Verdict | Execution time | Memory | Grader output
Fetching results...