Given an integer array nums and an integer k, return the maximum sum of a non-empty subsequence of that array such that for every two consecutive integers in the subsequence, nums[i] and nums[j], where i < j, the condition j - i <= k is satisfied.
A subsequence of an array is obtained by deleting some number of elements (can be zero) from the array, leaving the remaining elements in their original order.
Example 1:
Input: nums = [10,2,-10,5,20], k = 2
Output: 37
Explanation: The subsequence is [10, 2, 5, 20].
Example 2:
Input: nums = [-1,-2,-3], k = 1
Output: -1
Explanation: The subsequence must be non-empty, so we choose the largest number.
Example 3:
Input: nums = [10,-2,-10,-5,20], k = 2
Output: 23
Explanation: The subsequence is [10, -2, -5, 20].
Constraints:
1 <= k <= nums.length <= 10^5
-10^4 <= nums[i] <= 10^4
Related Topics:
Dynamic Programming
If all the elements in A are zero or negative, the result is just the maximum value.
Otherwise, let dp[i] be the maximum result I can get on the subarray A[0..i] if I pick A[i].
The result is max(dp[i] | 0 <= i < N).
For dp[i], since we picked A[i], the previous pick (if there is one) must be one of A[i-k], A[i-k+1], ..., A[i-1].
So dp[i] = A[i] + max(0, max( dp[j] | i-k <= j < i )), where the 0 covers the case that A[i] starts the subsequence.
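For Example 1 (A = [10, 2, -10, 5, 20], k = 2), this recurrence yields dp = [10, 12, 2, 17, 37], so the answer is 37.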
Here is a TLE version.
// OJ: https://leetcode.com/problems/constrained-subsequence-sum/
// Author: github.com/lzl124631x
// Time: O(NK)
// Space: O(N)
// NOTE: this will get TLE
class Solution {
public:
int constrainedSubsetSum(vector<int>& A, int k) {
int maxVal = *max_element(begin(A), end(A));
if (maxVal <= 0) return maxVal;
int N = A.size(), ans = 0;
vector<int> dp(N);
for (int i = 0; i < N; ++i) {
int mx = 0;
for (int j = 1; j <= k; ++j) {
if (i - j < 0) break;
mx = max(mx, dp[i - j]);
}
dp[i] = mx + A[i];
ans = max(ans, dp[i]);
}
return ans;
}
};
One optimization: we can break out of the j loop as soon as A[i - j] > 0, because in that case dp[i - j] is already at least as large as every dp value farther back in the window, so scanning further cannot improve the result.
// OJ: https://leetcode.com/problems/constrained-subsequence-sum/
// Author: github.com/lzl124631x
// Time: O(NK)
// Space: O(N)
class Solution {
public:
int constrainedSubsetSum(vector<int>& A, int k) {
int maxVal = *max_element(begin(A), end(A));
if (maxVal <= 0) return maxVal;
int N = A.size(), ans = 0;
vector<int> dp(N);
for (int i = 0; i < N; ++i) {
int mx = 0;
for (int j = 1; j <= k; ++j) {
if (i - j < 0) break;
mx = max(mx, dp[i - j]);
if (A[i - j] > 0) break;
}
dp[i] = mx + A[i];
ans = max(ans, dp[i]);
}
return ans;
}
};
Same idea as Solution 1, but here we keep the dp values of the current window in a multiset so that the maximum can be read in O(log K) time instead of by a linear scan. The multiset is seeded with 0 so that A[i] may start a new subsequence, and s.erase(s.find(dp[i - k])) removes exactly one copy of the value that slides out of the window.
// OJ: https://leetcode.com/problems/constrained-subsequence-sum/
// Author: github.com/lzl124631x
// Time: O(NlogK)
// Space: O(N)
class Solution {
public:
int constrainedSubsetSum(vector<int>& A, int k) {
int maxVal = *max_element(begin(A), end(A));
if (maxVal <= 0) return maxVal;
int N = A.size(), ans = 0;
multiset<int> s{0};
vector<int> dp(N);
for (int i = 0; i < N; ++i) {
int sum = *s.rbegin() + A[i];
if (i >= k) s.erase(s.find(dp[i - k]));
s.insert(sum);
dp[i] = sum;
ans = max(ans, sum);
}
return ans;
}
};
Assume the dp values in the window are 3, 1, 2. Popping 3 would change the maximum, but popping 1 never will, since the 2 to its right is larger and stays in the window at least as long. So it suffices to keep only the 3, 2 subsequence of dp values, which is monotonically decreasing.
We can store the indexes of this decreasing sequence in a deque<int> mx. Then dp[mx.front()] is the maximum dp value within the window dp[i-k..i-1].
dp[i] = max(0, dp[mx.front()]) + A[i]
Once dp[i] is computed, we pop from the back of mx every index whose dp value is smaller than or equal to dp[i].
Then we push i into the mx deque.
Don't forget to pop the out-of-range index: when mx.front() == i - k - 1, it has slid out of the window and we should pop it from the front.
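As a concrete trace on Example 1 (dp = [10, 12, 2, 17, 37], k = 2), the deque of indexes evolves as mx = [0], [1], [1, 2], [3], [4] after i = 0..4: dp[1] = 12 evicts index 0, dp[2] = 2 is appended behind index 1, and dp[3] = 17 evicts both before dp[4] = 37 evicts index 3.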
// OJ: https://leetcode.com/problems/constrained-subsequence-sum/
// Author: github.com/lzl124631x
// Time: O(N)
// Space: O(N)
class Solution {
public:
int constrainedSubsetSum(vector<int>& A, int k) {
int maxVal = *max_element(begin(A), end(A));
if (maxVal <= 0) return maxVal;
int N = A.size(), ans = 0;
deque<int> mx;
vector<int> dp(N);
for (int i = 0; i < N; ++i) {
if (mx.size() && mx.front() == i - k - 1) mx.pop_front();
dp[i] = max(0, mx.size() ? dp[mx.front()] : 0) + A[i];
while (mx.size() && dp[mx.back()] <= dp[i]) mx.pop_back();
mx.push_back(i);
ans = max(ans, dp[i]);
}
return ans;
}
};
Following the referenced post below, we can do the DP in place on A and keep a decreasing deque of the positive dp values inside the current window (values rather than indexes), which reduces the extra space to O(K).
// OJ: https://leetcode.com/problems/constrained-subsequence-sum/
// Author: github.com/lzl124631x
// Time: O(N)
// Space: O(K)
// Ref: https://leetcode.com/problems/constrained-subsequence-sum/discuss/597751/JavaC%2B%2BPython-O(N)-Decreasing-Deque
class Solution {
public:
int constrainedSubsetSum(vector<int>& A, int k) {
deque<int> q;
int ans = A[0];
for (int i = 0; i < A.size(); ++i) {
A[i] += q.size() ? q.front() : 0;
ans = max(ans, A[i]);
while (q.size() && A[i] > q.back()) q.pop_back();
if (A[i] > 0) q.push_back(A[i]);
if (i >= k && q.size() && q.front() == A[i - k]) q.pop_front();
}
return ans;
}
};
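For a quick local sanity check, a minimal driver like the sketch below (not part of the original solutions) runs the three examples; the include and using directive go at the top of the file, above whichever Solution class you paste in, since the classes rely on them being provided by the judge.
// Hypothetical sanity-check driver, not part of the original solutions.
// Put these two lines at the top of the file, then any one Solution class, then main().
#include <bits/stdc++.h>
using namespace std;

int main() {
    Solution sol;
    vector<int> a1{10, 2, -10, 5, 20}, a2{-1, -2, -3}, a3{10, -2, -10, -5, 20};
    cout << sol.constrainedSubsetSum(a1, 2) << "\n"; // expected 37
    cout << sol.constrainedSubsetSum(a2, 1) << "\n"; // expected -1
    cout << sol.constrainedSubsetSum(a3, 2) << "\n"; // expected 23
}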