I solved this problem using Sparse Table (Range Max Query data structure).
My Implementation:
// Sparse-table (range-max) solution: O(n log n) build, O(1) per query.
// For a fixed i, the best partner j > i with x_j - x_i <= k is the one
// maximizing y_j + x_j; adding y_i - x_i gives y_i + y_j + |x_i - x_j|.
class Solution {
// Sparse table over A[i][1] + A[i][0] answering max-on-[l, r] queries.
struct ST {
vector<vector<int>> T;   // T[i][k] = max of y + x over [i, i + 2^k - 1]
int MAXK;
// floor(log2(x)) for x >= 1 computed in pure integer arithmetic.
// Avoids std::log2 on ints, whose double rounding can be off by one
// near exact powers of two.
static int ilog2(int x)
{
int k = 0;
while ((1 << (k + 1)) <= x) ++k;
return k;
}
ST(vector<vector<int>> &A)
{
int n = A.size();
MAXK = ilog2(n) + 1;
T.assign(n, vector<int>(MAXK));
for(int i = 0; i < n; i++)
T[i][0] = A[i][1] + A[i][0];   // base level: y + x of each point
for(int k = 1; k < MAXK; k++)
{
for(int i = 0; i < n; i++)
{
// Combine two half-length windows; skip when the right
// half starts past the end (those entries are never queried).
int m = i + (1 << (k-1));
if(m < n)
T[i][k] = max(T[i][k-1], T[m][k-1]);
}
}
}
// Max of y + x over [l, r] (requires l <= r) using two overlapping
// power-of-two windows.
int maximum(int l, int r)
{
int k = ilog2(r - l + 1);
int m = r - (1 << k) + 1;
return max(T[l][k], T[m][k]);
}
};
public:
// points are sorted by x; returns max of y_i + y_j + |x_i - x_j| over
// pairs with i != j and |x_i - x_j| <= k.
int findMaxValueOfEquation(vector<vector<int>>& points, int k) {
int n = points.size(), ans = INT_MIN, r = 0;
ST st(points);
for(int i = 0; i < n - 1; i++)
{
// Advance r to the last index with x[r] <= x[i] + k. r only
// moves forward, so the scan is O(n) amortized over all i.
r = max(r, i + 1);
while(r < n and points[r][0] <= points[i][0] + k) r++;
r--;
if(i == r) continue;   // no valid partner to the right of i
ans = max(ans, points[i][1] - points[i][0] + st.maximum(i + 1, r));
}
return ans;
}
};
And then I found this beautiful O(n) implementation with a stack!
Stack
// Monotonic-deque solution, O(n): the deque holds candidate indices i
// (to the left of j) ordered by decreasing y - x, so the front is always
// the best partner for the current point.
int findMaxValueOfEquation(vector<vector<int>>& p, int k, int res = INT_MIN) {
    deque<int> cand;
    for (int j = 0; j < p.size(); ++j) {
        const int xj = p[j][0], yj = p[j][1];
        // Evict candidates whose x is more than k to the left of x_j.
        while (!cand.empty() && xj - p[cand.front()][0] > k)
            cand.pop_front();
        // Front maximizes y_i - x_i among valid candidates; pair it with j.
        if (!cand.empty()) {
            const int i = cand.front();
            res = max(res, p[i][1] - p[i][0] + yj + xj);
        }
        // Keep y - x strictly decreasing front-to-back before inserting j.
        while (!cand.empty() && p[cand.back()][1] - p[cand.back()][0] < yj - xj)
            cand.pop_back();
        cand.push_back(j);
    }
    return res;
}
Well, strictly speaking, it’s a deque (double-ended queue). But I don’t think it’s necessary to pop the elements from the front — we could just let them stay there(?).
So the question is: is there a general “type” of problem where a stack (or deque) can be used like this? And can you post links to such problems?