Problem source
Given a matrix, find the maximum sum of a rectangle (sub-matrix) whose sum is no larger than k, i.e. the sum closest to k without exceeding it.
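For example, with matrix = [[1, 0, 1], [0, -2, 3]] and k = 2, the answer is 2: the 2×2 rectangle [[0, 1], [-2, 3]] sums to exactly 2, and no rectangle achieves a larger sum that still stays within k.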
After some thought, the only approach I could come up with is the most naive one: compute the sum of every rectangle directly and compare. The code is as follows:
class Solution {
public:
    int maxSumSubmatrix(vector<vector<int>>& matrix, int k) {
        int rows = matrix.size(), cols = matrix[0].size();
        // sums[i][j] is the sum of the rectangle from (0,0) to (i-1,j-1), inclusive.
        vector<vector<int>> sums(rows + 1, vector<int>(cols + 1, 0));
        for (int i = 1; i <= rows; i++)
            for (int j = 1; j <= cols; j++)
                sums[i][j] = sums[i-1][j] + sums[i][j-1] - sums[i-1][j-1] + matrix[i-1][j-1];
        int dif = INT_MAX, res = 0;
        // Enumerate every rectangle with bottom-right corner (i,j) and top-left corner (x,y).
        for (int i = 1; i <= rows; i++)
            for (int j = 1; j <= cols; j++) {
                for (int x = 1; x <= i; x++)
                    for (int y = 1; y <= j; y++) {
                        int endWithXYsums = sums[i][j] - sums[x-1][j] - sums[i][y-1] + sums[x-1][y-1];
                        if (endWithXYsums <= k && dif > k - endWithXYsums) {
                            dif = k - endWithXYsums;
                            res = endWithXYsums;
                            if (dif == 0) // small pruning: stop once the sum equals k exactly, though it doesn't seem to help much
                                return endWithXYsums;
                        }
                    }
            }
        return res;
    }
};
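A minimal driver to sanity-check the solution, assuming the Solution class above is pasted into the same file; the test matrix and expected value are just the illustration from earlier, not part of the original post (the same driver also works for the optimized version further down).

#include <vector>
#include <set>
#include <climits>
#include <iostream>
using namespace std;

// ... paste one of the Solution classes from this post here ...

int main() {
    vector<vector<int>> matrix = {{1, 0, 1}, {0, -2, 3}};
    Solution sol;
    cout << sol.maxSumSubmatrix(matrix, 2) << endl; // expected output: 2
    return 0;
}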
It gets AC, but the complexity is fairly high: even though the prefix-sum table makes each rectangle sum O(1), enumerating every rectangle still costs O(rows² · cols²). I tried to work out which computations could be shared instead of redone, but couldn't come up with anything on my own...
The overall idea of the better solution: sweep two column pointers from left to right, and for each pair of columns accumulate the sum of every row lying between them, which collapses the problem to one dimension. Then lower_bound on a set of prefix sums finds the subarray sum that best fits our requirement, i.e. the largest one not exceeding k (the 1-D trick is isolated in a sketch after the code).
class Solution {
public:
    int maxSumSubmatrix(vector<vector<int>>& matrix, int k) {
        int rows = matrix.size(), cols = matrix[0].size();
        int res = INT_MIN;
        // Fix a pair of column boundaries [l, r] and collapse the matrix into per-row sums.
        for (int l = 0; l < cols; l++) {
            vector<int> sums(rows, 0);
            for (int r = l; r < cols; r++) {
                for (int i = 0; i < rows; i++)
                    sums[i] += matrix[i][r];
                // 1-D problem: maximum subarray sum of `sums` no larger than k,
                // solved with prefix sums and lower_bound on an ordered set.
                set<int> cusums;
                cusums.insert(0);
                int cursum = 0, curMax = INT_MIN;
                for (auto sum : sums) {
                    cursum += sum;
                    // The smallest prefix >= cursum - k yields the largest subarray sum <= k ending here.
                    set<int>::iterator it = cusums.lower_bound(cursum - k);
                    if (it != cusums.end())
                        curMax = max(curMax, cursum - *it);
                    cusums.insert(cursum);
                }
                res = max(res, curMax);
            }
        }
        return res;
    }
};
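To isolate the core trick, here is a sketch of the 1-D subroutine on its own; the name maxSubarrayNoLargerThanK is mine and not part of the original solution. Given an array, it finds the maximum subarray sum that does not exceed k by keeping all running prefix sums in an ordered set.

#include <set>
#include <vector>
#include <climits>
#include <algorithm>

// Sketch of the 1-D building block (hypothetical helper, not from the original post).
// For a running prefix sum cur and an earlier prefix p, the subarray sum is cur - p.
// We need cur - p <= k, i.e. p >= cur - k, so lower_bound(cur - k) returns the smallest
// valid p and therefore the largest valid subarray sum ending at the current position.
int maxSubarrayNoLargerThanK(const std::vector<int>& nums, int k) {
    std::set<int> prefixes;          // prefix sums seen so far
    prefixes.insert(0);              // the empty prefix
    int cur = 0, best = INT_MIN;
    for (int x : nums) {
        cur += x;
        auto it = prefixes.lower_bound(cur - k);
        if (it != prefixes.end())
            best = std::max(best, cur - *it);
        prefixes.insert(cur);
    }
    return best;                     // INT_MIN if no subarray has sum <= k
}

The inner loop of the matrix solution is exactly this subroutine applied to the per-row sums between columns l and r, giving an overall cost of O(cols² · rows · log rows).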