Naive DP

The idea is pretty simple — similar to the Knapsack problem, for each job we have two possible actions:

• Take it
• Don't take it

Specifically,

// take job i: the result will be profit + dp(j), where j is the first non-overlapping index after i.
// don't take job i: the result will be dp(i+1).
// Shorthand for a (start, end) time pair of a job.
typedef pair<int, int> PII;

class Solution {
public:
    // memo[i] = best profit achievable using jobs i..n-1 (-1 = not computed).
    // Sized per call, so there is no hard-coded cap on the number of jobs
    // (the previous fixed global array silently overflowed past its size).
    vector<int> memo;

    // dp(i): maximum profit obtainable from the suffix startEnd_pro[i..n-1],
    // where startEnd_pro is sorted ascending by (start, end).
    int dp(vector<pair<PII, int>>& startEnd_pro, int i){
        const int n = startEnd_pro.size();
        if (i == n) return 0;                  // no jobs left
        if (memo[i] != -1) return memo[i];     // already solved
        const int pro = startEnd_pro[i].second;
        // Take job i: its profit plus the best over the first non-overlapping suffix.
        const int j = findNext(startEnd_pro, i);
        const int take = pro + dp(startEnd_pro, j);
        // Skip job i.
        const int skip = dp(startEnd_pro, i + 1);
        return memo[i] = max(take, skip);
    }

    // First index j > i whose start time is >= the end time of job i,
    // i.e. the first job that cannot overlap job i. Linear scan, O(n).
    int findNext(vector<pair<PII, int>>& startEnd_pro, int i){
        const int n = startEnd_pro.size();
        const int endI = startEnd_pro[i].first.second;
        for (int j = i + 1; j < n; ++j){
            if (startEnd_pro[j].first.first >= endI) return j;
        }
        return n;   // every later job overlaps job i
    }

    // LeetCode 1235: maximum total profit from a set of non-overlapping jobs.
    // A job ending at time t does not overlap one starting at t.
    int jobScheduling(vector<int>& startTime, vector<int>& endTime, vector<int>& profit) {
        const int n = startTime.size();
        vector<pair<PII, int>> startEnd_pro;
        startEnd_pro.reserve(n);
        for (int i = 0; i < n; ++i){
            startEnd_pro.emplace_back(make_pair(startTime[i], endTime[i]), profit[i]);
        }
        // Sort by (start, end) so "first index with start >= end_i" is a clean suffix.
        sort(startEnd_pro.begin(), startEnd_pro.end());
        memo.assign(n, -1);   // reset exactly n entries per call (was memset over 5e4+10)
        return dp(startEnd_pro, 0);
    }
};

As findNext has O(n) complexity, the overall complexity is O(n²). This solution already gets AC with a run time of 157 ms.

Improved one by binary search

Only findNext is updated; the run time improves to 149 ms.

int findNext(vector<pair<PII, int>>& startEnd_pro, int i){
pair<PII, int> key = make_pair(make_pair(startEnd_pro[i].first.second, -1), -1);
return upper_bound(startEnd_pro.begin(), startEnd_pro.end(), key) - startEnd_pro.begin();
}

Reference

Data Scientist/MLE/SWE @takemobi