While sorting the array is the standard answer, there are still cases where brute-force algorithms are useful. I came up with this thought when I accidentally miswrote the '<' sign as the '<=' sign in my sorting comparator and failed the test due to time limit exceeded. Actually, if N is very large but the range of each interval is limited (e.g. from -500 to +500), these intervals overlap heavily. In other words, the number of distinct intervals (after processing the kth interval) scales linearly with k at the beginning, but will converge to a constant as k increases. In that case, the time complexity of my brute-force algorithm can be reduced to O(N).

```
class Solution {
public:
vector<Interval> merge(vector<Interval>& intervals) {
int n = intervals.size();
list<Interval> intervalSet;
vector<Interval> res;
for (int i = 0; i < n; i++)
{
auto myInterval = intervals[i];
for (auto it = intervalSet.begin(); it != intervalSet.end(); )
{
auto currInterval = *it;
if (haveIntersection(currInterval, myInterval))
{
merge2Intervals(currInterval, myInterval);
it = intervalSet.erase(it);
}
else
it++;
}
intervalSet.push_back(myInterval);
}
for (auto& interval : intervalSet)
res.push_back(interval);
return res;
}
bool haveIntersection(Interval a1, Interval a2)
{
return (a2.start >= a1.start && a2.start <= a1.end) ||
(a2.end >= a1.start && a2.end <= a1.end) ||
(a1.start >= a2.start && a1.start <= a2.end) ||
(a1.end >= a2.start && a1.end <= a2.end);
}
void merge2Intervals(Interval& strt, Interval& dest)
{
dest.start = min(strt.start, dest.start);
dest.end = max(strt.end, dest.end);
}
};
```