Skip to content

Commit f2ecb1e

Browse files
committed
removeDuplicates func complexity analysis + refactoring for better efficiency
1 parent 08996f8 commit f2ecb1e

File tree

1 file changed

+37
-23
lines changed

1 file changed

+37
-23
lines changed

Sprint-1/JavaScript/removeDuplicates/removeDuplicates.mjs

Lines changed: 37 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -8,29 +8,43 @@
88
* @param {Array} inputSequence - Sequence to remove duplicates from
99
* @returns {Array} New sequence with duplicates removed
1010
*/
11-
export function removeDuplicates(inputSequence) {
12-
const uniqueItems = [];
11+
// export function removeDuplicates(inputSequence) {
12+
// const uniqueItems = [];
13+
14+
// for (
15+
// let currentIndex = 0;
16+
// currentIndex < inputSequence.length;
17+
// currentIndex++
18+
// ) {
19+
// let isDuplicate = false;
20+
// for (
21+
// let compareIndex = 0;
22+
// compareIndex < uniqueItems.length;
23+
// compareIndex++
24+
// ) {
25+
// if (inputSequence[currentIndex] === uniqueItems[compareIndex]) {
26+
// isDuplicate = true;
27+
// break;
28+
// }
29+
// }
30+
// if (!isDuplicate) {
31+
// uniqueItems.push(inputSequence[currentIndex]);
32+
// }
33+
// }
34+
35+
// return uniqueItems;
36+
// }
1337

14-
for (
15-
let currentIndex = 0;
16-
currentIndex < inputSequence.length;
17-
currentIndex++
18-
) {
19-
let isDuplicate = false;
20-
for (
21-
let compareIndex = 0;
22-
compareIndex < uniqueItems.length;
23-
compareIndex++
24-
) {
25-
if (inputSequence[currentIndex] === uniqueItems[compareIndex]) {
26-
isDuplicate = true;
27-
break;
28-
}
29-
}
30-
if (!isDuplicate) {
31-
uniqueItems.push(inputSequence[currentIndex]);
32-
}
33-
}
3438

35-
return uniqueItems;
39+
// My Analysis Result
40+
// Time Complexity - The nested loop structure gives quadratic growth: O(n × n) = O(n²).
41+
// Space Complexity- A new array 'uniqueItems' is created which can grow up to the size of the input, making the space complexity O(n).
42+
// The inefficiency of this program is due to the nested loop:
43+
// for every item, it rescans the growing results array to check for duplicates — linear work per item.
44+
45+
// Refactored code: a Set tracks uniqueness with O(1) lookups per item.
46+
/**
 * Removes duplicate values from a sequence, keeping the first occurrence
 * of each value in its original order (Set iteration preserves insertion order).
 * @param {Array} inputSequence - Sequence to remove duplicates from
 * @returns {Array} New sequence with duplicates removed
 */
export function removeDuplicates(inputSequence) {
  const uniqueValues = new Set(inputSequence);
  return Array.from(uniqueValues);
}
49+
// Here the time complexity is reduced to the optimal O(n): the Set is built in a single pass over the input.
50+
// The space complexity remains O(n), because the Set can hold up to n unique items.

0 commit comments

Comments
 (0)