This time I'm optimizing the sorting algorithm so it can be used in a priority queue. As a baseline, I grabbed a random insertion sort off the web; let's see how it performs.
function Insertion(arr) {
  let len = arr.length;
  let preIndex, current;
  for (let i = 1; i < len; i++) {
    preIndex = i - 1;
    current = arr[i];
    // Scan left, shifting smaller elements right until current's slot is found
    // (the `current > arr[preIndex]` test means this sorts in descending order)
    while (preIndex >= 0 && current > arr[preIndex]) {
      arr[preIndex + 1] = arr[preIndex];
      preIndex--;
    }
    arr[preIndex + 1] = current;
  }
  return arr;
}
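A quick check confirms that it sorts in descending order:

Insertion([3, 1, 4, 1, 5]); // [5, 4, 3, 1, 1]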
Here is a helper that generates a random array to test with:
function randomArr(len, start, end) {
  let arr = [];
  while (arr.length < len) {
    // Random integer in the interval [start, end]
    let d = Math.round(Math.random() * (end - start) + start);
    arr.push(d);
  }
  return arr;
}
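For example (the output is random; the values shown are just illustrative):

randomArr(5, 0, 100); // e.g. [42, 7, 99, 0, 63]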
And here is the test function. Executing it prints the elapsed time of one run of a given sort:
function test(fn, n, start, end) {
  console.time();
  let a = randomArr(n, start, end);
  let b = fn(a);
  console.timeEnd(); // note: the timer also includes the array generation
}
Here fn is the sort function to measure, n is the length of the array, and start and end bound the interval the random values are drawn from.
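A single run looks like this (timings are from my machine; yours will differ):

test(Insertion, 100000, 0, 10000); // prints something like: default: 3.057s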
Here is my optimized function:
function insertSort(arr = []) {
  let sortArr = [];
  if (arr.length >= 2) {
    // Seed the sorted array with the first two elements (descending)
    if (arr[0] < arr[1]) {
      sortArr.push(arr[1], arr[0]);
    } else {
      sortArr.push(arr[0], arr[1]);
    }
  } else {
    // When the length of the array is less than 2, there is nothing to order
    sortArr.push(...arr);
  }
  for (let i = 2; i < arr.length; i++) {
    let lPos = 0; // left boundary
    let rPos = sortArr.length; // right boundary
    let j = Math.floor((rPos - lPos) / 2);
    while (rPos - lPos !== 1) {
      if (arr[i] > sortArr[j]) {
        rPos = j;
      } else if (arr[i] < sortArr[j]) {
        lPos = j;
      } else {
        lPos = j - 1;
        rPos = j;
      }
      j = Math.floor((rPos + lPos) / 2); // j moves to the midpoint of the two boundaries
    }
    if (arr[i] > sortArr[j]) { // finally, decide which side of j to insert on
      sortArr.splice(j, 0, arr[i]); // insert at the current position
    } else {
      sortArr.splice(j + 1, 0, arr[i]); // insert at the next position
    }
  }
  return sortArr;
}
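The same sanity check gives the same result as Insertion above:

insertSort([3, 1, 4, 1, 5]); // [5, 4, 3, 1, 1]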
The implementation is a bit more involved than the previous one. The optimization targets the inner loop: instead of scanning linearly for the insertion position, I use binary search, so the comparisons drop to about O(n log n) overall. (The splice calls still shift elements, so the worst case is still O(n²) moves, but a bulk shift inside splice is far cheaper than the compare-and-assign loop it replaces.) That makes it noticeably better than the plain O(n²) version, though certainly not as good as quicksort; I didn't choose quicksort because it isn't stable, and the priority queue I want to implement requires a stable ordering.
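To get a feel for the comparison savings, here is a rough back-of-the-envelope calculation for the worst case at n = 500,000:

// Approximate worst-case comparison counts for n = 500000
const n = 500000;
console.log((n * (n - 1) / 2).toExponential(2)); // 1.25e+11 with a linear scan
console.log((n * Math.log2(n)).toExponential(2)); // 9.47e+6 with binary search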
Below are the results for 100,000 to 500,000 elements (I ran fewer repetitions at the larger sizes; multiple runs are separated by commas). The comparison is not a strictly controlled experiment, but repeated runs produced nearly identical numbers, so it gives a fair picture of the optimization's effect.
// 100,000 elements
test(Insertion, 100000, 0, 10000); // 3.057s, 3.101s, 3.072s, 3.078s
test(insertSort, 100000, 0, 10000); // 737.965ms, 764.219ms, 732.931ms, 739.714ms
// 200,000 elements
test(Insertion, 200000, 0, 10000); // 12.464s
test(insertSort, 200000, 0, 10000); // 4.506s
// 300,000 elements
test(Insertion, 300000, 0, 10000); // 27.746s, 28.024s
test(insertSort, 300000, 0, 10000); // 10.432s, 10.695s
// 400,000 elements
test(Insertion, 400000, 0, 10000); // 50.283s
test(insertSort, 400000, 0, 10000); // 16.581s
// 500,000 elements
test(Insertion, 500000, 0, 10000); // 1:18.815 (m:ss.mmm), 1:19.075 (m:ss.mmm)
test(insertSort, 500000, 0, 10000); // 43.778s, 44.420s