Sorting
Each record contains a field called the key.
– Key values have a linear order and can be compared.
Measures of cost:
– Comparisons
– Swaps
Insertion Sort (1)
Insertion Sort (2)
template <class Elem, class Comp>
void inssort(Elem A[], int n) {
  for (int i=1; i<n; i++)
    for (int j=i; (j>0) && (Comp::lt(A[j], A[j-1])); j--)
      swap(A, j, j-1);
}
Best Case: Θ(n) comparisons, 0 swaps (input already sorted).
Worst Case: Θ(n²) comparisons, Θ(n²) swaps (input in reverse order).
Average Case: Θ(n²) comparisons, Θ(n²) swaps.
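The code on these slides relies on a comparator class with static lt/gt methods and an array swap helper, neither of which is shown. The following scaffolding is a minimal sketch (the class name IntCompare and the driver values are assumptions; declare the helpers before the sort code so its calls to swap and Comp::lt resolve):

#include <iostream>

class IntCompare {                    // hypothetical comparator for int keys
public:
  static bool lt(int a, int b) { return a < b; }
  static bool gt(int a, int b) { return a > b; }
};

template <class Elem>                 // swap two array elements, as the sorts expect
void swap(Elem A[], int i, int j) {
  Elem tmp = A[i]; A[i] = A[j]; A[j] = tmp;
}

int main() {                          // small driver for inssort
  int A[] = {42, 20, 17, 13, 28, 14, 23, 15};
  inssort<int, IntCompare>(A, 8);
  for (int i = 0; i < 8; i++) std::cout << A[i] << ' ';  // keys in ascending order
  return 0;
}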
Bubble Sort (1)
Bubble Sort (2)
template <class Elem, class Comp>
void bubsort(Elem A[], int n) {
  for (int i=0; i<n-1; i++)
    for (int j=n-1; j>i; j--)
      if (Comp::lt(A[j], A[j-1]))
        swap(A, j, j-1);
}
Best Case: Θ(n²) comparisons, 0 swaps.
Worst Case: Θ(n²) comparisons, Θ(n²) swaps.
Average Case: Θ(n²) comparisons, Θ(n²) swaps.
Selection Sort (1)
Selection Sort (2)
template <class Elem, class Comp>
void selsort(Elem A[], int n) {
  for (int i=0; i<n-1; i++) {
    int lowindex = i;                  // Remember its index
    for (int j=n-1; j>i; j--)          // Find least
      if (Comp::lt(A[j], A[lowindex]))
        lowindex = j;
    swap(A, i, lowindex);              // Put it in place
  }
}
Best Case: Θ(n²) comparisons, Θ(n) swaps.
Worst Case: Θ(n²) comparisons, Θ(n) swaps.
Average Case: Θ(n²) comparisons, Θ(n) swaps.
Pointer Swapping
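This slide title presumably refers to sorting an array of pointers to records when the records themselves are large, so that each swap moves a pointer rather than a whole record. A minimal sketch of the idea (the Record type, its fields, and the use of std::sort are assumptions for illustration):

#include <algorithm>

struct Record { int key; char payload[256]; };   // hypothetical large record

// Fill ptrs with the addresses of the records, then sort the pointers by key;
// only pointers are exchanged, never the 256-byte records.
void sortByPointer(Record* recs, Record** ptrs, int n) {
  for (int i = 0; i < n; i++) ptrs[i] = &recs[i];
  std::sort(ptrs, ptrs + n,
            [](const Record* a, const Record* b) { return a->key < b->key; });
}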
Summary
              Insertion    Bubble     Selection
Comparisons:
  Best Case      Θ(n)       Θ(n²)       Θ(n²)
  Average Case   Θ(n²)      Θ(n²)       Θ(n²)
  Worst Case     Θ(n²)      Θ(n²)       Θ(n²)
Swaps:
  Best Case      0          0           Θ(n)
  Average Case   Θ(n²)      Θ(n²)       Θ(n)
  Worst Case     Θ(n²)      Θ(n²)       Θ(n)
Exchange Sorting
All of the sorts so far rely on exchanges of adjacent records.
What is the average number of exchanges required?
– There are n! permutations.
– Consider a permutation X and its reverse, X'.
– Together, every such pair requires n(n-1)/2 exchanges, so the average over all inputs is n(n-1)/4 = Ω(n²) (a small check appears below).
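A quick numerical check of the argument (the permutation below is an arbitrary example): every pair of positions is out of order in exactly one of X and its reverse, so their inversion counts sum to n(n-1)/2, and each inversion costs one adjacent exchange.

#include <cstdio>

// Count inversions: pairs (i,j) with i < j and A[i] > A[j]. Each inversion
// must be removed by one exchange of adjacent records.
int inversions(const int A[], int n) {
  int cnt = 0;
  for (int i = 0; i < n; i++)
    for (int j = i + 1; j < n; j++)
      if (A[i] > A[j]) cnt++;
  return cnt;
}

int main() {
  int X[]  = {1, 3, 2, 4};             // example permutation
  int Xr[] = {4, 2, 3, 1};             // X reversed
  std::printf("%d\n", inversions(X, 4) + inversions(Xr, 4));  // prints 6 = 4*3/2
  return 0;
}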
Shellsort
Shellsort
// Modified version of Insertion Sort
template <class Elem, class Comp>
void inssort2(Elem A[], int n, int incr) {
  for (int i=incr; i<n; i+=incr)
    for (int j=i; (j>=incr) && (Comp::lt(A[j], A[j-incr])); j-=incr)
      swap(A, j, j-incr);
}

template <class Elem, class Comp>
void shellsort(Elem A[], int n) {  // Shellsort
  for (int i=n/2; i>2; i/=2)       // For each incr
    for (int j=0; j<i; j++)        // Sort sublists
      inssort2<Elem,Comp>(&A[j], n-j, i);
  inssort2<Elem,Comp>(A, n, 1);
}
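A small driver for shellsort (example values assumed; IntCompare is the comparator sketched earlier). For n = 8 the code above makes one sublist pass with increment 4 and then the final insertion-sort pass with increment 1.

int main() {
  int A[] = {36, 20, 17, 13, 28, 14, 23, 15};
  shellsort<int, IntCompare>(A, 8);    // passes with increments 4, then 1
  // A is now 13 14 15 17 20 23 28 36
  return 0;
}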
Quicksort
template <class Elem, class Comp>
void qsort(Elem A[], int i, int j) {
  if (j <= i) return;                  // List too small
  int pivotindex = findpivot(A, i, j);
  swap(A, pivotindex, j);              // Put pivot at end
  // k will be first position on right side
  int k = partition<Elem,Comp>(A, i-1, j, A[j]);
  swap(A, k, j);                       // Put pivot in place
  qsort<Elem,Comp>(A, i, k-1);
  qsort<Elem,Comp>(A, k+1, j);
}

template <class Elem>
int findpivot(Elem A[], int i, int j)
  { return (i+j)/2; }
Quicksort Partition
template <class Elem, class Comp>
int partition(Elem A[], int l, int r, Elem& pivot) {
  do {                 // Move the bounds in until they meet
    while (Comp::lt(A[++l], pivot));
    while ((r != 0) && Comp::gt(A[--r], pivot));
    swap(A, l, r);     // Swap out-of-place values
  } while (l < r);     // Stop when they cross
  swap(A, l, r);       // Reverse last swap
  return l;            // Return first pos on right
}
The cost for partition is Θ(n).
Partition Example
Quicksort Example
Cost of Quicksort
Best case: always partition in half, giving Θ(n log n).
Worst case: bad partition at every step, giving Θ(n²).
Average case:
  T(n) = n + 1 + (1/(n-1)) Σ_{k=1}^{n-1} (T(k) + T(n-k)), which solves to Θ(n log n).
Optimizations for Quicksort:
– Better Pivot (see the sketch below)
– Better algorithm for small sublists
– Eliminate recursion
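One common form of the "Better Pivot" optimization is median-of-three. The sketch below is a variant of my own, not the text's code; unlike findpivot above it needs the comparator, so it takes a second template parameter.

template <class Elem, class Comp>
int findpivotMO3(Elem A[], int i, int j) {
  int mid = (i + j) / 2;
  // Order A[i], A[mid], A[j] so that their median ends up at position mid.
  if (Comp::lt(A[mid], A[i]))   swap(A, i, mid);
  if (Comp::lt(A[j],   A[i]))   swap(A, i, j);
  if (Comp::lt(A[j],   A[mid])) swap(A, mid, j);
  return mid;                   // index of the median of the three samples
}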
Mergesort
List mergesort(List inlist) {
  if (inlist.length() <= 1) return inlist;
  List l1 = half of the items from inlist;
  List l2 = other half of the items from inlist;
  return merge(mergesort(l1), mergesort(l2));
}
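The pseudocode above can be made concrete for linked lists. A sketch using std::list<int> (the container choice and the function name mergesortList are assumptions, not the text's List class):

#include <iterator>
#include <list>
#include <utility>

std::list<int> mergesortList(std::list<int> inlist) {
  if (inlist.size() <= 1) return inlist;
  std::list<int> l1, l2;
  auto mid = inlist.begin();
  std::advance(mid, inlist.size() / 2);
  l1.splice(l1.begin(), inlist, inlist.begin(), mid);           // first half
  l2.splice(l2.begin(), inlist, inlist.begin(), inlist.end());  // second half
  l1 = mergesortList(std::move(l1));
  l2 = mergesortList(std::move(l2));
  l1.merge(l2);                 // splice-merge two sorted lists in linear time
  return l1;
}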
Mergesort Implementation
template <class Elem, class Comp>
void mergesort(Elem A[], Elem temp[],
               int left, int right) {
  int mid = (left+right)/2;
  if (left == right) return;
  mergesort<Elem,Comp>(A, temp, left, mid);
  mergesort<Elem,Comp>(A, temp, mid+1, right);
  for (int i=left; i<=right; i++)      // Copy
    temp[i] = A[i];
  int i1 = left;  int i2 = mid + 1;
  for (int curr=left; curr<=right; curr++) {
    if (i1 == mid+1)                   // Left exhausted
      A[curr] = temp[i2++];
    else if (i2 > right)               // Right exhausted
      A[curr] = temp[i1++];
    else if (Comp::lt(temp[i1], temp[i2]))
      A[curr] = temp[i1++];
    else A[curr] = temp[i2++];
  }
}
Optimized Mergesort
template <class Elem, class Comp>
void mergesort(Elem A[], Elem temp[],
               int left, int right) {
  if ((right-left) <= THRESHOLD) {
    inssort<Elem,Comp>(&A[left], right-left+1);
    return;
  }
  int i, j, k, mid = (left+right)/2;
  mergesort<Elem,Comp>(A, temp, left, mid);
  mergesort<Elem,Comp>(A, temp, mid+1, right);
  // Copy first half as-is, second half in reverse order, so the
  // merge loop below needs no end-of-sublist tests.
  for (i=mid; i>=left; i--) temp[i] = A[i];
  for (j=1; j<=right-mid; j++) temp[right-j+1] = A[j+mid];
  for (i=left, j=right, k=left; k<=right; k++)
    if (temp[i] < temp[j]) A[k] = temp[i++];
    else A[k] = temp[j--];
}
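THRESHOLD is a tuning constant that the slide leaves undefined; a plausible definition (the value 10 is an assumption) is:

const int THRESHOLD = 10;   // below this sublist length, fall back to insertion sort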
Mergesort Cost
Mergesort cost: T(n) = 2T(n/2) + Θ(n), which is Θ(n log n) in the best, average, and worst cases.
Mergesort is also good for sorting linked lists.
Mergesort requires twice the space (the auxiliary array temp).
Heapsort
template <class Elem, class Comp>
void heapsort(Elem A[], int n) {   // Heapsort
  Elem mval;
  maxheap<Elem,Comp> H(A, n, n);
  for (int i=0; i<n; i++)          // Now sort
    H.removemax(mval);             // Put max at end
}
Use a max-heap, so that elements end up sorted within the array.
Cost of heapsort: Θ(n log n).
Cost of finding K largest elements: Θ(n + K log n) (build the heap in Θ(n), then do K removemax operations).
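For the K-largest cost, a sketch using the standard library's heap routines in place of the text's maxheap class (my substitution, not the slide's code): building the heap costs Θ(n), and each of the K removals costs O(log n).

#include <algorithm>
#include <vector>

std::vector<int> kLargest(std::vector<int> v, int K) {
  std::make_heap(v.begin(), v.end());      // Θ(n) heap construction
  std::vector<int> out;
  for (int i = 0; i < K && !v.empty(); i++) {
    std::pop_heap(v.begin(), v.end());     // move the current max to the back
    out.push_back(v.back());
    v.pop_back();
  }
  return out;                              // K largest keys, largest first
}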
Heapsort Example (1)
Heapsort Example (2)
Binsort (1)
A simple, efficient sort (assuming the keys are distinct integers in the range 0 to n-1):
  for (int i=0; i<n; i++)
    B[A[i]] = A[i];
Ways to generalize:
– Make each bin the head of a list.
– Allow more keys than records.
Binsort (2)
template <class Elem>
void binsort(Elem A[], int n) {
  List<Elem> B[MaxKeyValue];
  Elem item;
  for (int i=0; i<n; i++) B[A[i]].append(A[i]);
  for (int i=0; i<MaxKeyValue; i++)
    for (B[i].setStart(); B[i].getValue(item); B[i].next())
      output(item);
}
Cost: Θ(n + MaxKeyValue).
Radix Sort (1)
Radix Sort (2)
template <class Elem, class Comp>
void radix(Elem A[], Elem B[],
           int n, int k, int r, int cnt[]) {
  // cnt[i] stores # of records in bin[i]
  int j;
  for (int i=0, rtok=1; i<k; i++, rtok*=r) {
    for (j=0; j<r; j++) cnt[j] = 0;
    // Count # of records for each bin
    for (j=0; j<n; j++) cnt[(A[j]/rtok)%r]++;
    // cnt[j] will be last slot of bin j.
    for (j=1; j<r; j++)
      cnt[j] = cnt[j-1] + cnt[j];
    for (j=n-1; j>=0; j--)
      B[--cnt[(A[j]/rtok)%r]] = A[j];
    for (j=0; j<n; j++) A[j] = B[j];
  }
}
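A usage sketch for the code above (example values assumed; any class with static lt/gt, such as the IntCompare sketch shown earlier, satisfies the unused Comp parameter). Sorting twelve 2-digit decimal keys uses k = 2 and r = 10:

int A[] = {27, 91, 1, 97, 17, 23, 84, 28, 72, 5, 67, 25};
int B[12];                  // scratch array, same length as A
int cnt[10];                // one counter per bin (r = 10)
radix<int, IntCompare>(A, B, 12, 2, 10, cnt);
// A is now 1 5 17 23 25 27 28 67 72 84 91 97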
Radix Sort Example
Radix Sort Cost
Cost: Θ(nk + rk).
How do n, k, and r relate?
If the key range is small, then this can be Θ(n).
If there are n distinct keys, then the length of a key must be at least log n.
– Thus, Radix Sort is Ω(n log n) in the general case.
Empirical Comparison (1)
Empirical Comparison (2)
Sorting Lower Bound
We would like to know a lower bound for all possible sorting algorithms.
Sorting is O(n log n) (average and worst cases) because we know of algorithms with this upper bound.
Sorting I/O takes Ω(n) time.
We will now prove an Ω(n log n) lower bound for sorting.
Decision Trees
Lower Bound Proof
– There are n! permutations.
– A sorting algorithm can be viewed as determining which permutation has been input.
– Each leaf node of the decision tree corresponds to one permutation.
– A tree with n nodes has Ω(log n) levels, so a tree with n! leaves has Ω(log n!) = Ω(n log n) levels (a short derivation follows).
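The step Ω(log n!) = Ω(n log n) follows from a standard lower bound on log n!, shown here in LaTeX (this derivation is not spelled out on the slide):

\log n! = \sum_{i=1}^{n} \log i
        \ge \sum_{i=\lceil n/2 \rceil}^{n} \log i
        \ge \frac{n}{2} \log \frac{n}{2}
        = \Omega(n \log n)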
Which node in the decision tree corresponds
to the worst case?