use crate::mem::{self, ManuallyDrop, MaybeUninit};
use crate::slice::sort::shared::FreezeMarker;
use crate::{intrinsics, ptr, slice};
+use safety::{ensures, requires};
+
+#[allow(unused_imports)]
+use crate::ub_checks;
+
+#[cfg(kani)]
+use crate::kani;

// It's important to differentiate between SMALL_SORT_THRESHOLD performance for
// small slices and small-sort performance sorting small sub-slices as part of
@@ -62,6 +69,26 @@ impl<T: FreezeMarker> StableSmallSortTypeImpl for T {
    }
}

+// This wrapper contract function exists because:
+// - kani contract attribute macros don't work with `default fn`
+// - we cannot specify the trait member function with `proof_for_contract`
+#[cfg(kani)]
+#[kani::modifies(v)]
+#[ensures(|_| {
+    let mut is_less = is_less.clone();
+    v.is_sorted_by(|a, b| !is_less(b, a))
+})]
+pub(crate) fn _stable_small_sort_type_impl_small_sort<T, F>(
+    v: &mut [T],
+    scratch: &mut [MaybeUninit<T>],
+    is_less: &mut F,
+)
+where
+    F: FnMut(&T, &T) -> bool + Clone,
+{
+    <T as StableSmallSortTypeImpl>::small_sort(v, scratch, is_less);
+}
+
/// Using a trait allows us to specialize on `Freeze` which in turn allows us to make safe
/// abstractions.
pub(crate) trait UnstableSmallSortTypeImpl: Sized {
@@ -102,6 +129,25 @@ impl<T: FreezeMarker> UnstableSmallSortTypeImpl for T {
    }
}

+// This wrapper contract function exists because:
+// - kani contract attribute macros don't work with `default fn`
+// - we cannot specify the trait member function with `proof_for_contract`
+#[cfg(kani)]
+#[kani::modifies(v)]
+#[ensures(|_| {
+    let mut is_less = is_less.clone();
+    v.is_sorted_by(|a, b| !is_less(b, a))
+})]
+pub(crate) fn _unstable_small_sort_type_impl_small_sort<T, F>(
+    v: &mut [T],
+    is_less: &mut F,
+)
+where
+    F: FnMut(&T, &T) -> bool + Clone,
+{
+    <T as UnstableSmallSortTypeImpl>::small_sort(v, is_less);
+}
+
/// FIXME(const_trait_impl) use original ipnsort approach with choose_unstable_small_sort,
/// as found here <https://github.com/Voultapher/sort-research-rs/blob/438fad5d0495f65d4b72aa87f0b62fc96611dff3/ipnsort/src/smallsort.rs#L83C10-L83C36>.
pub(crate) trait UnstableSmallSortFreezeTypeImpl: Sized + FreezeMarker {
@@ -170,6 +216,26 @@ impl<T: FreezeMarker + CopyMarker> UnstableSmallSortFreezeTypeImpl for T {
    }
}

+// This wrapper contract function exists because:
+// - kani contract attribute macros don't work with `default fn`
+// - we cannot specify the trait member function with `proof_for_contract`
+#[cfg(kani)]
+#[kani::modifies(v)]
+#[ensures(|_| {
+    let mut is_less = is_less.clone();
+    v.is_sorted_by(|a, b| !is_less(b, a))
+})]
+pub(crate) fn _unstable_small_sort_freeze_type_impl_small_sort<T, F>(
+    v: &mut [T],
+    is_less: &mut F,
+)
+where
+    T: FreezeMarker + CopyMarker,
+    F: FnMut(&T, &T) -> bool + Clone,
+{
+    <T as UnstableSmallSortFreezeTypeImpl>::small_sort(v, is_less);
+}
+
/// Optimal number of comparisons, and good perf.
const SMALL_SORT_FALLBACK_THRESHOLD: usize = 16;
@@ -539,6 +605,22 @@ where
///
/// # Safety
/// begin < tail and p must be valid and initialized for all begin <= p <= tail.
+#[cfg_attr(kani, kani::modifies(begin, tail))]
+#[requires(begin.addr() < tail.addr() && {
+    let len = tail.addr() - begin.addr();
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    (0..=len).into_iter().all(|i| {
+        let p = begin.add(i);
+        ub_checks::can_dereference(p as *const _) &&
+        ub_checks::can_write(p) &&
+        ub_checks::same_allocation(begin as *const _, p as *const _)
+    }) && (0..(len - 1)).into_iter().all(|i| !is_less(&*begin.add(i + 1), &*begin.add(i)))
+})]
+#[ensures(|_| {
+    let len = tail.addr() - begin.addr();
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    (0..len).into_iter().all(|i| !is_less(&*begin.add(i + 1), &*begin.add(i)))
+})]
unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T, is_less: &mut F) {
    // SAFETY: see individual comments.
    unsafe {
@@ -556,7 +638,13 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
        let tmp = ManuallyDrop::new(tail.read());
        let mut gap_guard = CopyOnDrop { src: &*tmp, dst: tail, len: 1 };

-        loop {
+        #[safety::loop_invariant(
+            sift.addr() >= begin.addr() && sift.addr() < tail.addr()
+        )]
+        // FIXME: This was `loop` but kani's loop contract doesn't support `loop`.
+        // Once it is supported, replace `while true` with the original `loop`.
+        #[allow(while_true)]
+        while true {
            // SAFETY: we move sift into the gap (which is valid), and point the
            // gap guard destination at sift, ensuring that if a panic occurs the
            // gap is once again filled.
@@ -577,6 +665,15 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
}

/// Sort `v` assuming `v[..offset]` is already sorted.
+#[cfg_attr(kani, kani::modifies(v))]
+#[requires(offset != 0 && offset <= v.len() && {
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    v[..offset].is_sorted_by(|a, b| !is_less(b, a))
+})]
+#[ensures(|_| {
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    v.is_sorted_by(|a, b| !is_less(b, a))
+})]
pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
    v: &mut [T],
    offset: usize,
@@ -597,6 +694,14 @@ pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
        let v_end = v_base.add(len);
        let mut tail = v_base.add(offset);
        while tail != v_end {
+            // FIXME: This should be a loop contract, but sadly making it a
+            // loop invariant causes OOM.
+            #[cfg(kani)]
+            kani::assert(
+                tail.addr() > v_base.addr() && tail.addr() <= v_end.addr(),
+                "loop invariants",
+            );
+
            // SAFETY: v_base and tail are both valid pointers to elements, and
            // v_base < tail since we checked offset != 0.
            insert_tail(v_base, tail, is_less);
@@ -609,6 +714,28 @@ pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(

/// SAFETY: The caller MUST guarantee that `v_base` is valid for 4 reads and
/// `dst` is valid for 4 writes. The result will be stored in `dst[0..4]`.
+#[cfg_attr(kani, kani::modifies(dst, dst.add(1), dst.add(2), dst.add(3)))]
+#[requires(
+    (0..4).into_iter().all(|i| {
+        let p = v_base.add(i);
+        ub_checks::can_dereference(p) &&
+        ub_checks::same_allocation(v_base, p)
+    })
+)]
+#[requires(
+    (0..4).into_iter().all(|i| {
+        let p = dst.add(i);
+        ub_checks::can_write(p) &&
+        ub_checks::same_allocation(dst, p)
+    })
+)]
+#[ensures(|_| {
+    let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+    (0..3).into_iter().all(|i| !is_less(
+        &*dst.add(i + 1),
+        &*dst.add(i),
+    ))
+})]
pub unsafe fn sort4_stable<T, F: FnMut(&T, &T) -> bool>(
    v_base: *const T,
    dst: *mut T,
@@ -870,3 +997,136 @@ pub(crate) const fn has_efficient_in_place_swap<T>() -> bool {
    // Heuristic that holds true on all tested 64-bit capable architectures.
    mem::size_of::<T>() <= 8 // mem::size_of::<u64>()
}
+
+#[cfg(kani)]
+#[unstable(feature = "kani", issue = "none")]
+mod verify {
+    use super::*;
+
+    // The maximum length of the slice that `insertion_sort_shift_left`
+    // is called upon.
+    // The value is from the following line:
+    // https://github.com/model-checking/verify-rust-std/blob/1a38674ad6753e3a78e0181d1fe613f3b25ebacd/library/core/src/slice/sort/shared/smallsort.rs#L330
+    const INSERTION_SORT_MAX_LEN: usize = 17;
+
+    #[kani::proof]
+    pub fn check_swap_if_less() {
+        let mut array: [u8; SMALL_SORT_GENERAL_THRESHOLD] = kani::any();
+        let a_pos = kani::any_where(|x: &usize| *x < SMALL_SORT_GENERAL_THRESHOLD);
+        let b_pos = kani::any_where(|x: &usize| *x < SMALL_SORT_GENERAL_THRESHOLD);
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        let expected = {
+            let mut array = array.clone();
+            let a: u8 = array[a_pos];
+            let b: u8 = array[b_pos];
+            if is_less(&b, &a) {
+                array[a_pos] = b;
+                array[b_pos] = a;
+            }
+            array
+        };
+        unsafe {
+            swap_if_less(array.as_mut_ptr(), a_pos, b_pos, &mut is_less);
+        }
+        kani::assert(
+            array == expected,
+            "swapped slice is different from the expectation"
+        );
+    }
+
+    #[kani::proof_for_contract(insert_tail)]
+    #[kani::unwind(17)]
+    pub fn check_insert_tail() {
+        let mut array: [u8; INSERTION_SORT_MAX_LEN] = kani::any();
+        let tail = kani::any_where(|x: &usize| *x < INSERTION_SORT_MAX_LEN);
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        unsafe {
+            let begin = array.as_mut_ptr();
+            let tail = begin.add(tail);
+            insert_tail(begin, tail, &mut is_less);
+        }
+    }
+
+    // FIXME: Ideally, this should be `proof_for_contract(insertion_sort_shift_left)`,
+    // but this fails with OOM due to `proof_for_contract`'s perf issue.
+    //
+    // See https://github.com/model-checking/kani/issues/3797
+    #[kani::proof]
+    #[kani::stub_verified(insert_tail)]
+    #[kani::unwind(17)]
+    pub fn check_insertion_sort_shift_left() {
+        let slice_len = kani::any_where(|x: &usize| {
+            *x != 0 && *x <= INSERTION_SORT_MAX_LEN
+        });
+        let offset = kani::any_where(|x: &usize| *x != 0 && *x <= slice_len);
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        let mut array = kani::any_where(|x: &[u8; INSERTION_SORT_MAX_LEN]| {
+            x[..offset].is_sorted_by(|a, b| !is_less(b, a))
+        });
+        insertion_sort_shift_left(&mut array[..slice_len], offset, &mut is_less);
+        kani::assert(
+            array[..slice_len].is_sorted_by(|a, b| !is_less(b, a)),
+            "slice is not sorted",
+        );
+    }
+
+    #[kani::proof_for_contract(sort4_stable)]
+    pub fn check_sort4_stable() {
+        let src: [u8; 4] = kani::any();
+        let mut dst = MaybeUninit::<[u8; 4]>::uninit();
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        unsafe {
+            sort4_stable(src.as_ptr(), dst.as_mut_ptr() as *mut _, &mut is_less);
+        }
+    }
+
+    #[kani::proof]
+    pub fn check_sort4_stable_stability() {
+        let src: [(u8, u8); 4] = [
+            (kani::any(), 0),
+            (kani::any(), 1),
+            (kani::any(), 2),
+            (kani::any(), 3),
+        ];
+        let mut dst = MaybeUninit::<[(u8, u8); 4]>::uninit();
+        let mut is_less = |x: &(u8, u8), y: &(u8, u8)| x.0 < y.0;
+        unsafe {
+            sort4_stable(src.as_ptr(), dst.as_mut_ptr() as *mut _, &mut is_less);
+        }
+        let dst = unsafe { dst.assume_init() };
+        let mut is_stably_less = |x: &(u8, u8), y: &(u8, u8)| {
+            if x.0 == y.0 {
+                x.1 < y.1
+            } else {
+                x.0 < y.0
+            }
+        };
+        kani::assert(
+            dst.is_sorted_by(|a, b| !is_stably_less(b, a)),
+            "slice is not stably sorted",
+        );
+    }
+
+    #[kani::proof]
+    pub fn check_has_efficient_in_place_swap() {
+        // There isn't much to verify, as the function just calls `mem::size_of`.
+        // So we just brought over some tests written by the author of smallsort,
+        // from Voultapher/sort-research-rs:
+        //
+        // https://github.com/Voultapher/sort-research-rs/blob/c0cb46214a8d6e10ae5f4e0c363c2dbcbf71966f/ipnsort/src/smallsort.rs#L758-L771
+        assert!(has_efficient_in_place_swap::<i32>());
+        assert!(has_efficient_in_place_swap::<u64>());
+        assert!(!has_efficient_in_place_swap::<u128>());
+    }
+
+    #[kani::proof_for_contract(_stable_small_sort_type_impl_small_sort)]
+    #[kani::stub_verified(insertion_sort_shift_left)]
+    pub fn check_stable_small_sort_type_impl_small_sort() {
+        let mut array: [u8; SMALL_SORT_FALLBACK_THRESHOLD] = kani::any();
+        let len = kani::any_where(|x: &usize| *x <= SMALL_SORT_FALLBACK_THRESHOLD);
+        let mut scratch: [MaybeUninit<u8>; SMALL_SORT_GENERAL_SCRATCH_LEN]
+            = [const { MaybeUninit::uninit() }; SMALL_SORT_GENERAL_SCRATCH_LEN];
+        let mut is_less = |x: &u8, y: &u8| x < y;
+        _stable_small_sort_type_impl_small_sort(&mut array[..len], &mut scratch, &mut is_less);
+    }
+}
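
Aside, not part of the diff: every `ensures` clause and harness assertion above expresses "sorted" the same way, by asserting that no adjacent pair is strictly out of order under the caller's comparator, i.e. `is_sorted_by(|a, b| !is_less(b, a))`. A minimal standalone sketch of that check, assuming only the stable `slice::is_sorted_by` API:

fn main() {
    // The kind of comparator the contracts are parameterized over.
    let is_less = |x: &u8, y: &u8| x < y;
    // Non-decreasing order: no element is strictly less than its predecessor.
    // Note that equal neighbours are accepted, which is what the stability
    // harness relies on.
    assert!([1u8, 2, 2, 3].is_sorted_by(|a, b| !is_less(b, a)));
    assert!(![2u8, 1, 3].is_sorted_by(|a, b| !is_less(b, a)));
}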