@@ -1,4 +1,5 @@
 use std::alloc::Allocator;
+use std::any::Any;
 
 #[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
 Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")]
@@ -51,9 +52,20 @@ macro_rules! already_send {
 
 // These structures are already `Send`.
 already_send!(
-    [std::backtrace::Backtrace][std::io::Stdout][std::io::Stderr][std::io::Error][std::fs::File]
-    [rustc_arena::DroplessArena][crate::memmap::Mmap][crate::profiling::SelfProfiler]
-    [crate::owned_slice::OwnedSlice]
+    [std::backtrace::Backtrace]
+    [std::io::Stdout]
+    [std::io::Stderr]
+    [std::io::Error]
+    [std::fs::File]
+    [std::sync::Condvar]
+    [jobserver_crate::Client]
+    [jobserver_crate::HelperThread]
+    [jobserver_crate::Acquired]
+    [Box<dyn Any + Send>]
+    [rustc_arena::DroplessArena]
+    [crate::memmap::Mmap]
+    [crate::profiling::SelfProfiler]
+    [crate::owned_slice::OwnedSlice]
 );
 
 macro_rules! impl_dyn_send {
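Reviewer note, not part of the patch: `already_send!` takes a flat list of `[Type]` entries for types whose existing `Send` impl is simply trusted, which is why this hunk can grow the list without touching any impl blocks. A minimal sketch of the presumed pattern follows; the `DynSend` definition and macro body are simplified guesses, since the real ones sit above this hunk and are not shown in the diff:

```rust
// Simplified stand-ins for the trait and macro defined earlier in marker.rs
// (names match the diff; the bodies are assumptions).
unsafe trait DynSend {}

macro_rules! already_send {
    ($([$ty:ty])*) => {
        // One unconditional marker impl per listed type; sound only because
        // each of these types is already `Send`.
        $(unsafe impl DynSend for $ty {})*
    };
}

// The entries added in this commit would then expand to plain impls such as
// `unsafe impl DynSend for std::sync::Condvar {}`.
already_send!([std::sync::Condvar][std::io::Stdout]);

fn main() {}
```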
@@ -64,10 +76,14 @@ macro_rules! impl_dyn_send {
 
 impl_dyn_send!(
     [std::sync::atomic::AtomicPtr<T> where T]
-    [std::sync::Mutex<T> where T: ?Sized + DynSend]
+    [std::sync::Mutex<T> where T: ?Sized + DynSend]
+    [std::sync::RwLock<T> where T: ?Sized + DynSend]
     [std::sync::mpsc::Sender<T> where T: DynSend]
+    [std::sync::mpsc::Receiver<T> where T: DynSend]
     [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
+    [std::sync::OnceLock<T> where T: DynSend]
     [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
+    [std::thread::JoinHandle<T> where T]
     [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
     [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
     [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
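Likewise for `impl_dyn_send!` (again a reviewer-side sketch, not part of the patch): each entry carries a type plus a `where` list, so the new `RwLock`, `Receiver`, `OnceLock`, and `JoinHandle` lines presumably expand to conditional impls that forward those bounds, reusing the `DynSend` stand-in from the sketch above:

```rust
// Presumed expansions of the newly added entries (bound style taken verbatim
// from the bracketed lines in this hunk).
unsafe impl<T: ?Sized + DynSend> DynSend for std::sync::RwLock<T> {}
unsafe impl<T: DynSend> DynSend for std::sync::mpsc::Receiver<T> {}
unsafe impl<T: DynSend> DynSend for std::sync::OnceLock<T> {}
unsafe impl<T> DynSend for std::thread::JoinHandle<T> {}
```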
@@ -119,9 +135,9 @@ macro_rules! already_sync {
 // These structures are already `Sync`.
 already_sync!(
     [std::sync::atomic::AtomicBool][std::sync::atomic::AtomicUsize][std::sync::atomic::AtomicU8]
-    [std::sync::atomic::AtomicU32][std::backtrace::Backtrace][std::io::Error][std::fs::File]
-    [jobserver_crate::Client][crate::memmap::Mmap][crate::profiling::SelfProfiler]
-    [crate::owned_slice::OwnedSlice]
+    [std::sync::atomic::AtomicU32][std::backtrace::Backtrace][std::sync::Condvar]
+    [std::io::Error][std::fs::File][jobserver_crate::Client][crate::memmap::Mmap]
+    [crate::profiling::SelfProfiler][crate::owned_slice::OwnedSlice]
 );
 
 // Use portable AtomicU64 for targets without native 64-bit atomics
@@ -142,7 +158,9 @@ impl_dyn_sync!(
     [std::sync::OnceLock<T> where T: DynSend + DynSync]
     [std::sync::Mutex<T> where T: ?Sized + DynSend]
     [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
+    [std::sync::RwLock<T> where T: ?Sized + DynSend + DynSync]
     [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
+    [std::sync::mpsc::SyncSender<T> where T: DynSend]
     [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
     [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
     [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
@@ -224,3 +242,15 @@ impl<T> std::ops::DerefMut for IntoDynSyncSend<T> {
         &mut self.0
     }
 }
+
+#[inline]
+pub fn downcast_box_any_dyn_send<T: Any>(this: Box<dyn Any + DynSend>) -> Result<Box<T>, ()> {
+    if <dyn Any>::is::<T>(&*this) {
+        unsafe {
+            let (raw, alloc): (*mut (dyn Any + DynSend), _) = Box::into_raw_with_allocator(this);
+            Ok(Box::from_raw_in(raw as *mut T, alloc))
+        }
+    } else {
+        Err(())
+    }
+}
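The new free function fills in for `Box::downcast`, which exists on `Box<dyn Any>` but not on the marker-augmented `Box<dyn Any + DynSend>`. A hedged usage sketch follows; the caller name and the `String` payload are invented for illustration and do not appear in the commit:

```rust
// Hypothetical caller inside the same crate: recover a concrete payload from
// a `Box<dyn Any + DynSend>`. On a type mismatch this version returns
// `Err(())` and the boxed value is simply dropped rather than handed back.
fn payload_as_string(payload: Box<dyn Any + DynSend>) -> Option<String> {
    match downcast_box_any_dyn_send::<String>(payload) {
        Ok(s) => Some(*s), // `Box<String>` unboxed to `String`
        Err(()) => None,
    }
}
```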