use crate::{
    ghost::{
        Committer, FnGhost,
        perm::{Container, Perm},
    },
    prelude::*,
};
#[cfg(feature = "nightly")]
use core::alloc::Allocator;
use std::sync::Arc;

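// On nightly, an `Arc<T, A>` is viewed as a `Box<T>` of its contents, and its
// deep model is the deep model of those contents.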
#[cfg(feature = "nightly")]
impl<T: DeepModel + ?Sized, A: Allocator> DeepModel for Arc<T, A> {
    type DeepModelTy = T::DeepModelTy;

    #[logic(open, inline)]
    fn deep_model(self) -> Self::DeepModelTy {
        pearlite! { *self.view().deep_model() }
    }
}

#[cfg(feature = "nightly")]
impl<T: ?Sized, A: Allocator> View for Arc<T, A> {
    type ViewTy = Box<T>;

    #[logic(opaque)]
    fn view(self) -> Self::ViewTy {
        dead
    }
}

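// Contracts for the standard `Arc` API: `new` stores `value`, while `as_ref` and
// `clone` expose the same contents as the original.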
extern_spec! {
    mod std {
        mod sync {
            impl<T> Arc<T> {
                #[check(ghost)]
                #[ensures(*result@ == value)]
                fn new(value: T) -> Self;
            }

            impl<T, A: Allocator> AsRef<T> for Arc<T, A> {
                #[check(ghost)]
                #[ensures(*result == *(*self)@)]
                fn as_ref(&self) -> &T;
            }
        }
    }

    impl<T: ?Sized, A: Allocator + Clone> Clone for Arc<T, A> {
        #[check(ghost)]
        #[ensures(result@ == (*self)@)]
        fn clone(&self) -> Arc<T, A>;
    }
}

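// Without the nightly feature there is no allocator parameter, and only the
// associated model types are declared.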
#[cfg(not(feature = "nightly"))]
impl<T: DeepModel> DeepModel for Arc<T> {
    type DeepModelTy = T::DeepModelTy;
}

#[cfg(not(feature = "nightly"))]
impl<T> View for Arc<T> {
    type ViewTy = T;
}

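/// An atomic `i32` whose contents are governed by a ghost permission
/// (`Perm<AtomicI32>`). All runtime operations use `SeqCst` ordering.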
pub struct AtomicI32(::std::sync::atomic::AtomicI32);

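// Two `AtomicI32` containers are disjoint exactly when they are distinct.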
impl Container for AtomicI32 {
    type Value = i32;

    #[logic(open, inline)]
    fn is_disjoint(&self, _: &Self::Value, other: &Self, _: &Self::Value) -> bool {
        self != other
    }
}

impl AtomicI32 {
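    /// Creates a new `AtomicI32` holding `val`, together with the ghost
    /// permission that wards it.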
    #[ensures(*result.1.val() == val)]
    #[ensures(*result.1.ward() == result.0)]
    #[trusted]
    #[check(terminates)]
    pub fn new(val: i32) -> (Self, Ghost<Box<Perm<AtomicI32>>>) {
        (Self(std::sync::atomic::AtomicI32::new(val)), Ghost::conjure())
    }

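    /// Atomically adds `val` to the current value and returns the previous value.
    ///
    /// The ghost closure `f` is handed a `Committer` describing the update
    /// (`new_value == val + old_value`) and must leave it shot; its ghost result
    /// is returned alongside the previous value.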
    #[requires(forall<c: &mut Committer> !c.shot() ==> c.ward() == *self ==> c.new_value() == val + c.old_value() ==>
        f.precondition((c,)) && (forall<r> f.postcondition_once((c,), r) ==> (^c).shot())
    )]
    #[ensures(exists<c: &mut Committer>
        !c.shot() && c.ward() == *self && c.new_value() == val + c.old_value() &&
        c.old_value() == result.0 && f.postcondition_once((c,), *(result.1))
    )]
    #[trusted]
    #[allow(unused_variables)]
    pub fn fetch_add<'a, A, F>(&'a self, val: i32, f: Ghost<F>) -> (i32, Ghost<A>)
    where
        F: FnGhost + FnOnce(&'a mut Committer) -> A,
    {
        let res = self.0.fetch_add(val, std::sync::atomic::Ordering::SeqCst);
        (res, Ghost::conjure())
    }

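    /// Atomically stores `val`.
    ///
    /// The ghost closure `f` is handed a `Committer` for the write
    /// (`new_value == val`) and must leave it shot; its ghost result is returned.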
    #[requires(forall<c: &mut Committer> !c.shot() ==> c.ward() == *self ==> c.new_value() == val ==>
        f.precondition((c,)) && (forall<r> f.postcondition_once((c,), r) ==> (^c).shot())
    )]
    #[ensures(exists<c: &mut Committer>
        !c.shot() && c.ward() == *self && c.new_value() == val &&
        f.postcondition_once((c,), *result)
    )]
    #[trusted]
    #[allow(unused_variables)]
    pub fn store<'a, A, F>(&'a self, val: i32, f: Ghost<F>) -> Ghost<A>
    where
        F: FnGhost + FnOnce(&'a mut Committer) -> A,
    {
        self.0.store(val, std::sync::atomic::Ordering::SeqCst);
        Ghost::conjure()
    }

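    /// Consumes the atomic together with its permission, returning the stored value.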
    #[requires(self == *own.ward())]
    #[ensures(result == *own.val())]
    #[trusted]
    #[allow(unused_variables)]
    pub fn into_inner(self, own: Ghost<Box<Perm<AtomicI32>>>) -> i32 {
        self.0.into_inner()
    }
}