1use crate::{
2 ghost::{
3 Committer, FnGhost,
4 perm::{Container, Perm},
5 },
6 prelude::*,
7};
8#[cfg(feature = "nightly")]
9use core::alloc::Allocator;
10use std::sync::Arc;
11
#[cfg(feature = "nightly")]
impl<T: DeepModel + ?Sized, A: Allocator> DeepModel for Arc<T, A> {
    type DeepModelTy = T::DeepModelTy;
    // The deep model of an `Arc` is the deep model of the value it points to:
    // `view()` yields the logical contents (a `Box<T>`, see the `View` impl
    // below), which is then deep-modeled and dereferenced.
    #[logic(open, inline)]
    fn deep_model(self) -> Self::DeepModelTy {
        pearlite! { *self.view().deep_model() }
    }
}
20
#[cfg(feature = "nightly")]
impl<T: ?Sized, A: Allocator> View for Arc<T, A> {
    // Logically, an `Arc<T>` is viewed as the boxed value it points to.
    type ViewTy = Box<T>;
    // `opaque`: the definition is not exposed to provers — `view` is an
    // uninterpreted logic function (`dead` marks the body as unreachable).
    #[logic(opaque)]
    fn view(self) -> Self::ViewTy {
        dead
    }
}
29
// Specifications attached to `std::sync::Arc` methods via `extern_spec!`.
// All three are `#[check(ghost)]`, i.e. callable from ghost code.
extern_spec! {
    mod std {
        mod sync {
            impl<T> Arc<T> {
                // `Arc::new(value)` logically contains exactly `value`.
                #[check(ghost)]
                #[ensures(*result@ == value)]
                fn new(value: T) -> Self;
            }

            impl<T, A: Allocator> AsRef for Arc<T, A> {
                // The reference returned by `as_ref` points to the shared value.
                #[check(ghost)]
                #[ensures(*result == *(*self)@)]
                fn as_ref(&self) -> &T;
            }
        }
    }

    impl<T: ?Sized, A: Allocator + Clone> Clone for Arc<T, A> {
        // Cloning an `Arc` yields a handle with the same logical contents.
        #[check(ghost)]
        #[ensures(result@ == (*self)@)]
        fn clone(&self) -> Arc<T, A>;
    }
}
53
// Fallback for stable toolchains: the `Allocator` parameter is not available,
// so only the default-allocator `Arc<T>` is modeled. The trait presumably
// supplies a default `deep_model` in this configuration — TODO confirm.
#[cfg(not(feature = "nightly"))]
impl<T: DeepModel> DeepModel for Arc<T> {
    type DeepModelTy = T::DeepModelTy;
}
59
// Fallback for stable toolchains. NOTE(review): here the view type is `T`
// itself, whereas the nightly impl above views an `Arc<T>` as `Box<T>` —
// confirm this asymmetry is intentional.
#[cfg(not(feature = "nightly"))]
impl<T> View for Arc<T> {
    type ViewTy = T;
}
64
/// Verified wrapper around [`std::sync::atomic::AtomicI32`].
///
/// Ownership of the stored value is tracked by a ghost permission
/// (`Perm<AtomicI32>`, handed out by [`AtomicI32::new`]); updates are
/// described to the proof via `Committer`s (see `fetch_add` / `store`).
pub struct AtomicI32(::std::sync::atomic::AtomicI32);
67
impl Container for AtomicI32 {
    // The value guarded by a permission on this container.
    type Value = i32;

    // Two permissions are disjoint iff they guard distinct atomics:
    // disjointness is decided purely by inequality of the containers,
    // independently of the stored values (which are ignored here).
    #[logic(open, inline)]
    fn is_disjoint(&self, _: &Self::Value, other: &Self, _: &Self::Value) -> bool {
        self != other
    }
}
76
impl AtomicI32 {
    /// Creates a new atomic initialized to `val`, together with the ghost
    /// permission witnessing ownership of its contents.
    ///
    /// The returned permission's value is `val` and its ward is the new
    /// atomic. `#[trusted]`: the permission is conjured, not proved.
    #[ensures(*result.1.val() == val)]
    #[ensures(*result.1.ward() == result.0)]
    #[trusted]
    #[check(terminates)]
    pub fn new(val: i32) -> (Self, Ghost<Box<Perm<AtomicI32>>>) {
        (Self(std::sync::atomic::AtomicI32::new(val)), Ghost::conjure())
    }

    /// Atomically adds `val` to the current value (SeqCst) and returns the
    /// previous value, running the ghost closure `f` logically at the point
    /// of the update.
    ///
    /// The precondition requires `f` to accept *any* un-shot `Committer`
    /// whose ward is this atomic and whose update is
    /// `new_value == val + old_value`, and to leave that committer shot.
    /// The postcondition exposes such a committer with
    /// `old_value == result.0` together with `f`'s result `result.1`.
    // NOTE(review): the spec relates values with `val + old_value`, while the
    // runtime `fetch_add` wraps on overflow — presumably overflow is excluded
    // elsewhere (e.g. by Creusot's overflow checking); confirm.
    #[requires(forall<c: &mut Committer> !c.shot() ==> c.ward() == *self ==> c.new_value() == val + c.old_value() ==>
        f.precondition((c,)) &&
        (forall<r> f.postcondition_once((c,), r) ==> (^c).shot())
    )]
    #[ensures(exists<c: &mut Committer>
        !c.shot() && c.ward() == *self && c.new_value() == val + c.old_value() &&
        c.old_value() == result.0 &&
        f.postcondition_once((c,), *(result.1))
    )]
    #[trusted]
    #[allow(unused_variables)] // `f` is ghost-only; unused at runtime
    pub fn fetch_add<'a, A, F>(&'a self, val: i32, f: Ghost<F>) -> (i32, Ghost<A>)
    where
        F: FnGhost + FnOnce(&'a mut Committer) -> A,
    {
        let res = self.0.fetch_add(val, std::sync::atomic::Ordering::SeqCst);
        (res, Ghost::conjure())
    }

    /// Atomically stores `val` (SeqCst), running the ghost closure `f`
    /// logically at the point of the update.
    ///
    /// Same shape as [`Self::fetch_add`], except the committer's update is
    /// unconditional (`new_value == val`, regardless of the old value) and
    /// no runtime result is returned besides `f`'s ghost result.
    #[requires(forall<c: &mut Committer> !c.shot() ==> c.ward() == *self ==> c.new_value() == val ==>
        f.precondition((c,)) &&
        (forall<r> f.postcondition_once((c,), r) ==> (^c).shot())
    )]
    #[ensures(exists<c: &mut Committer>
        !c.shot() && c.ward() == *self && c.new_value() == val &&
        f.postcondition_once((c,), *result)
    )]
    #[trusted]
    #[allow(unused_variables)] // `f` is ghost-only; unused at runtime
    pub fn store<'a, A, F>(&'a self, val: i32, f: Ghost<F>) -> Ghost<A>
    where
        F: FnGhost + FnOnce(&'a mut Committer) -> A,
    {
        self.0.store(val, std::sync::atomic::Ordering::SeqCst);
        Ghost::conjure()
    }

    /// Consumes the atomic together with its permission, returning the
    /// stored value.
    ///
    /// Requires the permission to actually guard *this* atomic; the result
    /// is then the permission's logical value.
    #[requires(self == *own.ward())]
    #[ensures(result == *own.val())]
    #[trusted]
    pub fn into_inner(self, own: Ghost<Box<Perm<AtomicI32>>>) -> i32 {
        self.0.into_inner()
    }
}
136}