cycle_ptr
base_control.h
#ifndef CYCLE_PTR_DETAIL_BASE_CONTROL_H
#define CYCLE_PTR_DETAIL_BASE_CONTROL_H

#include <atomic>
#include <cassert>
#include <cstdint>
#include <functional>
#include <map>
#include <mutex>
#include <shared_mutex>
#include <tuple>
#include <cycle_ptr/detail/color.h>
#include <cycle_ptr/detail/hazard.h>
#include <cycle_ptr/detail/llist.h>
#include <cycle_ptr/detail/vertex.h>
#include <cycle_ptr/detail/intrusive_ptr.h>

namespace cycle_ptr {
template<typename> class cycle_allocator;
} /* namespace cycle_ptr */

namespace cycle_ptr::detail {


class generation;

///\brief Base class for all control blocks.
class base_control
: public link<base_control>
{
  friend class generation;
  friend class vertex;
  template<typename> friend class cycle_ptr::cycle_allocator;

  ///\brief Increment reference counter.
  friend auto intrusive_ptr_add_ref(base_control* bc)
  noexcept
  -> void {
    assert(bc != nullptr);

    [[maybe_unused]]
    std::uintptr_t old = bc->control_refs_.fetch_add(1u, std::memory_order_acquire);
    assert(old > 0u && old < UINTPTR_MAX);
  }

  ///\brief Decrement reference counter.
  friend auto intrusive_ptr_release(base_control* bc)
  noexcept
  -> void {
    assert(bc != nullptr);

    std::uintptr_t old = bc->control_refs_.fetch_sub(1u, std::memory_order_release);
    assert(old > 0u);

    if (old == 1u) std::invoke(bc->get_deleter_(), bc);
  }

  base_control(const base_control&) = delete;

 protected:
  class publisher;

  ///\brief Default constructor allocates a new generation.
  base_control();
  ///\brief Destructor.
  ~base_control() noexcept;

 public:
  ///\brief Create a control block that represents no ownership.
  static auto unowned_control() -> intrusive_ptr<base_control>;

  ///\brief Test if the object managed by this control is expired.
  auto expired() const
  noexcept
  -> bool {
    return get_color(store_refs_.load(std::memory_order_relaxed)) == color::black;
  }

  ///\brief Implements publisher lookup based on address range.
  static auto publisher_lookup(void* addr, std::size_t len) -> intrusive_ptr<base_control>;

  ///\brief Used by weak to strong reference promotion.
  auto weak_acquire() noexcept -> bool;

  ///\brief Acquire reference.
  ///\pre The color of this control block is neither black (expired) nor red.
  auto acquire_no_red()
  noexcept
  -> void {
    [[maybe_unused]]
    std::uintptr_t old = store_refs_.fetch_add(1u << color_shift, std::memory_order_relaxed);
    assert(get_color(old) != color::black && get_color(old) != color::red);
  }

  ///\brief Acquire reference.
  auto acquire() noexcept -> void;

  ///\brief Release reference counter.
  ///\details Runs the GC when the last strong reference is dropped, unless \p skip_gc is set.
  auto release(bool skip_gc = false)
  noexcept
  -> void {
    const std::uintptr_t old = store_refs_.fetch_sub(
        1u << color_shift,
        std::memory_order_release);
    assert(get_refs(old) > 0u);

    if (!skip_gc && get_refs(old) == 1u) gc();
  }

  ///\brief Run GC.
  auto gc() noexcept -> void;

  ///\brief Register a vertex.
  auto push_back(vertex& v)
  noexcept
  -> void {
    std::lock_guard<std::mutex> lck{ mtx_ };
    edges_.push_back(v);
  }

  ///\brief Deregister a vertex.
  auto erase(vertex& v)
  noexcept
  -> void {
    std::lock_guard<std::mutex> lck{ mtx_ };
    edges_.erase(edges_.iterator_to(v));
  }

  ///\brief Test if this control block represents an unowned object.
  virtual auto is_unowned() const noexcept -> bool;

 private:
  ///\brief Clear the data of the managed object (implemented by derived control blocks).
  virtual auto clear_data_() noexcept -> void = 0;
  ///\brief Retrieve the deleter invoked to destroy this control block once its reference count drops to zero.
  virtual auto get_deleter_() const noexcept -> void (*)(base_control*) noexcept = 0;

  ///\brief Strong reference counter and color, packed into a single atomic.
  std::atomic<std::uintptr_t> store_refs_{ make_refcounter(1u, color::white) };
  ///\brief Reference counter on the control block itself.
  std::atomic<std::uintptr_t> control_refs_{ std::uintptr_t(1) };
  ///\brief Generation this control block belongs to.
  hazard_ptr<generation> generation_;
  ///\brief Mutex protecting edges_.
  std::mutex mtx_;
  ///\brief List of registered vertices (edges originating from the managed object).
  llist<vertex, vertex> edges_;

 public:
  ///\brief This variable indicates the managed object is under construction.
  bool under_construction = true;
};


///\brief Address range publisher.
class base_control::publisher {
 private:
  ///\brief Address range of a published object.
  struct address_range {
    ///\brief Base address of the range.
    void* addr;
    ///\brief Length of the range, in bytes.
    std::size_t len;

    ///\brief Equality comparison.
    auto operator==(const address_range& other) const
    noexcept
    -> bool {
      return std::tie(addr, len) == std::tie(other.addr, other.len);
    }

    ///\brief Inequality comparison.
    auto operator!=(const address_range& other) const
    noexcept
    -> bool {
      return !(*this == other);
    }

    ///\brief Ordering, by base address, used by the publisher map.
    auto operator<(const address_range& other) const
    noexcept
    -> bool {
      return addr < other.addr;
    }
  };

  ///\brief Map type in which address ranges are published.
  using map_type = std::map<address_range, base_control*>;

  publisher() = delete;
  publisher(const publisher&) = delete;

 public:
  ///\brief Publish the given address range as managed by control block \p bc.
  publisher(void* addr, std::size_t len, base_control& bc);
  ///\brief Destructor, unpublishes the range.
  ~publisher() noexcept;

  ///\brief Perform a lookup, to figure out which control manages the given address range.
  static auto lookup(void* addr, std::size_t len) -> intrusive_ptr<base_control>;

 private:
  ///\brief Access the singleton map of published ranges, together with the mutex protecting it.
  static auto singleton_map_() noexcept
  -> std::tuple<std::shared_mutex&, map_type&>;

  ///\brief Iterator to this publisher's entry in the singleton map.
  map_type::const_iterator iter_;
};


inline auto base_control::publisher_lookup(void* addr, std::size_t len)
-> intrusive_ptr<base_control> {
  return publisher::lookup(addr, len);
}


} /* namespace cycle_ptr::detail */

#endif /* CYCLE_PTR_DETAIL_BASE_CONTROL_H */
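
Example: packed reference counter (illustrative)

store_refs_ packs both a color and a strong reference count into one std::atomic<std::uintptr_t>: expired() reads the color, while acquire_no_red() and release() adjust the count by adding or subtracting 1u << color_shift. The sketch below is not part of the library; the real encoding lives in cycle_ptr/detail/color.h, which is not shown here. It assumes a layout in which the color occupies the bits below color_shift and the count the bits above (the demo namespace, the enumerator values, and the 2-bit color width are all assumptions), purely to illustrate how a single fetch_add/fetch_sub updates the count without touching the color bits.

// Standalone illustration; not the library's actual color.h encoding.
#include <atomic>
#include <cassert>
#include <cstdint>

namespace demo {

enum class color : std::uintptr_t { white = 0, red = 1, black = 2 };  // values assumed
constexpr unsigned color_shift = 2;                                    // width assumed
constexpr std::uintptr_t color_mask = (std::uintptr_t(1) << color_shift) - 1u;

constexpr auto make_refcounter(std::uintptr_t refs, color c) noexcept -> std::uintptr_t {
  return (refs << color_shift) | static_cast<std::uintptr_t>(c);
}
constexpr auto get_refs(std::uintptr_t v) noexcept -> std::uintptr_t {
  return v >> color_shift;
}
constexpr auto get_color(std::uintptr_t v) noexcept -> color {
  return color(v & color_mask);
}

} // namespace demo

int main() {
  using namespace demo;

  // Same initializer shape as store_refs_: one strong reference, colored white.
  std::atomic<std::uintptr_t> store_refs{ make_refcounter(1u, color::white) };

  // Acquire: adding 1u << color_shift bumps the count and leaves the color bits untouched.
  std::uintptr_t old = store_refs.fetch_add(1u << color_shift, std::memory_order_relaxed);
  assert(get_refs(old) == 1u && get_color(old) == color::white);

  // Release, twice: when get_refs(old) == 1u the last strong reference just went
  // away, which is the point at which base_control::release() would call gc().
  old = store_refs.fetch_sub(1u << color_shift, std::memory_order_release);
  assert(get_refs(old) == 2u);
  old = store_refs.fetch_sub(1u << color_shift, std::memory_order_release);
  assert(get_refs(old) == 1u);  // last strong reference dropped here
}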