Commit 20fa7182 authored by Dominik Charousset

Simplify memory cache using new `embedded` class

parent 1fa62d11
......@@ -27,7 +27,7 @@ namespace caf {
namespace detail {
template <class Base>
class embedded : public Base {
class embedded final : public Base {
public:
template <class... Vs>
embedded(intrusive_ptr<ref_counted> storage, Vs&&... vs)
......
......@@ -24,23 +24,26 @@
#include "caf/ref_counted.hpp"
#include "caf/mixin/memory_cached.hpp"
#include "caf/detail/memory.hpp"
#include "caf/detail/type_traits.hpp"
namespace caf {
namespace detail {
template <class T, class... Ts>
typename std::enable_if<mixin::is_memory_cached<T>::value,
intrusive_ptr<T>>::type
typename std::enable_if<
detail::is_memory_cached<T>::value,
intrusive_ptr<T>
>::type
make_counted(Ts&&... args) {
return {detail::memory::create<T>(std::forward<Ts>(args)...)};
}
template <class T, class... Ts>
typename std::enable_if<!mixin::is_memory_cached<T>::value,
intrusive_ptr<T>>::type
typename std::enable_if<
!detail::is_memory_cached<T>::value,
intrusive_ptr<T>
>::type
make_counted(Ts&&... args) {
return {new T(std::forward<Ts>(args)...)};
}
......
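
The two overloads above pick the allocation path at compile time: types that advertise a memory_cache_flag are routed through detail::memory::create, everything else through a plain new. Below is a minimal standalone sketch of that selection; it is not the real CAF code — std::shared_ptr stands in for intrusive_ptr, and cached_foo / plain_foo are hypothetical example types.

#include <iostream>
#include <memory>
#include <type_traits>
#include <utility>

// Hypothetical stand-ins; none of this is the real CAF code.
enum memory_cache_flag_type { needs_embedding, provides_embedding };

// Detection trait modeled on detail::is_memory_cached: true iff T
// exposes a memory_cache_flag constant.
template <class T>
class is_memory_cached {
  template <class U, bool = U::memory_cache_flag>
  static std::true_type check(int);
  template <class>
  static std::false_type check(...);
public:
  static constexpr bool value = decltype(check<T>(0))::value;
};

struct cached_foo {
  static constexpr auto memory_cache_flag = needs_embedding;
};

struct plain_foo {};

// Two overloads selected via SFINAE, mirroring make_counted above;
// std::shared_ptr stands in for intrusive_ptr.
template <class T, class... Ts>
typename std::enable_if<is_memory_cached<T>::value, std::shared_ptr<T>>::type
make_counted(Ts&&... args) {
  std::cout << "cache-aware allocation path\n";
  return std::make_shared<T>(std::forward<Ts>(args)...);
}

template <class T, class... Ts>
typename std::enable_if<!is_memory_cached<T>::value, std::shared_ptr<T>>::type
make_counted(Ts&&... args) {
  std::cout << "plain allocation path\n";
  return std::make_shared<T>(std::forward<Ts>(args)...);
}

int main() {
  make_counted<cached_foo>(); // prints "cache-aware allocation path"
  make_counted<plain_foo>();  // prints "plain allocation path"
}
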
......@@ -29,6 +29,9 @@
#include "caf/config.hpp"
#include "caf/ref_counted.hpp"
#include "caf/detail/embedded.hpp"
#include "caf/detail/memory_cache_flag_type.hpp"
namespace caf {
class mailbox_element;
} // namespace caf
......@@ -45,140 +48,133 @@ constexpr size_t s_max_elements = 20; // don't create > 20 elements
} // namespace <anonymous>
class instance_wrapper {
public:
virtual ~instance_wrapper();
// calls the destructor
virtual void destroy() = 0;
// releases memory
virtual void deallocate() = 0;
};
using embedded_storage = std::pair<intrusive_ptr<ref_counted>, void*>;
class memory_cache {
public:
virtual ~memory_cache();
// calls dtor and either releases memory or re-uses it later
virtual void release_instance(void*) = 0;
virtual std::pair<instance_wrapper*, void*> new_instance() = 0;
// casts `ptr` to the derived type and returns it
virtual void* downcast(memory_managed* ptr) = 0;
virtual embedded_storage new_embedded_storage() = 0;
};
class instance_wrapper;
template <class T>
class basic_memory_cache;
#ifdef CAF_NO_MEM_MANAGEMENT
class memory {
template <class T>
struct rc_storage : public ref_counted {
T instance;
template <class... Vs>
rc_storage(Vs&&... vs) : instance(this, std::forward<Vs>(vs)...) {
// nop
}
};
memory() = delete;
template <class T>
T* unbox_rc_storage(T* ptr) {
return ptr;
}
template <class T>
T* unbox_rc_storage(rc_storage<T>* ptr) {
return &(ptr->instance);
}
class memory {
public:
memory() = delete;
// Allocates storage, initializes a new object, and returns the new instance.
template <class T, class... Ts>
static T* create(Ts&&... args) {
return new T(std::forward<Ts>(args)...);
using embedded_t =
typename std::conditional<
T::memory_cache_flag == provides_embedding,
rc_storage<T>,
T
>::type;
return unbox_rc_storage(new embedded_t(std::forward<Ts>(args)...));
}
static inline memory_cache* get_cache_map_entry(const std::type_info*) {
return nullptr;
}
};
#else // CAF_NO_MEM_MANAGEMENT
template <class T>
class basic_memory_cache : public memory_cache {
public:
static constexpr size_t ne = s_alloc_size / sizeof(T);
static constexpr size_t ms = ne < s_min_elements ? s_min_elements : ne;
static constexpr size_t dsize = ms > s_max_elements ? s_max_elements : ms;
struct wrapper : instance_wrapper {
ref_counted* parent;
union {
T instance;
static_assert(dsize > 0, "dsize == 0");
using embedded_t =
typename std::conditional<
T::memory_cache_flag == needs_embedding,
embedded<T>,
T
>::type;
struct wrapper {
union {
embedded_t instance;
};
wrapper() : parent(nullptr) {}
~wrapper() {}
void destroy() { instance.~T(); }
void deallocate() { parent->deref(); }
wrapper() {
// nop
}
~wrapper() {
// nop
}
};
class storage : public ref_counted {
public:
storage() {
for (auto& elem : data) {
// each instance has a reference to its parent
elem.parent = this;
ref(); // deref() is called in wrapper::deallocate
}
storage() : m_pos(0) {
// nop
}
using iterator = wrapper*;
~storage() {
// nop
}
iterator begin() { return data; }
bool has_next() {
return m_pos < dsize;
}
iterator end() { return begin() + dsize; }
embedded_t* next() {
return &(m_data[m_pos++].instance);
}
private:
wrapper data[dsize];
size_t m_pos;
wrapper m_data[dsize];
};
public:
std::vector<wrapper*> cached_elements;
basic_memory_cache() { cached_elements.reserve(dsize); }
~basic_memory_cache() {
for (auto e : cached_elements) e->deallocate();
}
void* downcast(memory_managed* ptr) { return static_cast<T*>(ptr); }
void release_instance(void* vptr) override {
CAF_REQUIRE(vptr != nullptr);
auto ptr = reinterpret_cast<T*>(vptr);
CAF_REQUIRE(ptr->outer_memory != nullptr);
auto wptr = static_cast<wrapper*>(ptr->outer_memory);
wptr->destroy();
wptr->deallocate();
}
std::pair<instance_wrapper*, void*> new_instance() override {
if (cached_elements.empty()) {
auto elements = new storage;
for (auto i = elements->begin(); i != elements->end(); ++i) {
cached_elements.push_back(i);
}
embedded_storage new_embedded_storage() override {
// allocate cache on-the-fly
if (!m_cache) {
m_cache.reset(new storage);
}
auto res = m_cache->next();
if (m_cache->has_next()) {
return {m_cache, res};
}
wrapper* wptr = cached_elements.back();
cached_elements.pop_back();
return std::make_pair(wptr, &(wptr->instance));
// we got the last element out of the cache; pass the reference to the
// client to avoid pointless increase/decrease ops on the reference count
embedded_storage result;
result.first.reset(m_cache.release(), false);
result.second = res;
return result;
}
private:
intrusive_ptr<storage> m_cache;
};
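
basic_memory_cache hands out slots from a heap-allocated storage block that is itself reference counted; every handed-out slot carries a handle to that block, so the block is freed only after the cache and all live instances have dropped it. The following toy slab cache models that idea under simplified assumptions: std::shared_ptr replaces intrusive_ptr<ref_counted>, destructor bookkeeping is omitted, and the real cache's trick of passing its own last reference to the caller (see new_embedded_storage above) is reduced to a comment.

#include <cstddef>
#include <iostream>
#include <memory>
#include <new>
#include <type_traits>
#include <utility>

// Toy model of the slab idea behind basic_memory_cache: a block of raw slots
// whose lifetime is tied to a reference count shared with every handed-out
// slot. std::shared_ptr stands in for intrusive_ptr<ref_counted>.
template <class T, std::size_t N = 4>
class toy_slab_cache {
public:
  // Mirrors embedded_storage: a handle keeping the block alive plus raw memory.
  using storage_handle = std::pair<std::shared_ptr<void>, void*>;

  storage_handle next_slot() {
    if (!slab_) {
      slab_ = std::make_shared<slab>(); // allocate the slab on-the-fly
    }
    void* raw = &slab_->slots[slab_->pos++];
    storage_handle result{slab_, raw};
    if (slab_->pos == N) {
      // Slab is full; from now on it lives only through the handles.
      // (The real cache hands over its own last reference here instead of
      // taking an extra one, to save a pointless ref/unref pair.)
      slab_.reset();
    }
    return result;
  }

private:
  struct slab {
    typename std::aligned_storage<sizeof(T), alignof(T)>::type slots[N];
    std::size_t pos = 0;
  };
  std::shared_ptr<slab> slab_;
};

int main() {
  toy_slab_cache<int> cache;
  auto h = cache.next_slot();
  // Placement-construct into the slot; the handle keeps the slab alive.
  int* p = new (h.second) int(42);
  std::cout << *p << '\n';
  // int is trivially destructible, so no explicit destructor call is needed.
}
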
class memory {
......@@ -191,13 +187,19 @@ class memory {
public:
// Allocates storage, initializes a new object, and returns the new instance.
template <class T, class... Ts>
static T* create(Ts&&... args) {
template <class T, class... Vs>
static T* create(Vs&&... vs) {
using embedded_t =
typename std::conditional<
T::memory_cache_flag == needs_embedding,
embedded<T>,
T
>::type;
auto mc = get_or_set_cache_map_entry<T>();
auto p = mc->new_instance();
auto result = new (p.second) T(std::forward<Ts>(args)...);
result->outer_memory = p.first;
return result;
auto es = mc->new_embedded_storage();
auto ptr = reinterpret_cast<embedded_t*>(es.second);
new (ptr) embedded_t(std::move(es.first), std::forward<Vs>(vs)...);
return ptr;
}
static memory_cache* get_cache_map_entry(const std::type_info* tinf);
......@@ -205,7 +207,7 @@ class memory {
private:
static void add_cache_map_entry(const std::type_info* tinf,
memory_cache* instance);
template <class T>
static inline memory_cache* get_or_set_cache_map_entry() {
......
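
memory::create above placement-constructs an embedded_t into the slot returned by new_embedded_storage and hands it the storage reference. The sketch below shows the same lifetime pattern in isolation; toy_embedded only imitates detail::embedded, std::shared_ptr again replaces intrusive_ptr, and greeter is a made-up payload type.

#include <iostream>
#include <memory>
#include <new>
#include <utility>

// Standalone model of the create() pattern: construct an object into
// externally owned storage and hand it a handle that keeps that storage
// alive until the object has been disposed.
template <class Base>
class toy_embedded : public Base {
public:
  template <class... Vs>
  toy_embedded(std::shared_ptr<void> storage, Vs&&... vs)
      : Base(std::forward<Vs>(vs)...), storage_(std::move(storage)) {
    // nop
  }

  // Instead of `delete this`: run the destructor in place and let the
  // storage handle's reference count decide when the memory goes away.
  void dispose() {
    auto keep_alive = std::move(storage_);
    this->~toy_embedded();
    // keep_alive releases its reference when it goes out of scope
  }

private:
  std::shared_ptr<void> storage_;
};

struct greeter {
  explicit greeter(const char* who) : who(who) {
    // nop
  }
  void greet() const {
    std::cout << "hello, " << who << '\n';
  }
  const char* who;
};

int main() {
  using embedded_t = toy_embedded<greeter>;
  // One heap block provides both the shared count and the object slot.
  std::shared_ptr<unsigned char> block(new unsigned char[sizeof(embedded_t)],
                                       std::default_delete<unsigned char[]>());
  auto* obj = new (block.get()) embedded_t(block, "world");
  obj->greet();
  obj->dispose(); // object destroyed; the block dies with its last handle
}
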
......@@ -17,76 +17,19 @@
* http://www.boost.org/LICENSE_1_0.txt. *
******************************************************************************/
#ifndef CAF_MIXIN_MEMORY_CACHED_HPP
#define CAF_MIXIN_MEMORY_CACHED_HPP
#include <utility>
#include <type_traits>
#include "caf/detail/memory.hpp"
#ifndef CAF_DETAIL_MEMORY_CACHE_FLAG_TYPE
#define CAF_DETAIL_MEMORY_CACHE_FLAG_TYPE
namespace caf {
namespace mixin {
/**
* This mixin adds all member functions and member variables needed
* by the memory management subsystem.
*/
template <class Base, class Subtype>
class memory_cached : public Base {
friend class detail::memory;
template <class>
friend class detail::basic_memory_cache;
protected:
using combined_type = memory_cached;
public:
static constexpr bool is_memory_cached_type = true;
void request_deletion() override {
auto mc = detail::memory::get_cache_map_entry(&typeid(*this));
if (!mc) {
auto om = outer_memory;
if (om) {
om->destroy();
om->deallocate();
} else
delete this;
} else
mc->release_instance(mc->downcast(this));
}
template <class... Ts>
memory_cached(Ts&&... args)
: Base(std::forward<Ts>(args)...), outer_memory(nullptr) {}
private:
detail::instance_wrapper* outer_memory;
};
template <class T>
struct is_memory_cached {
template <class U, bool = U::is_memory_cached_type>
static std::true_type check(int);
template <class>
static std::false_type check(...);
public:
static constexpr bool value = decltype(check<T>(0))::value;
namespace detail {
enum memory_cache_flag_type {
needs_embedding,
provides_embedding
};
} // namespace mixin
} // namespace detail
} // namespace caf
#endif // CAF_MIXIN_MEMORY_CACHED_HPP
#endif // CAF_DETAIL_MEMORY_CACHE_FLAG_TYPE
......@@ -24,43 +24,68 @@
#include "caf/ref_counted.hpp"
#include "caf/detail/embedded.hpp"
#include "caf/mixin/memory_cached.hpp"
#include "caf/detail/memory_cache_flag_type.hpp"
namespace caf {
namespace detail {
// Reduces memory allocations by placing two independent types on one
// memory block. Typical use case is to combine the content of a message
// (tuple_vals) with its "context" (message ID and sender; mailbox_element).
//
// pair_storage<mailbox_element, tuple_vals<Ts...>>:
//
//  +-----------------------------------------------+
//  |                                               |
//  |     +------------+                            |
//  |     |            | intrusive_ptr              | intrusive_ptr
//  v     v            |                            |
// +------------+-------------------+---------------------+
// | refcount   | mailbox_element   | tuple_vals<Ts...>   |
// +------------+-------------------+---------------------+
//                     ^                   ^
//                     |                   |
//  unique_ptr<mailbox_element,            |
//             detail::disposer>           |
//                                         |
//                                         |
//               intrusive_ptr<message_data>
template <class FirstType, class SecondType>
class pair_storage : public extend<ref_counted>::
with<mixin::memory_cached> {
class pair_storage {
public:
union { embedded<FirstType> first; };
union { embedded<SecondType> second; };
template <class... Vs>
pair_storage(std::integral_constant<size_t, 0>, Vs&&... vs)
: first(this),
second(this, std::forward<Vs>(vs)...) {
pair_storage(intrusive_ptr<ref_counted> storage,
std::integral_constant<size_t, 0>, Vs&&... vs)
: first(storage),
second(std::move(storage), std::forward<Vs>(vs)...) {
// nop
}
template <class V0, class... Vs>
pair_storage(std::integral_constant<size_t, 1>, V0&& v0, Vs&&... vs)
: first(this, std::forward<V0>(v0)),
second(this, std::forward<Vs>(vs)...) {
pair_storage(intrusive_ptr<ref_counted> storage,
std::integral_constant<size_t, 1>, V0&& v0, Vs&&... vs)
: first(storage, std::forward<V0>(v0)),
second(std::move(storage), std::forward<Vs>(vs)...) {
// nop
}
template <class V0, class V1, class... Vs>
pair_storage(std::integral_constant<size_t, 2>, V0&& v0, V1&& v1, Vs&&... vs)
: first(this, std::forward<V0>(v0), std::forward<V1>(v1)),
second(this, std::forward<Vs>(vs)...) {
pair_storage(intrusive_ptr<ref_counted> storage,
std::integral_constant<size_t, 2>, V0&& v0, V1&& v1, Vs&&... vs)
: first(storage, std::forward<V0>(v0), std::forward<V1>(v1)),
second(std::move(storage), std::forward<Vs>(vs)...) {
// nop
}
~pair_storage() {
// nop
}
static constexpr auto memory_cache_flag = provides_embedding;
};
} // namespace detail
......
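
pair_storage keeps first and second inside unnamed unions so that neither member is constructed or destroyed implicitly; each embedded<T> manages its own lifetime and the enclosing destructor is a no-op. The toy below demonstrates just that union trick with a made-up tracer type; unlike the real class, it cleans up in its own destructor so the example stays self-contained.

#include <iostream>
#include <new>

// A made-up type with noisy construction/destruction to make lifetimes visible.
struct tracer {
  explicit tracer(const char* name) : name(name) {
    std::cout << "construct " << name << '\n';
  }
  ~tracer() {
    std::cout << "destroy " << name << '\n';
  }
  const char* name;
};

// Same union trick as pair_storage: members inside unnamed unions are neither
// constructed nor destroyed implicitly, so lifetime is handled explicitly.
struct toy_pair {
  union { tracer first; };
  union { tracer second; };

  toy_pair(const char* a, const char* b) {
    new (&first) tracer(a);  // explicit placement construction
    new (&second) tracer(b);
  }

  ~toy_pair() {
    second.~tracer();
    first.~tracer();
  }
};

int main() {
  toy_pair p("mailbox_element", "tuple_vals<Ts...>");
  std::cout << p.first.name << " + " << p.second.name << '\n';
}
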
......@@ -267,7 +267,7 @@ class has_static_type_name {
private:
template <class U,
class = typename std::enable_if<
!std::is_member_pointer<decltype(&U::is_baz)>::value
!std::is_member_pointer<decltype(&U::static_type_name)>::value
>::type>
static std::true_type sfinae_fun(int);
template <class>
......@@ -276,6 +276,22 @@ class has_static_type_name {
static constexpr bool value = decltype(sfinae_fun<T>(0))::value;
};
/**
* Checks whether `T::memory_cache_flag` exists.
*/
template <class T>
class is_memory_cached {
private:
template <class U, bool = U::memory_cache_flag>
static std::true_type check(int);
template <class>
static std::false_type check(...);
public:
static constexpr bool value = decltype(check<T>(0))::value;
};
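
A quick compile-time check of the trait, restating it together with the memory_cache_flag_type enum from this commit; cached_type and plain_type are hypothetical examples, not CAF classes.

#include <type_traits>

enum memory_cache_flag_type { needs_embedding, provides_embedding };

struct cached_type {
  static constexpr auto memory_cache_flag = needs_embedding;
};

struct plain_type { };

// Restated from the hunk above.
template <class T>
class is_memory_cached {
  template <class U, bool = U::memory_cache_flag>
  static std::true_type check(int);
  template <class>
  static std::false_type check(...);
public:
  static constexpr bool value = decltype(check<T>(0))::value;
};

static_assert(is_memory_cached<cached_type>::value, "flag not detected");
static_assert(!is_memory_cached<plain_type>::value, "false positive");

int main() {
  // nop
}
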
/**
* Returns either `T` or `T::type` if `T` is an option.
*/
......
......@@ -46,13 +46,12 @@
#include "caf/message_priority.hpp"
#include "caf/check_typed_input.hpp"
#include "caf/mixin/memory_cached.hpp"
#include "caf/detail/logging.hpp"
#include "caf/detail/disposer.hpp"
#include "caf/detail/behavior_stack.hpp"
#include "caf/detail/typed_actor_util.hpp"
#include "caf/detail/single_reader_queue.hpp"
#include "caf/detail/memory_cache_flag_type.hpp"
namespace caf {
......@@ -64,11 +63,13 @@ class sync_handle_helper;
* @warning Instances of `local_actor` start with a reference count of 1
* @extends abstract_actor
*/
class local_actor : public extend<abstract_actor>::with<mixin::memory_cached> {
class local_actor : public abstract_actor {
public:
using del = detail::disposer;
using mailbox_type = detail::single_reader_queue<mailbox_element, del>;
static constexpr auto memory_cache_flag = detail::needs_embedding;
~local_actor();
/**************************************************************************
......@@ -573,7 +574,7 @@ class local_actor : public extend<abstract_actor>::with<mixin::memory_cached> {
void delayed_send_impl(message_priority prio, const channel& whom,
const duration& rtime, message data);
using super = combined_type;
std::function<void()> m_sync_failure_handler;
std::function<void()> m_sync_timeout_handler;
};
......
......@@ -28,19 +28,20 @@
#include "caf/message_id.hpp"
#include "caf/ref_counted.hpp"
#include "caf/mixin/memory_cached.hpp"
#include "caf/detail/memory.hpp"
#include "caf/detail/embedded.hpp"
#include "caf/detail/disposer.hpp"
#include "caf/detail/tuple_vals.hpp"
#include "caf/detail/pair_storage.hpp"
#include "caf/detail/message_data.hpp"
#include "caf/detail/memory_cache_flag_type.hpp"
namespace caf {
class mailbox_element : public extend<memory_managed>::
with<mixin::memory_cached> {
class mailbox_element : public memory_managed {
public:
static constexpr auto memory_cache_flag = detail::needs_embedding;
mailbox_element* next; // intrusive next pointer
mailbox_element* prev; // intrusive previous pointer
bool marked; // denotes if this node is currently processed
......
......@@ -76,9 +76,9 @@ inline void insert_dmsg(Map& storage, const duration& d, Ts&&... vs) {
storage.insert(std::make_pair(std::move(tout), std::move(dmsg)));
}
class timer_actor final : public detail::proper_actor<blocking_actor,
timer_actor_policies>,
public spawn_as_is {
class timer_actor : public detail::proper_actor<blocking_actor,
timer_actor_policies>,
public spawn_as_is {
public:
inline mailbox_element_ptr dequeue() {
await_data();
......
......@@ -32,7 +32,7 @@ namespace caf {
// later on in spawn(); this prevents subtle bugs that lead to segfaults,
// e.g., when calling address() in the ctor of a derived class
local_actor::local_actor()
: super(size_t{1}),
: abstract_actor(size_t{1}),
m_planned_exit_reason(exit_reason::not_exited) {
// nop
}
......@@ -165,7 +165,7 @@ response_promise local_actor::make_response_promise() {
void local_actor::cleanup(uint32_t reason) {
CAF_LOG_TRACE(CAF_ARG(reason));
super::cleanup(reason);
abstract_actor::cleanup(reason);
// tell registry we're done
is_registered(false);
}
......
......@@ -88,10 +88,6 @@ void memory::add_cache_map_entry(const type_info* tinf,
cache[tinf].reset(instance);
}
instance_wrapper::~instance_wrapper() {
// nop
}
} // namespace detail
} // namespace caf
......
......@@ -31,8 +31,6 @@
#include "caf/exception.hpp"
#include "caf/ref_counted.hpp"
#include "caf/mixin/memory_cached.hpp"
#include "caf/io/fwd.hpp"
#include "caf/io/accept_handle.hpp"
#include "caf/io/receive_policy.hpp"
......
......@@ -34,10 +34,9 @@
#include "caf/io/network/protocol.hpp"
#include "caf/io/network/native_socket.hpp"
#include "caf/mixin/memory_cached.hpp"
#include "caf/detail/memory.hpp"
#include "caf/detail/disposer.hpp"
#include "caf/detail/memory_cache_flag_type.hpp"
namespace boost {
namespace asio {
......@@ -117,7 +116,8 @@ class multiplexer {
/**
* Simple wrapper for runnables
*/
struct runnable : extend<memory_managed>::with<mixin::memory_cached> {
struct runnable : memory_managed {
static constexpr auto memory_cache_flag = detail::needs_embedding;
virtual void run() = 0;
virtual ~runnable();
};
......