enb - rnti memory pool now falls back to new/delete if rnti-specific memory block is full

This commit is contained in:
Francisco 2021-04-08 13:44:34 +01:00 committed by Francisco Paisana
parent a780a6a7b6
commit a890a22ccd
3 changed files with 58 additions and 42 deletions

View File

@ -74,6 +74,14 @@ public:
}
}
unique_pool_ptr<T> make() final
{
return unique_pool_ptr<T>(do_allocate(), [this](T* ptr) {
// dtor is not called, as object is going to be recycled
do_deallocate(ptr);
});
}
void allocate_batch()
{
uint8_t* batch_payload = static_cast<uint8_t*>(allocated.allocate_block());
@ -89,7 +97,7 @@ public:
private:
friend class background_obj_pool<T>;
T* do_allocate() final
T* do_allocate()
{
if (cache.empty()) {
allocate_batch();
@ -99,10 +107,10 @@ private:
return static_cast<T*>(top);
}
void do_deallocate(void* payload_ptr) final
void do_deallocate(T* payload_ptr)
{
recycle_oper(*static_cast<T*>(payload_ptr));
void* header_ptr = cache.get_node_header(payload_ptr);
recycle_oper(*payload_ptr);
void* header_ptr = cache.get_node_header(static_cast<void*>(payload_ptr));
cache.push(header_ptr);
}
@ -146,10 +154,18 @@ public:
grow_pool.clear();
}
unique_pool_ptr<T> make() final
{
return unique_pool_ptr<T>(do_allocate(), [this](T* ptr) {
// dtor is not called, as object is going to be recycled
do_deallocate(ptr);
});
}
size_t cache_size() const { return grow_pool.cache_size(); }
private:
T* do_allocate() final
T* do_allocate()
{
std::lock_guard<std::mutex> lock(state->mutex);
T* obj = grow_pool.do_allocate();
@ -158,7 +174,7 @@ private:
}
return obj;
}
void do_deallocate(void* ptr) final
void do_deallocate(T* ptr)
{
std::lock_guard<std::mutex> lock(state->mutex);
return grow_pool.do_deallocate(ptr);

View File

@ -17,20 +17,16 @@
namespace srsran {
/// unique ptr with type-erased dtor, so that it can be used by any object or memory pool
constexpr size_t unique_pool_deleter_small_buffer = sizeof(void*) * 2u;
template <typename T>
using unique_pool_ptr = std::unique_ptr<T, srsran::move_callback<void(T*), unique_pool_deleter_small_buffer> >;
/// Common object pool interface
template <typename T>
class obj_pool_itf
{
public:
struct pool_deallocator {
obj_pool_itf<T>* pool;
explicit pool_deallocator(obj_pool_itf<T>* pool_ = nullptr) : pool(pool_) {}
void operator()(void* ptr)
{
// dtor is not called, as object is going to be recycled
pool->do_deallocate(ptr);
}
};
using object_type = T;
obj_pool_itf() = default;
@ -40,22 +36,26 @@ public:
obj_pool_itf& operator=(const obj_pool_itf&) = delete;
obj_pool_itf& operator=(obj_pool_itf&&) = delete;
virtual ~obj_pool_itf() = default;
std::unique_ptr<T, pool_deallocator> make()
{
return std::unique_ptr<T, pool_deallocator>(do_allocate(), pool_deallocator(this));
}
private:
// defined in child class
virtual T* do_allocate() = 0;
virtual void do_deallocate(void* ptr) = 0;
virtual ~obj_pool_itf() = default;
virtual unique_pool_ptr<T> make() = 0;
};
/// unique ptr with type-erased dtor, so that it can be used by any object pool
template <typename T>
using unique_pool_ptr = std::unique_ptr<T, typename obj_pool_itf<T>::pool_deallocator>;
/// Allocate object in memory pool
template <typename T, typename MemPool, typename... Args>
unique_pool_ptr<T> make_pool_obj_with_heap_fallback(MemPool& mempool, Args&&... args)
{
void* block = mempool.allocate(sizeof(T), alignof(T));
if (block == nullptr) {
return unique_pool_ptr<T>(new T(std::forward<Args>(args)...), std::default_delete<T>());
}
new (block) T(std::forward<Args>(args)...);
return unique_pool_ptr<T>(block, [&mempool](T* ptr) {
if (ptr != nullptr) {
ptr->~T();
mempool.deallocate(ptr);
}
});
}
} // namespace srsran

View File

@ -13,34 +13,34 @@
#ifndef SRSRAN_RNTI_POOL_H
#define SRSRAN_RNTI_POOL_H
#include "srsran/adt/pool/pool_interface.h"
#include "srsran/phy/common/phy_common.h"
#include <memory>
namespace srsenb {
// Allocation of Objects in UE dedicated memory pool
// Allocation of objects in rnti-dedicated memory pool
void reserve_rnti_memblocks(size_t nof_blocks);
void* allocate_rnti_dedicated_mem(uint16_t rnti, std::size_t size, std::size_t align);
void deallocate_rnti_dedicated_mem(uint16_t rnti, void* p);
template <typename T>
struct rnti_obj_deleter {
uint16_t rnti;
explicit rnti_obj_deleter(uint16_t rnti_ = 0) : rnti(rnti_) {}
void operator()(void* p)
{
static_cast<T*>(p)->~T();
deallocate_rnti_dedicated_mem(rnti, p);
}
};
template <typename T>
using unique_rnti_ptr = std::unique_ptr<T, rnti_obj_deleter<T> >;
using unique_rnti_ptr = srsran::unique_pool_ptr<T>;
template <typename T, typename... Args>
unique_rnti_ptr<T> make_rnti_obj(uint16_t rnti, Args&&... args)
{
void* block = allocate_rnti_dedicated_mem(rnti, sizeof(T), alignof(T));
if (block == nullptr) {
// allocated with "new" as a fallback
return unique_rnti_ptr<T>(new T(std::forward<Args>(args)...), std::default_delete<T>());
}
// allocation using rnti-dedicated memory pool was successful
new (block) T(std::forward<Args>(args)...);
return unique_rnti_ptr<T>(static_cast<T*>(block), rnti_obj_deleter<T>(rnti));
return unique_rnti_ptr<T>(static_cast<T*>(block), [rnti](T* ptr) {
ptr->~T();
deallocate_rnti_dedicated_mem(rnti, ptr);
});
}
} // namespace srsenb