Apply clang-format on some infrequently-updated files
sipa committed Sep 19, 2014
1 parent 2fc6c67 commit 20e01b1
Showing 30 changed files with 847 additions and 744 deletions.
225 changes: 110 additions & 115 deletions src/addrman.cpp

Large diffs are not rendered by default.

7 changes: 3 additions & 4 deletions src/allocators.cpp
@@ -37,13 +37,13 @@ static inline size_t GetSystemPageSize()
page_size = sSysInfo.dwPageSize;
#elif defined(PAGESIZE) // defined in limits.h
page_size = PAGESIZE;
#else // assume some POSIX OS
#else // assume some POSIX OS
page_size = sysconf(_SC_PAGESIZE);
#endif
return page_size;
}

bool MemoryPageLocker::Lock(const void *addr, size_t len)
bool MemoryPageLocker::Lock(const void* addr, size_t len)
{
#ifdef WIN32
return VirtualLock(const_cast<void*>(addr), len) != 0;
@@ -52,7 +52,7 @@ bool MemoryPageLocker::Lock(const void *addr, size_t len)
#endif
}

bool MemoryPageLocker::Unlock(const void *addr, size_t len)
bool MemoryPageLocker::Unlock(const void* addr, size_t len)
{
#ifdef WIN32
return VirtualUnlock(const_cast<void*>(addr), len) != 0;
@@ -64,4 +64,3 @@ bool MemoryPageLocker::Unlock(const void *addr, size_t len)
LockedPageManager::LockedPageManager() : LockedPageManagerBase<MemoryPageLocker>(GetSystemPageSize())
{
}
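
The non-Windows branches of Lock and Unlock are collapsed by the diff viewer. For reference, a minimal sketch of the full cross-platform pair, assuming the usual mlock/munlock calls on the POSIX side (the Windows side is copied from the visible lines above):

#ifdef WIN32
#include <windows.h>
#else
#include <sys/mman.h> // mlock, munlock (assumed POSIX branch)
#endif

bool MemoryPageLocker::Lock(const void* addr, size_t len)
{
#ifdef WIN32
    // VirtualLock reports success with a nonzero return value
    return VirtualLock(const_cast<void*>(addr), len) != 0;
#else
    // mlock takes a const pointer directly and returns 0 on success
    return mlock(addr, len) == 0;
#endif
}

bool MemoryPageLocker::Unlock(const void* addr, size_t len)
{
#ifdef WIN32
    return VirtualUnlock(const_cast<void*>(addr), len) != 0;
#else
    return munlock(addr, len) == 0;
#endif
}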

90 changes: 49 additions & 41 deletions src/allocators.h
@@ -26,14 +26,14 @@
* small objects that span up to a few pages, mostly smaller than a page. To support large allocations,
* something like an interval tree would be the preferred data structure.
*/
template <class Locker> class LockedPageManagerBase
template <class Locker>
class LockedPageManagerBase
{
public:
LockedPageManagerBase(size_t page_size):
page_size(page_size)
LockedPageManagerBase(size_t page_size) : page_size(page_size)
{
// Determine bitmask for extracting page from address
assert(!(page_size & (page_size-1))); // size must be power of two
assert(!(page_size & (page_size - 1))); // size must be power of two
page_mask = ~(page_size - 1);
}
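
The mask computed here is what LockRange and UnlockRange below use to map an arbitrary address onto the base address of its page. A small self-contained worked example, with made-up values, assuming 4 KiB pages and a 64-bit size_t:

#include <cassert>
#include <cstddef>

int main()
{
    const std::size_t page_size = 4096;             // power of two, as the assertion requires
    const std::size_t page_mask = ~(page_size - 1); // clears the low 12 bits of an address
    const std::size_t base_addr = 0x7f3a12345e10;   // arbitrary example address
    const std::size_t size = 0x300;                 // a 768-byte object
    const std::size_t start_page = base_addr & page_mask;
    const std::size_t end_page = (base_addr + size - 1) & page_mask;
    assert(start_page == 0x7f3a12345000);           // first page touched by the object
    assert(end_page == 0x7f3a12346000);             // the object straddles into a second page
    return 0;
}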

@@ -44,43 +44,42 @@ template <class Locker> class LockedPageManagerBase


// For all pages in affected range, increase lock count
void LockRange(void *p, size_t size)
void LockRange(void* p, size_t size)
{
boost::mutex::scoped_lock lock(mutex);
if(!size) return;
if (!size)
return;
const size_t base_addr = reinterpret_cast<size_t>(p);
const size_t start_page = base_addr & page_mask;
const size_t end_page = (base_addr + size - 1) & page_mask;
for(size_t page = start_page; page <= end_page; page += page_size)
{
for (size_t page = start_page; page <= end_page; page += page_size) {
Histogram::iterator it = histogram.find(page);
if(it == histogram.end()) // Newly locked page
if (it == histogram.end()) // Newly locked page
{
locker.Lock(reinterpret_cast<void*>(page), page_size);
histogram.insert(std::make_pair(page, 1));
}
else // Page was already locked; increase counter
} else // Page was already locked; increase counter
{
it->second += 1;
}
}
}

// For all pages in affected range, decrease lock count
void UnlockRange(void *p, size_t size)
void UnlockRange(void* p, size_t size)
{
boost::mutex::scoped_lock lock(mutex);
if(!size) return;
if (!size)
return;
const size_t base_addr = reinterpret_cast<size_t>(p);
const size_t start_page = base_addr & page_mask;
const size_t end_page = (base_addr + size - 1) & page_mask;
for(size_t page = start_page; page <= end_page; page += page_size)
{
for (size_t page = start_page; page <= end_page; page += page_size) {
Histogram::iterator it = histogram.find(page);
assert(it != histogram.end()); // Cannot unlock an area that was not locked
// Decrease counter for page, when it is zero, the page will be unlocked
it->second -= 1;
if(it->second == 0) // Nothing on the page anymore that keeps it locked
if (it->second == 0) // Nothing on the page anymore that keeps it locked
{
// Unlock page and remove the count from histogram
locker.Unlock(reinterpret_cast<void*>(page), page_size);
@@ -101,7 +100,7 @@ template <class Locker> class LockedPageManagerBase
boost::mutex mutex;
size_t page_size, page_mask;
// map of page base address to lock count
typedef std::map<size_t,int> Histogram;
typedef std::map<size_t, int> Histogram;
Histogram histogram;
};
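
The per-page count kept in the histogram is what lets two independently locked ranges that share a page be released in any order: the underlying Unlock only fires once the last range on that page is gone. A usage sketch against LockedPageManagerBase with an invented locker that merely counts calls (CountingLocker and the driver function are hypothetical, for illustration; the real manager plugs in MemoryPageLocker, declared below):

// Hypothetical locker: stands in for MemoryPageLocker so the histogram's
// behaviour can be followed without actually calling mlock/VirtualLock.
struct CountingLocker {
    static int locks, unlocks;
    bool Lock(const void*, size_t) { ++locks; return true; }
    bool Unlock(const void*, size_t) { ++unlocks; return true; }
};
int CountingLocker::locks = 0;
int CountingLocker::unlocks = 0;

void HistogramDemo()
{
    LockedPageManagerBase<CountingLocker> manager(4096);
    char a[16];
    char b[16];                        // adjacent stack objects, normally on the same page
    manager.LockRange(a, sizeof(a));   // page seen for the first time: Lock() is called
    manager.LockRange(b, sizeof(b));   // same page: only its counter is incremented
    manager.UnlockRange(a, sizeof(a)); // counter drops to 1, the page stays locked
    manager.UnlockRange(b, sizeof(b)); // counter reaches 0: Unlock() is called
}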

@@ -116,11 +115,11 @@ class MemoryPageLocker
/** Lock memory pages.
* addr and len must be a multiple of the system page size
*/
bool Lock(const void *addr, size_t len);
bool Lock(const void* addr, size_t len);
/** Unlock memory pages.
* addr and len must be a multiple of the system page size
*/
bool Unlock(const void *addr, size_t len);
bool Unlock(const void* addr, size_t len);
};

/**
@@ -134,10 +133,10 @@ class MemoryPageLocker
* secure_allocator are created. So instead of having LockedPageManager also be
* static-initialized, it is created on demand.
*/
class LockedPageManager: public LockedPageManagerBase<MemoryPageLocker>
class LockedPageManager : public LockedPageManagerBase<MemoryPageLocker>
{
public:
static LockedPageManager& Instance()
static LockedPageManager& Instance()
{
boost::call_once(LockedPageManager::CreateInstance, LockedPageManager::init_flag);
return *LockedPageManager::_instance;
@@ -165,11 +164,15 @@ class LockedPageManager: public LockedPageManagerBase<MemoryPageLocker>
// Functions for directly locking/unlocking memory objects.
// Intended for non-dynamically allocated structures.
//
template<typename T> void LockObject(const T &t) {
template <typename T>
void LockObject(const T& t)
{
LockedPageManager::Instance().LockRange((void*)(&t), sizeof(T));
}

template<typename T> void UnlockObject(const T &t) {
template <typename T>
void UnlockObject(const T& t)
{
OPENSSL_cleanse((void*)(&t), sizeof(T));
LockedPageManager::Instance().UnlockRange((void*)(&t), sizeof(T));
}
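
These two helpers are the intended entry point for pinning fixed-size objects that are not heap-allocated. A short hypothetical usage sketch (the Key type is invented for illustration):

struct Key {
    unsigned char bytes[32]; // hypothetical secret material
};

void UseKey()
{
    Key key;
    LockObject(key);   // the pages covering key are locked via LockedPageManager
    // ... fill key.bytes with sensitive data and use it ...
    UnlockObject(key); // key is wiped with OPENSSL_cleanse, then its pages are unlocked
}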
@@ -178,13 +181,12 @@ template<typename T> void UnlockObject(const T &t) {
// Allocator that locks its contents from being paged
// out of memory and clears its contents before deletion.
//
template<typename T>
struct secure_allocator : public std::allocator<T>
{
template <typename T>
struct secure_allocator : public std::allocator<T> {
// MSVC8 default copy constructor is broken
typedef std::allocator<T> base;
typedef typename base::size_type size_type;
typedef typename base::difference_type difference_type;
typedef typename base::difference_type difference_type;
typedef typename base::pointer pointer;
typedef typename base::const_pointer const_pointer;
typedef typename base::reference reference;
@@ -193,14 +195,18 @@ struct secure_allocator : public std::allocator<T>
secure_allocator() throw() {}
secure_allocator(const secure_allocator& a) throw() : base(a) {}
template <typename U>
secure_allocator(const secure_allocator<U>& a) throw() : base(a) {}
secure_allocator(const secure_allocator<U>& a) throw() : base(a)
{
}
~secure_allocator() throw() {}
template<typename _Other> struct rebind
{ typedef secure_allocator<_Other> other; };
template <typename _Other>
struct rebind {
typedef secure_allocator<_Other> other;
};

T* allocate(std::size_t n, const void *hint = 0)
T* allocate(std::size_t n, const void* hint = 0)
{
T *p;
T* p;
p = std::allocator<T>::allocate(n, hint);
if (p != NULL)
LockedPageManager::Instance().LockRange(p, sizeof(T) * n);
@@ -209,8 +215,7 @@ struct secure_allocator : public std::allocator<T>

void deallocate(T* p, std::size_t n)
{
if (p != NULL)
{
if (p != NULL) {
OPENSSL_cleanse(p, sizeof(T) * n);
LockedPageManager::Instance().UnlockRange(p, sizeof(T) * n);
}
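
Allocators like this are meant to be plugged into ordinary standard-library containers so that their heap storage is locked while alive and cleansed on deallocation. The typedef below sketches that pattern; the exact name is illustrative:

#include <string>

// A string whose character buffer is kept off the swap file and wiped on free.
typedef std::basic_string<char, std::char_traits<char>, secure_allocator<char> > SecureString;

void ReadPassphrase()
{
    SecureString passphrase;
    passphrase.reserve(100); // the allocation goes through secure_allocator::allocate
    // ... read the passphrase and derive keys from it ...
}   // destruction releases the buffer via deallocate: OPENSSL_cleanse + UnlockRange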
@@ -222,13 +227,12 @@ struct secure_allocator : public std::allocator<T>
//
// Allocator that clears its contents before deletion.
//
template<typename T>
struct zero_after_free_allocator : public std::allocator<T>
{
template <typename T>
struct zero_after_free_allocator : public std::allocator<T> {
// MSVC8 default copy constructor is broken
typedef std::allocator<T> base;
typedef typename base::size_type size_type;
typedef typename base::difference_type difference_type;
typedef typename base::difference_type difference_type;
typedef typename base::pointer pointer;
typedef typename base::const_pointer const_pointer;
typedef typename base::reference reference;
@@ -237,10 +241,14 @@ struct zero_after_free_allocator : public std::allocator<T>
zero_after_free_allocator() throw() {}
zero_after_free_allocator(const zero_after_free_allocator& a) throw() : base(a) {}
template <typename U>
zero_after_free_allocator(const zero_after_free_allocator<U>& a) throw() : base(a) {}
zero_after_free_allocator(const zero_after_free_allocator<U>& a) throw() : base(a)
{
}
~zero_after_free_allocator() throw() {}
template<typename _Other> struct rebind
{ typedef zero_after_free_allocator<_Other> other; };
template <typename _Other>
struct rebind {
typedef zero_after_free_allocator<_Other> other;
};

void deallocate(T* p, std::size_t n)
{
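
zero_after_free_allocator is the lighter-weight counterpart to secure_allocator: its deallocate only cleanses the buffer and never touches the LockedPageManager, which makes it suitable for large, short-lived buffers (serialization scratch space, for example) where locking every page would be wasteful. A sketch of how such an allocator is typically wired into a vector; the typedef name is illustrative:

#include <vector>

// A byte buffer whose storage is zeroed before it returns to the heap.
typedef std::vector<char, zero_after_free_allocator<char> > CharVector;

void SerializeSomething()
{
    CharVector buf;
    buf.assign(64, '\0'); // fill with serialized, possibly sensitive, data
    // ... hand &buf[0] and buf.size() to the network or disk layer ...
}   // on destruction, deallocate() runs OPENSSL_cleanse over the freed storage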