SimulationCraft
SimulationCraft is a tool to explore combat mechanics in the popular MMORPG World of Warcraft (tm).
allocators.h
1 // Tencent is pleased to support the open source community by making RapidJSON available.
2 //
3 // Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
4 //
5 // Licensed under the MIT License (the "License"); you may not use this file except
6 // in compliance with the License. You may obtain a copy of the License at
7 //
8 // http://opensource.org/licenses/MIT
9 //
10 // Unless required by applicable law or agreed to in writing, software distributed
11 // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
12 // CONDITIONS OF ANY KIND, either express or implied. See the License for the
13 // specific language governing permissions and limitations under the License.
14 
15 #ifndef RAPIDJSON_ALLOCATORS_H_
16 #define RAPIDJSON_ALLOCATORS_H_
17 
18 #include "rapidjson.h"
19 #include "internal/meta.h"
20 
21 #include <memory>
22 
23 #if RAPIDJSON_HAS_CXX11
24 #include <type_traits>
25 #endif
26 
28 
30 // Allocator
31 
70 #ifndef RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY
71 #define RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY (64 * 1024)
72 #endif
73 
74 
76 // CrtAllocator
77 
79 
82 class CrtAllocator {
83 public:
84  static const bool kNeedFree = true;
85  void* Malloc(size_t size) {
86  if (size) // behavior of malloc(0) is implementation defined.
87  return RAPIDJSON_MALLOC(size);
88  else
89  return NULL; // standardize to returning NULL.
90  }
91  void* Realloc(void* originalPtr, size_t originalSize, size_t newSize) {
92  (void)originalSize;
93  if (newSize == 0) {
94  RAPIDJSON_FREE(originalPtr);
95  return NULL;
96  }
97  return RAPIDJSON_REALLOC(originalPtr, newSize);
98  }
99  static void Free(void *ptr) RAPIDJSON_NOEXCEPT { RAPIDJSON_FREE(ptr); }
100 
101  bool operator==(const CrtAllocator&) const RAPIDJSON_NOEXCEPT {
102  return true;
103  }
104  bool operator!=(const CrtAllocator&) const RAPIDJSON_NOEXCEPT {
105  return false;
106  }
107 };
108 
110 // MemoryPoolAllocator
111 
113 
128 template <typename BaseAllocator = CrtAllocator>
131 
133  struct ChunkHeader {
134  size_t capacity;
135  size_t size;
136  ChunkHeader *next;
137  };
138 
139  struct SharedData {
140  ChunkHeader *chunkHead;
141  BaseAllocator* ownBaseAllocator;
142  size_t refcount;
143  bool ownBuffer;
144  };
145 
146  static const size_t SIZEOF_SHARED_DATA = RAPIDJSON_ALIGN(sizeof(SharedData));
147  static const size_t SIZEOF_CHUNK_HEADER = RAPIDJSON_ALIGN(sizeof(ChunkHeader));
148 
149  static inline ChunkHeader *GetChunkHead(SharedData *shared)
150  {
151  return reinterpret_cast<ChunkHeader*>(reinterpret_cast<uint8_t*>(shared) + SIZEOF_SHARED_DATA);
152  }
153  static inline uint8_t *GetChunkBuffer(SharedData *shared)
154  {
155  return reinterpret_cast<uint8_t*>(shared->chunkHead) + SIZEOF_CHUNK_HEADER;
156  }
157 
158  static const size_t kDefaultChunkCapacity = RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY;
159 
160 public:
161  static const bool kNeedFree = false;
162  static const bool kRefCounted = true;
163 
165 
168  explicit
169  MemoryPoolAllocator(size_t chunkSize = kDefaultChunkCapacity, BaseAllocator* baseAllocator = 0) :
170  chunk_capacity_(chunkSize),
171  baseAllocator_(baseAllocator ? baseAllocator : RAPIDJSON_NEW(BaseAllocator)()),
172  shared_(static_cast<SharedData*>(baseAllocator_ ? baseAllocator_->Malloc(SIZEOF_SHARED_DATA + SIZEOF_CHUNK_HEADER) : 0))
173  {
174  RAPIDJSON_ASSERT(baseAllocator_ != 0);
175  RAPIDJSON_ASSERT(shared_ != 0);
176  if (baseAllocator) {
177  shared_->ownBaseAllocator = 0;
178  }
179  else {
180  shared_->ownBaseAllocator = baseAllocator_;
181  }
182  shared_->chunkHead = GetChunkHead(shared_);
183  shared_->chunkHead->capacity = 0;
184  shared_->chunkHead->size = 0;
185  shared_->chunkHead->next = 0;
186  shared_->ownBuffer = true;
187  shared_->refcount = 1;
188  }
189 
191 
200  MemoryPoolAllocator(void *buffer, size_t size, size_t chunkSize = kDefaultChunkCapacity, BaseAllocator* baseAllocator = 0) :
201  chunk_capacity_(chunkSize),
202  baseAllocator_(baseAllocator),
203  shared_(static_cast<SharedData*>(AlignBuffer(buffer, size)))
204  {
205  RAPIDJSON_ASSERT(size >= SIZEOF_SHARED_DATA + SIZEOF_CHUNK_HEADER);
206  shared_->chunkHead = GetChunkHead(shared_);
207  shared_->chunkHead->capacity = size - SIZEOF_SHARED_DATA - SIZEOF_CHUNK_HEADER;
208  shared_->chunkHead->size = 0;
209  shared_->chunkHead->next = 0;
210  shared_->ownBaseAllocator = 0;
211  shared_->ownBuffer = false;
212  shared_->refcount = 1;
213  }
214 
215  MemoryPoolAllocator(const MemoryPoolAllocator& rhs) RAPIDJSON_NOEXCEPT :
216  chunk_capacity_(rhs.chunk_capacity_),
217  baseAllocator_(rhs.baseAllocator_),
218  shared_(rhs.shared_)
219  {
220  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
221  ++shared_->refcount;
222  }
223  MemoryPoolAllocator& operator=(const MemoryPoolAllocator& rhs) RAPIDJSON_NOEXCEPT
224  {
225  RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
226  ++rhs.shared_->refcount;
227  this->~MemoryPoolAllocator();
228  baseAllocator_ = rhs.baseAllocator_;
229  chunk_capacity_ = rhs.chunk_capacity_;
230  shared_ = rhs.shared_;
231  return *this;
232  }
233 
234 #if RAPIDJSON_HAS_CXX11_RVALUE_REFS
235  MemoryPoolAllocator(MemoryPoolAllocator&& rhs) RAPIDJSON_NOEXCEPT :
236  chunk_capacity_(rhs.chunk_capacity_),
237  baseAllocator_(rhs.baseAllocator_),
238  shared_(rhs.shared_)
239  {
240  RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
241  rhs.shared_ = 0;
242  }
243  MemoryPoolAllocator& operator=(MemoryPoolAllocator&& rhs) RAPIDJSON_NOEXCEPT
244  {
245  RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
246  this->~MemoryPoolAllocator();
247  baseAllocator_ = rhs.baseAllocator_;
248  chunk_capacity_ = rhs.chunk_capacity_;
249  shared_ = rhs.shared_;
250  rhs.shared_ = 0;
251  return *this;
252  }
253 #endif
254 
256 
258  ~MemoryPoolAllocator() RAPIDJSON_NOEXCEPT {
259  if (!shared_) {
260  // do nothing if moved
261  return;
262  }
263  if (shared_->refcount > 1) {
264  --shared_->refcount;
265  return;
266  }
267  Clear();
268  BaseAllocator *a = shared_->ownBaseAllocator;
269  if (shared_->ownBuffer) {
270  baseAllocator_->Free(shared_);
271  }
272  RAPIDJSON_DELETE(a);
273  }
274 
276  void Clear() RAPIDJSON_NOEXCEPT {
277  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
278  for (;;) {
279  ChunkHeader* c = shared_->chunkHead;
280  if (!c->next) {
281  break;
282  }
283  shared_->chunkHead = c->next;
284  baseAllocator_->Free(c);
285  }
286  shared_->chunkHead->size = 0;
287  }
288 
290 
292  size_t Capacity() const RAPIDJSON_NOEXCEPT {
293  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
294  size_t capacity = 0;
295  for (ChunkHeader* c = shared_->chunkHead; c != 0; c = c->next)
296  capacity += c->capacity;
297  return capacity;
298  }
299 
301 
303  size_t Size() const RAPIDJSON_NOEXCEPT {
304  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
305  size_t size = 0;
306  for (ChunkHeader* c = shared_->chunkHead; c != 0; c = c->next)
307  size += c->size;
308  return size;
309  }
310 
312 
314  bool Shared() const RAPIDJSON_NOEXCEPT {
315  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
316  return shared_->refcount > 1;
317  }
318 
320  void* Malloc(size_t size) {
321  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
322  if (!size)
323  return NULL;
324 
325  size = RAPIDJSON_ALIGN(size);
326  if (RAPIDJSON_UNLIKELY(shared_->chunkHead->size + size > shared_->chunkHead->capacity))
327  if (!AddChunk(chunk_capacity_ > size ? chunk_capacity_ : size))
328  return NULL;
329 
330  void *buffer = GetChunkBuffer(shared_) + shared_->chunkHead->size;
331  shared_->chunkHead->size += size;
332  return buffer;
333  }
334 
336  void* Realloc(void* originalPtr, size_t originalSize, size_t newSize) {
337  if (originalPtr == 0)
338  return Malloc(newSize);
339 
340  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
341  if (newSize == 0)
342  return NULL;
343 
344  originalSize = RAPIDJSON_ALIGN(originalSize);
345  newSize = RAPIDJSON_ALIGN(newSize);
346 
347  // Do not shrink if new size is smaller than original
348  if (originalSize >= newSize)
349  return originalPtr;
350 
351  // Simply expand it if it is the last allocation and there is sufficient space
352  if (originalPtr == GetChunkBuffer(shared_) + shared_->chunkHead->size - originalSize) {
353  size_t increment = static_cast<size_t>(newSize - originalSize);
354  if (shared_->chunkHead->size + increment <= shared_->chunkHead->capacity) {
355  shared_->chunkHead->size += increment;
356  return originalPtr;
357  }
358  }
359 
360  // Realloc process: allocate and copy memory, do not free original buffer.
361  if (void* newBuffer = Malloc(newSize)) {
362  if (originalSize)
363  std::memcpy(newBuffer, originalPtr, originalSize);
364  return newBuffer;
365  }
366  else
367  return NULL;
368  }
369 
371  static void Free(void *ptr) RAPIDJSON_NOEXCEPT { (void)ptr; } // Do nothing
372 
374  bool operator==(const MemoryPoolAllocator& rhs) const RAPIDJSON_NOEXCEPT {
375  RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
376  RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
377  return shared_ == rhs.shared_;
378  }
380  bool operator!=(const MemoryPoolAllocator& rhs) const RAPIDJSON_NOEXCEPT {
381  return !operator==(rhs);
382  }
383 
384 private:
386 
389  bool AddChunk(size_t capacity) {
390  if (!baseAllocator_)
391  shared_->ownBaseAllocator = baseAllocator_ = RAPIDJSON_NEW(BaseAllocator)();
392  if (ChunkHeader* chunk = static_cast<ChunkHeader*>(baseAllocator_->Malloc(SIZEOF_CHUNK_HEADER + capacity))) {
393  chunk->capacity = capacity;
394  chunk->size = 0;
395  chunk->next = shared_->chunkHead;
396  shared_->chunkHead = chunk;
397  return true;
398  }
399  else
400  return false;
401  }
402 
403  static inline void* AlignBuffer(void* buf, size_t &size)
404  {
405  RAPIDJSON_NOEXCEPT_ASSERT(buf != 0);
406  const uintptr_t mask = sizeof(void*) - 1;
407  const uintptr_t ubuf = reinterpret_cast<uintptr_t>(buf);
408  if (RAPIDJSON_UNLIKELY(ubuf & mask)) {
409  const uintptr_t abuf = (ubuf + mask) & ~mask;
410  RAPIDJSON_ASSERT(size >= abuf - ubuf);
411  buf = reinterpret_cast<void*>(abuf);
412  size -= abuf - ubuf;
413  }
414  return buf;
415  }
416 
417  size_t chunk_capacity_;
418  BaseAllocator* baseAllocator_;
419  SharedData *shared_;
420 };
421 
422 namespace internal {
423  template<typename, typename = void>
424  struct IsRefCounted :
425  public FalseType
426  { };
427  template<typename T>
428  struct IsRefCounted<T, typename internal::EnableIfCond<T::kRefCounted>::Type> :
429  public TrueType
430  { };
431 }
432 
433 template<typename T, typename A>
434 inline T* Realloc(A& a, T* old_p, size_t old_n, size_t new_n)
435 {
436  RAPIDJSON_NOEXCEPT_ASSERT(old_n <= SIZE_MAX / sizeof(T) && new_n <= SIZE_MAX / sizeof(T));
437  return static_cast<T*>(a.Realloc(old_p, old_n * sizeof(T), new_n * sizeof(T)));
438 }
439 
440 template<typename T, typename A>
441 inline T *Malloc(A& a, size_t n = 1)
442 {
443  return Realloc<T, A>(a, NULL, 0, n);
444 }
445 
446 template<typename T, typename A>
447 inline void Free(A& a, T *p, size_t n = 1)
448 {
449  static_cast<void>(Realloc<T, A>(a, p, n, 0));
450 }
451 
452 #ifdef __GNUC__
453 RAPIDJSON_DIAG_PUSH
454 RAPIDJSON_DIAG_OFF(effc++) // std::allocator can safely be inherited
455 #endif
456 
457 template <typename T, typename BaseAllocator = CrtAllocator>
459  public std::allocator<T>
460 {
461  typedef std::allocator<T> allocator_type;
462 #if RAPIDJSON_HAS_CXX11
463  typedef std::allocator_traits<allocator_type> traits_type;
464 #else
465  typedef allocator_type traits_type;
466 #endif
467 
468 public:
469  typedef BaseAllocator BaseAllocatorType;
470 
471  StdAllocator() RAPIDJSON_NOEXCEPT :
472  allocator_type(),
473  baseAllocator_()
474  { }
475 
476  StdAllocator(const StdAllocator& rhs) RAPIDJSON_NOEXCEPT :
477  allocator_type(rhs),
478  baseAllocator_(rhs.baseAllocator_)
479  { }
480 
481  template<typename U>
482  StdAllocator(const StdAllocator<U, BaseAllocator>& rhs) RAPIDJSON_NOEXCEPT :
483  allocator_type(rhs),
484  baseAllocator_(rhs.baseAllocator_)
485  { }
486 
487 #if RAPIDJSON_HAS_CXX11_RVALUE_REFS
488  StdAllocator(StdAllocator&& rhs) RAPIDJSON_NOEXCEPT :
489  allocator_type(std::move(rhs)),
490  baseAllocator_(std::move(rhs.baseAllocator_))
491  { }
492 #endif
493 #if RAPIDJSON_HAS_CXX11
494  using propagate_on_container_move_assignment = std::true_type;
495  using propagate_on_container_swap = std::true_type;
496 #endif
497 
498  /* implicit */
499  StdAllocator(const BaseAllocator& allocator) RAPIDJSON_NOEXCEPT :
500  allocator_type(),
501  baseAllocator_(allocator)
502  { }
503 
504  ~StdAllocator() RAPIDJSON_NOEXCEPT
505  { }
506 
507  template<typename U>
508  struct rebind {
510  };
511 
512  typedef typename traits_type::size_type size_type;
513  typedef typename traits_type::difference_type difference_type;
514 
515  typedef typename traits_type::value_type value_type;
516  typedef typename traits_type::pointer pointer;
517  typedef typename traits_type::const_pointer const_pointer;
518 
519 #if RAPIDJSON_HAS_CXX11
520 
521  typedef typename std::add_lvalue_reference<value_type>::type &reference;
522  typedef typename std::add_lvalue_reference<typename std::add_const<value_type>::type>::type &const_reference;
523 
524  pointer address(reference r) const RAPIDJSON_NOEXCEPT
525  {
526  return std::addressof(r);
527  }
528  const_pointer address(const_reference r) const RAPIDJSON_NOEXCEPT
529  {
530  return std::addressof(r);
531  }
532 
533  size_type max_size() const RAPIDJSON_NOEXCEPT
534  {
535  return traits_type::max_size(*this);
536  }
537 
538  template <typename ...Args>
539  void construct(pointer p, Args&&... args)
540  {
541  traits_type::construct(*this, p, std::forward<Args>(args)...);
542  }
543  void destroy(pointer p)
544  {
545  traits_type::destroy(*this, p);
546  }
547 
548 #else // !RAPIDJSON_HAS_CXX11
549 
550  typedef typename allocator_type::reference reference;
551  typedef typename allocator_type::const_reference const_reference;
552 
553  pointer address(reference r) const RAPIDJSON_NOEXCEPT
554  {
555  return allocator_type::address(r);
556  }
557  const_pointer address(const_reference r) const RAPIDJSON_NOEXCEPT
558  {
559  return allocator_type::address(r);
560  }
561 
562  size_type max_size() const RAPIDJSON_NOEXCEPT
563  {
564  return allocator_type::max_size();
565  }
566 
567  void construct(pointer p, const_reference r)
568  {
569  allocator_type::construct(p, r);
570  }
571  void destroy(pointer p)
572  {
573  allocator_type::destroy(p);
574  }
575 
576 #endif // !RAPIDJSON_HAS_CXX11
577 
578  template <typename U>
579  U* allocate(size_type n = 1, const void* = 0)
580  {
581  return RAPIDJSON_NAMESPACE::Malloc<U>(baseAllocator_, n);
582  }
583  template <typename U>
584  void deallocate(U* p, size_type n = 1)
585  {
586  RAPIDJSON_NAMESPACE::Free<U>(baseAllocator_, p, n);
587  }
588 
589  pointer allocate(size_type n = 1, const void* = 0)
590  {
591  return allocate<value_type>(n);
592  }
593  void deallocate(pointer p, size_type n = 1)
594  {
595  deallocate<value_type>(p, n);
596  }
597 
598 #if RAPIDJSON_HAS_CXX11
599  using is_always_equal = std::is_empty<BaseAllocator>;
600 #endif
601 
602  template<typename U>
603  bool operator==(const StdAllocator<U, BaseAllocator>& rhs) const RAPIDJSON_NOEXCEPT
604  {
605  return baseAllocator_ == rhs.baseAllocator_;
606  }
607  template<typename U>
608  bool operator!=(const StdAllocator<U, BaseAllocator>& rhs) const RAPIDJSON_NOEXCEPT
609  {
610  return !operator==(rhs);
611  }
612 
614  static const bool kNeedFree = BaseAllocator::kNeedFree;
615  static const bool kRefCounted = internal::IsRefCounted<BaseAllocator>::Value;
616  void* Malloc(size_t size)
617  {
618  return baseAllocator_.Malloc(size);
619  }
620  void* Realloc(void* originalPtr, size_t originalSize, size_t newSize)
621  {
622  return baseAllocator_.Realloc(originalPtr, originalSize, newSize);
623  }
624  static void Free(void *ptr) RAPIDJSON_NOEXCEPT
625  {
626  BaseAllocator::Free(ptr);
627  }
628 
629 private:
630  template <typename, typename>
631  friend class StdAllocator; // access to StdAllocator<!T>.*
632 
633  BaseAllocator baseAllocator_;
634 };
635 
636 #if !RAPIDJSON_HAS_CXX17 // std::allocator<void> deprecated in C++17
637 template <typename BaseAllocator>
638 class StdAllocator<void, BaseAllocator> :
639  public std::allocator<void>
640 {
641  typedef std::allocator<void> allocator_type;
642 
643 public:
644  typedef BaseAllocator BaseAllocatorType;
645 
646  StdAllocator() RAPIDJSON_NOEXCEPT :
647  allocator_type(),
648  baseAllocator_()
649  { }
650 
651  StdAllocator(const StdAllocator& rhs) RAPIDJSON_NOEXCEPT :
652  allocator_type(rhs),
653  baseAllocator_(rhs.baseAllocator_)
654  { }
655 
656  template<typename U>
657  StdAllocator(const StdAllocator<U, BaseAllocator>& rhs) RAPIDJSON_NOEXCEPT :
658  allocator_type(rhs),
659  baseAllocator_(rhs.baseAllocator_)
660  { }
661 
662  /* implicit */
663  StdAllocator(const BaseAllocator& baseAllocator) RAPIDJSON_NOEXCEPT :
664  allocator_type(),
665  baseAllocator_(baseAllocator)
666  { }
667 
668  ~StdAllocator() RAPIDJSON_NOEXCEPT
669  { }
670 
671  template<typename U>
672  struct rebind {
674  };
675 
676  typedef typename allocator_type::value_type value_type;
677 
678 private:
679  template <typename, typename>
680  friend class StdAllocator; // access to StdAllocator<!T>.*
681 
682  BaseAllocator baseAllocator_;
683 };
684 #endif
685 
686 #ifdef __GNUC__
687 RAPIDJSON_DIAG_POP
688 #endif
689 
691 
692 #endif // RAPIDJSON_ALLOCATORS_H_
bool Shared() const RAPIDJSON_NOEXCEPT
Whether the allocator is shared.
Definition: allocators.h:314
Definition: allocators.h:458
constexpr auto size() const FMT_NOEXCEPT -> size_t
Returns the size of this buffer.
Definition: core.h:820
void * Malloc(size_t size)
Allocates a memory block. (concept Allocator)
Definition: allocators.h:320
void Clear() RAPIDJSON_NOEXCEPT
Deallocates all memory chunks, excluding the first/user one.
Definition: allocators.h:276
C-runtime library allocator.
Definition: allocators.h:82
void * Realloc(void *originalPtr, size_t originalSize, size_t newSize)
Resizes a memory block (concept Allocator)
Definition: allocators.h:336
MemoryPoolAllocator(void *buffer, size_t size, size_t chunkSize=kDefaultChunkCapacity, BaseAllocator *baseAllocator=0)
Constructor with user-supplied buffer.
Definition: allocators.h:200
#define RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY
User-defined kDefaultChunkCapacity definition.
Definition: allocators.h:71
size_t Capacity() const RAPIDJSON_NOEXCEPT
Computes the total capacity of allocated memory chunks.
Definition: allocators.h:292
#define RAPIDJSON_NAMESPACE_BEGIN
provide custom rapidjson namespace (opening expression)
Definition: rapidjson.h:121
static void Free(void *ptr) RAPIDJSON_NOEXCEPT
Frees a memory block (concept Allocator)
Definition: allocators.h:371
A contiguous memory buffer with an optional growing ability.
Definition: core.h:778
#define RAPIDJSON_NOEXCEPT_ASSERT(x)
Assertion (in non-throwing contexts).
Definition: rapidjson.h:687
bool operator==(const MemoryPoolAllocator &rhs) const RAPIDJSON_NOEXCEPT
Compare (equality) with another MemoryPoolAllocator.
Definition: allocators.h:374
Default memory allocator used by the parser and DOM.
Definition: allocators.h:129
#define RAPIDJSON_UNLIKELY(x)
Compiler branching hint for expression with low probability to be true.
Definition: rapidjson.h:507
#define RAPIDJSON_NAMESPACE_END
provide custom rapidjson namespace (closing expression)
Definition: rapidjson.h:124
#define RAPIDJSON_ALIGN(x)
Data alignment of the machine.
Definition: rapidjson.h:307
Definition: allocators.h:508
bool operator!=(const MemoryPoolAllocator &rhs) const RAPIDJSON_NOEXCEPT
Compare (inequality) with another MemoryPoolAllocator.
Definition: allocators.h:380
Definition: allocators.h:422
MemoryPoolAllocator(size_t chunkSize=kDefaultChunkCapacity, BaseAllocator *baseAllocator=0)
Constructor with chunkSize.
Definition: allocators.h:169
~MemoryPoolAllocator() RAPIDJSON_NOEXCEPT
Destructor.
Definition: allocators.h:258
size_t Size() const RAPIDJSON_NOEXCEPT
Computes the memory blocks allocated.
Definition: allocators.h:303
#define RAPIDJSON_ASSERT(x)
Assertion.
Definition: rapidjson.h:437
Definition: allocators.h:424