"Fossies" - the Fresh Open Source Software Archive

Member "ponyc-0.33.2/src/libponyrt/mem/heap.c" (3 Feb 2020, 19201 Bytes) of package /linux/misc/ponyc-0.33.2.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) C and C++ source code syntax highlighting (style: standard) with prefixed line numbers and a code folding option. Alternatively, you can view or download the uninterpreted source code file here. For more information about "heap.c", see the Fossies "Dox" file reference documentation and the latest Fossies "Diffs" side-by-side code changes report: 0.33.1_vs_0.33.2.

    1 #include "heap.h"
    2 #include "pagemap.h"
    3 #include "../ds/fun.h"
    4 #include "ponyassert.h"
    5 #include <string.h>
    6 
    7 #include <platform.h>
    8 #include <dtrace.h>
    9 
// A chunk is the GC bookkeeping record for one allocation region: either a
// small block subdivided into fixed-size slots, or a single large object.
typedef struct chunk_t
{
  // immutable
  pony_actor_t* actor; // owning actor; read during tracing
  char* m;             // start of the memory this chunk manages
  size_t size;         // small chunk: size class index; large chunk: byte size

  // mutable
  uint32_t slots;      // bitmap: set bit = free/unmarked, clear bit = in use
  uint32_t shallow;    // bitmap of shallow (non-recursed) GC marks
  uint32_t finalisers; // bitmap of slots with a pending finaliser

  struct chunk_t* next; // intrusive singly linked list
} chunk_t;
   24 
   25 typedef char block_t[POOL_ALIGN];
   26 typedef void (*chunk_fn)(chunk_t* chunk, uint32_t mark);
   27 
   28 #define SIZECLASS_SIZE(sizeclass) (HEAP_MIN << (sizeclass))
   29 #define SIZECLASS_MASK(sizeclass) (~(SIZECLASS_SIZE(sizeclass) - 1))
   30 
   31 #define EXTERNAL_PTR(p, sizeclass) \
   32   ((void*)((uintptr_t)p & SIZECLASS_MASK(sizeclass)))
   33 
   34 #define FIND_SLOT(ext, base) \
   35   (1 << ((uintptr_t)((char*)(ext) - (char*)(base)) >> HEAP_MINBITS))
   36 
// Bit pattern of a completely empty chunk per size class: one set bit per
// slot (set = free). Class 0 has 32 slots; class 4 has 2.
static const uint32_t sizeclass_empty[HEAP_SIZECLASSES] =
{
  0xFFFFFFFF,
  0x55555555,
  0x11111111,
  0x01010101,
  0x00010001
};

// Same patterns with the first slot's bit cleared: the state of a freshly
// created chunk whose first slot has just been handed out.
static const uint32_t sizeclass_init[HEAP_SIZECLASSES] =
{
  0xFFFFFFFE,
  0x55555554,
  0x11111110,
  0x01010100,
  0x00010000
};

// Maps ((size - 1) >> HEAP_MINBITS) to a size class index; see
// ponyint_heap_index.
static const uint8_t sizeclass_table[HEAP_MAX / HEAP_MIN] =
{
  0, 1, 2, 2, 3, 3, 3, 3,
  4, 4, 4, 4, 4, 4, 4, 4
};

// Initial GC trigger threshold in bytes; see ponyint_heap_setinitialgc.
static size_t heap_initialgc = 1 << 14;

// Growth factor for the next GC point; see ponyint_heap_setnextgcfactor.
static double heap_nextgc_factor = 2.0;
   63 
#ifdef USE_MEMTRACK
/** Get the memory used by the heap.
 */
size_t ponyint_heap_mem_size(heap_t* heap)
{
  // include memory that is in use by the heap but not counted as part of
  // `used` like `chunk_t`. also, don't include "fake used" for purposes of
  // triggering GC.
  return heap->mem_used;
}

/** Get the memory allocated by the heap.
 */
size_t ponyint_heap_alloc_size(heap_t* heap)
{
  // bytes as accounted via POOL_ALLOC_SIZE / ponyint_pool_used_size when
  // chunks were created.
  return heap->mem_allocated;
}
#endif
   82 
// Register (or clear, when chunk is NULL) the pagemap entries covering the
// whole range of a large allocation.
static void large_pagemap(char* m, size_t size, chunk_t* chunk)
{
  ponyint_pagemap_set_bulk(m, chunk, size);
}
   87 
   88 static void clear_chunk(chunk_t* chunk, uint32_t mark)
   89 {
   90   chunk->slots = mark;
   91   chunk->shallow = mark;
   92 }
   93 
   94 static void final_small(chunk_t* chunk, uint32_t mark)
   95 {
   96   // run any finalisers that need to be run
   97   void* p = NULL;
   98 
   99   uint32_t finalisers = chunk->finalisers;
  100   uint64_t bit = 0;
  101 
  102   // if there's a finaliser to run for a used slot
  103   while(finalisers != 0)
  104   {
  105     bit = __pony_ctz(finalisers);
  106     p = chunk->m + (bit << HEAP_MINBITS);
  107 
  108     // run finaliser
  109     pony_assert((*(pony_type_t**)p)->final != NULL);
  110     (*(pony_type_t**)p)->final(p);
  111 
  112     // clear finaliser in chunk
  113     chunk->finalisers &= ~((uint32_t)1 << bit);
  114 
  115     // clear bit just found in our local finaliser map
  116     finalisers &= (finalisers - 1);
  117   }
  118   (void)mark;
  119 }
  120 
  121 static void final_small_freed(chunk_t* chunk)
  122 {
  123   // run any finalisers that need to be run for any newly freed slots
  124   void* p = NULL;
  125 
  126   uint32_t finalisers = chunk->finalisers & chunk->slots;
  127   chunk->finalisers = chunk->finalisers & ~chunk->slots;
  128   uint64_t bit = 0;
  129 
  130   // if there's a finaliser to run for a used slot
  131   while(finalisers != 0)
  132   {
  133     bit = __pony_ctz(finalisers);
  134     p = chunk->m + (bit << HEAP_MINBITS);
  135 
  136     // run finaliser
  137     pony_assert((*(pony_type_t**)p)->final != NULL);
  138     (*(pony_type_t**)p)->final(p);
  139 
  140     // clear bit just found in our local finaliser map
  141     finalisers &= (finalisers - 1);
  142   }
  143 }
  144 
  145 static void final_large(chunk_t* chunk, uint32_t mark)
  146 {
  147   if(chunk->finalisers == 1)
  148   {
  149     // run finaliser
  150     pony_assert((*(pony_type_t**)chunk->m)->final != NULL);
  151     (*(pony_type_t**)chunk->m)->final(chunk->m);
  152     chunk->finalisers = 0;
  153   }
  154   (void)mark;
  155 }
  156 
// Free a small chunk entirely: run any pending finalisers first, remove the
// block from the pagemap, then return the block and the chunk header to the
// pool.
static void destroy_small(chunk_t* chunk, uint32_t mark)
{
  (void)mark;

  // run any finalisers that need running
  final_small(chunk, mark);

  ponyint_pagemap_set(chunk->m, NULL);
  POOL_FREE(block_t, chunk->m);
  POOL_FREE(chunk_t, chunk);
}
  168 
// Free a large chunk: run its finaliser if pending, clear its pagemap
// entries, then release the memory and the chunk header.
static void destroy_large(chunk_t* chunk, uint32_t mark)
{

  (void)mark;

  // run any finalisers that need running
  final_large(chunk, mark);

  large_pagemap(chunk->m, chunk->size, NULL);

  // m is NULL if the allocation was already released by ponyint_heap_free.
  if(chunk->m != NULL)
    ponyint_pool_free_size(chunk->size, chunk->m);

  POOL_FREE(chunk_t, chunk);
}
  184 
// Sweep one list of small chunks after marking. Fully marked chunks (no set
// slot bits) go onto the `full` list; completely unmarked chunks
// (slots == `empty`) are destroyed; partially used chunks run finalisers for
// newly freed slots and go onto the `avail` list. Returns bytes still in use.
static size_t sweep_small(chunk_t* chunk, chunk_t** avail, chunk_t** full,
#ifdef USE_MEMTRACK
  uint32_t empty, size_t size, size_t* mem_allocated, size_t* mem_used)
#else
  uint32_t empty, size_t size)
#endif
{
  size_t used = 0;
  chunk_t* next;

  while(chunk != NULL)
  {
    next = chunk->next;
    // A slot is free only if it is unmarked both deeply and shallowly.
    chunk->slots &= chunk->shallow;

    if(chunk->slots == 0)
    {
      // Every slot is in use: the whole block counts as used.
#ifdef USE_MEMTRACK
      *mem_allocated += POOL_ALLOC_SIZE(chunk_t);
      *mem_allocated += POOL_ALLOC_SIZE(block_t);
      *mem_used += sizeof(chunk_t);
      *mem_used += sizeof(block_t);
#endif
      used += sizeof(block_t);
      chunk->next = *full;
      *full = chunk;
    } else if(chunk->slots == empty) {
      // No slot is in use: free the whole chunk.
      destroy_small(chunk, 0);
    } else {
#ifdef USE_MEMTRACK
      *mem_allocated += POOL_ALLOC_SIZE(chunk_t);
      *mem_allocated += POOL_ALLOC_SIZE(block_t);
      *mem_used += sizeof(chunk_t);
      *mem_used += sizeof(block_t);
#endif
      // Count the block minus its free slots.
      used += (sizeof(block_t) -
        (__pony_popcount(chunk->slots) * size));

      // run finalisers for freed slots
      final_small_freed(chunk);

      // make chunk available for allocations only after finalisers have been
      // run to prevent premature reuse of memory slots by an allocation
      // required for finaliser execution
      chunk->next = *avail;
      *avail = chunk;
    }

    chunk = next;
  }

  return used;
}
  238 
// Sweep the large chunk list after marking. Marked chunks (slots cleared to
// zero) are kept and returned as the new list; unmarked chunks are
// destroyed. Bytes kept are added to *used.
#ifdef USE_MEMTRACK
static chunk_t* sweep_large(chunk_t* chunk, size_t* used, size_t* mem_allocated,
  size_t* mem_used)
#else
static chunk_t* sweep_large(chunk_t* chunk, size_t* used)
#endif
{
  chunk_t* list = NULL;
  chunk_t* next;

  while(chunk != NULL)
  {
    next = chunk->next;
    // Alive only if unmarked neither deeply nor shallowly.
    chunk->slots &= chunk->shallow;

    if(chunk->slots == 0)
    {
      chunk->next = list;
      list = chunk;
#ifdef USE_MEMTRACK
      *mem_allocated += POOL_ALLOC_SIZE(chunk_t);
      *mem_allocated += ponyint_pool_used_size(chunk->size);
      *mem_used += sizeof(chunk_t);
      *mem_used += chunk->size;
#endif
      *used += chunk->size;
    } else {
      destroy_large(chunk, 0);
    }

    chunk = next;
  }

  return list;
}
  274 
  275 static void chunk_list(chunk_fn f, chunk_t* current, uint32_t mark)
  276 {
  277   chunk_t* next;
  278 
  279   while(current != NULL)
  280   {
  281     next = current->next;
  282     f(current, mark);
  283     current = next;
  284   }
  285 }
  286 
  287 uint32_t ponyint_heap_index(size_t size)
  288 {
  289   // size is in range 1..HEAP_MAX
  290   // change to 0..((HEAP_MAX / HEAP_MIN) - 1) and look up in table
  291   return sizeclass_table[(size - 1) >> HEAP_MINBITS];
  292 }
  293 
// Set the initial GC threshold to 2^size bytes; applies to heaps
// initialised after this call.
void ponyint_heap_setinitialgc(size_t size)
{
  heap_initialgc = (size_t)1 << size;
}
  298 
  299 void ponyint_heap_setnextgcfactor(double factor)
  300 {
  301   if(factor < 1.0)
  302     factor = 1.0;
  303 
  304   DTRACE1(GC_THRESHOLD, factor);
  305   heap_nextgc_factor = factor;
  306 }
  307 
// Initialise a heap to empty with the configured initial GC threshold.
void ponyint_heap_init(heap_t* heap)
{
  memset(heap, 0, sizeof(heap_t));
  heap->next_gc = heap_initialgc;
}
  313 
  314 void ponyint_heap_destroy(heap_t* heap)
  315 {
  316   chunk_list(destroy_large, heap->large, 0);
  317 
  318   for(int i = 0; i < HEAP_SIZECLASSES; i++)
  319   {
  320     chunk_list(destroy_small, heap->small_free[i], 0);
  321     chunk_list(destroy_small, heap->small_full[i], 0);
  322   }
  323 }
  324 
  325 void ponyint_heap_final(heap_t* heap)
  326 {
  327   chunk_list(final_large, heap->large, 0);
  328 
  329   for(int i = 0; i < HEAP_SIZECLASSES; i++)
  330   {
  331     chunk_list(final_small, heap->small_free[i], 0);
  332     chunk_list(final_small, heap->small_full[i], 0);
  333   }
  334 }
  335 
  336 void* ponyint_heap_alloc(pony_actor_t* actor, heap_t* heap, size_t size)
  337 {
  338   if(size == 0)
  339   {
  340     return NULL;
  341   } else if(size <= HEAP_MAX) {
  342     return ponyint_heap_alloc_small(actor, heap, ponyint_heap_index(size));
  343   } else {
  344     return ponyint_heap_alloc_large(actor, heap, size);
  345   }
  346 }
  347 
  348 void* ponyint_heap_alloc_final(pony_actor_t* actor, heap_t* heap, size_t size)
  349 {
  350   if(size == 0)
  351   {
  352     return NULL;
  353   } else if(size <= HEAP_MAX) {
  354     return ponyint_heap_alloc_small_final(actor, heap,
  355       ponyint_heap_index(size));
  356   } else {
  357     return ponyint_heap_alloc_large_final(actor, heap, size);
  358   }
  359 }
  360 
  361 void* ponyint_heap_alloc_small(pony_actor_t* actor, heap_t* heap,
  362   uint32_t sizeclass)
  363 {
  364   chunk_t* chunk = heap->small_free[sizeclass];
  365   void* m;
  366 
  367   // If there are none in this size class, get a new one.
  368   if(chunk != NULL)
  369   {
  370     // Clear and use the first available slot.
  371     uint32_t slots = chunk->slots;
  372     uint32_t bit = __pony_ctz(slots);
  373     slots &= ~(1 << bit);
  374 
  375     m = chunk->m + (bit << HEAP_MINBITS);
  376     chunk->slots = slots;
  377 
  378     if(slots == 0)
  379     {
  380       heap->small_free[sizeclass] = chunk->next;
  381       chunk->next = heap->small_full[sizeclass];
  382       heap->small_full[sizeclass] = chunk;
  383     }
  384   } else {
  385     chunk_t* n = (chunk_t*) POOL_ALLOC(chunk_t);
  386     n->actor = actor;
  387     n->m = (char*) POOL_ALLOC(block_t);
  388     n->size = sizeclass;
  389 #ifdef USE_MEMTRACK
  390     heap->mem_used += sizeof(chunk_t);
  391     heap->mem_used += POOL_ALLOC_SIZE(block_t);
  392     heap->mem_used -= SIZECLASS_SIZE(sizeclass);
  393     heap->mem_allocated += POOL_ALLOC_SIZE(chunk_t);
  394     heap->mem_allocated += POOL_ALLOC_SIZE(block_t);
  395 #endif
  396 
  397     // note that no finaliser needs to run
  398     n->finalisers = 0;
  399 
  400     // Clear the first bit.
  401     n->shallow = n->slots = sizeclass_init[sizeclass];
  402     n->next = NULL;
  403 
  404     ponyint_pagemap_set(n->m, n);
  405 
  406     heap->small_free[sizeclass] = n;
  407     chunk = n;
  408 
  409     // Use the first slot.
  410     m = chunk->m;
  411   }
  412 
  413 #ifdef USE_MEMTRACK
  414   heap->mem_used += SIZECLASS_SIZE(sizeclass);
  415 #endif
  416   heap->used += SIZECLASS_SIZE(sizeclass);
  417   return m;
  418 }
  419 
// Like ponyint_heap_alloc_small, but records that the object's finaliser
// must run before its slot is reclaimed.
void* ponyint_heap_alloc_small_final(pony_actor_t* actor, heap_t* heap,
  uint32_t sizeclass)
{
  chunk_t* chunk = heap->small_free[sizeclass];
  void* m;

  // If there are none in this size class, get a new one.
  if(chunk != NULL)
  {
    // Clear and use the first available slot.
    uint32_t slots = chunk->slots;
    uint32_t bit = __pony_ctz(slots);
    slots &= ~((uint32_t)1 << bit);

    m = chunk->m + (bit << HEAP_MINBITS);
    chunk->slots = slots;

    // note that a finaliser needs to run
    chunk->finalisers |= ((uint32_t)1 << bit);

    // If the chunk is now full, move it to the full list.
    if(slots == 0)
    {
      heap->small_free[sizeclass] = chunk->next;
      chunk->next = heap->small_full[sizeclass];
      heap->small_full[sizeclass] = chunk;
    }
  } else {
    chunk_t* n = (chunk_t*) POOL_ALLOC(chunk_t);
    n->actor = actor;
    n->m = (char*) POOL_ALLOC(block_t);
    n->size = sizeclass;
#ifdef USE_MEMTRACK
    heap->mem_used += sizeof(chunk_t);
    heap->mem_used += POOL_ALLOC_SIZE(block_t);
    heap->mem_used -= SIZECLASS_SIZE(sizeclass);
    heap->mem_allocated += POOL_ALLOC_SIZE(chunk_t);
    heap->mem_allocated += POOL_ALLOC_SIZE(block_t);
#endif

    // note that a finaliser needs to run
    n->finalisers = 1;

    // Clear the first bit.
    n->shallow = n->slots = sizeclass_init[sizeclass];
    n->next = NULL;

    ponyint_pagemap_set(n->m, n);

    heap->small_free[sizeclass] = n;
    chunk = n;

    // Use the first slot.
    m = chunk->m;
  }

#ifdef USE_MEMTRACK
  heap->mem_used += SIZECLASS_SIZE(sizeclass);
#endif
  heap->used += SIZECLASS_SIZE(sizeclass);
  return m;
}
  481 
// Allocate a large object (> HEAP_MAX bytes) in its own chunk. The size is
// rounded up by the pool allocator and the whole range is registered in the
// pagemap so interior pointers resolve back to the chunk.
void* ponyint_heap_alloc_large(pony_actor_t* actor, heap_t* heap, size_t size)
{
  size = ponyint_pool_adjust_size(size);

  chunk_t* chunk = (chunk_t*) POOL_ALLOC(chunk_t);
  chunk->actor = actor;
  chunk->size = size;
  chunk->m = (char*) ponyint_pool_alloc_size(size);
#ifdef USE_MEMTRACK
  heap->mem_used += sizeof(chunk_t);
  heap->mem_used += chunk->size;
  heap->mem_allocated += POOL_ALLOC_SIZE(chunk_t);
  heap->mem_allocated += ponyint_pool_used_size(size);
#endif
  // Start marked (clear bit = in use).
  chunk->slots = 0;
  chunk->shallow = 0;

  // note that no finaliser needs to run
  chunk->finalisers = 0;

  large_pagemap(chunk->m, size, chunk);

  chunk->next = heap->large;
  heap->large = chunk;
  heap->used += chunk->size;

  return chunk->m;
}
  510 
// Like ponyint_heap_alloc_large, but records that the object's finaliser
// must run before the memory is reclaimed.
void* ponyint_heap_alloc_large_final(pony_actor_t* actor, heap_t* heap,
  size_t size)
{
  size = ponyint_pool_adjust_size(size);

  chunk_t* chunk = (chunk_t*) POOL_ALLOC(chunk_t);
  chunk->actor = actor;
  chunk->size = size;
  chunk->m = (char*) ponyint_pool_alloc_size(size);
#ifdef USE_MEMTRACK
  heap->mem_used += sizeof(chunk_t);
  heap->mem_used += chunk->size;
  heap->mem_allocated += POOL_ALLOC_SIZE(chunk_t);
  heap->mem_allocated += ponyint_pool_used_size(size);
#endif
  // Start marked (clear bit = in use).
  chunk->slots = 0;
  chunk->shallow = 0;

  // note that a finaliser needs to run
  chunk->finalisers = 1;

  large_pagemap(chunk->m, size, chunk);

  chunk->next = heap->large;
  heap->large = chunk;
  heap->used += chunk->size;

  return chunk->m;
}
  540 
// Reallocate the pony allocation at `p` to at least `size` bytes. A NULL
// `p` behaves like ponyint_heap_alloc. When the existing allocation is big
// enough and `p` is not an internal pointer, the old memory is returned
// unchanged; otherwise fresh memory is allocated and the old contents are
// copied. The old allocation is not freed here; GC reclaims it.
void* ponyint_heap_realloc(pony_actor_t* actor, heap_t* heap, void* p,
  size_t size)
{
  if(p == NULL)
    return ponyint_heap_alloc(actor, heap, size);

  chunk_t* chunk = ponyint_pagemap_get(p);

  // We can't realloc memory that wasn't pony_alloc'ed since we can't know how
  // much to copy from the previous location.
  pony_assert(chunk != NULL);

  size_t oldsize;

  if(chunk->size < HEAP_SIZECLASSES)
  {
    // Previous allocation was a ponyint_heap_alloc_small.
    void* ext = EXTERNAL_PTR(p, chunk->size);

    // If the new allocation is a ponyint_heap_alloc_small and the pointer is
    // not an internal pointer, we may be able to reuse this memory. If it is
    // an internal pointer, we know where the old allocation begins but not
    // where it ends, so we cannot reuse this memory.
    if((size <= HEAP_MAX) && (p == ext))
    {
      uint32_t sizeclass = ponyint_heap_index(size);

      // If the new allocation is the same size or smaller, return the old
      // one.
      if(sizeclass <= chunk->size)
        return p;
    }

    // Bytes available from p to the end of the old allocation.
    oldsize = SIZECLASS_SIZE(chunk->size) - ((uintptr_t)p - (uintptr_t)ext);
  } else {
    // Previous allocation was a ponyint_heap_alloc_large.
    if((size <= chunk->size) && (p == chunk->m))
    {
      // If the new allocation is the same size or smaller, and this is not an
      // internal pointer, return the old one. We can't reuse internal
      // pointers in large allocs for the same reason as small ones.
      return p;
    }

    oldsize = chunk->size - ((uintptr_t)p - (uintptr_t)chunk->m);
  }

  // Determine how much memory to copy.
  if(oldsize > size)
    oldsize = size;

  // Get new memory and copy from the old memory.
  void* q = ponyint_heap_alloc(actor, heap, size);
  memcpy(q, p, oldsize);
  return q;
}
  597 
// Add `size` bytes to the heap's used-memory accounting, which drives the
// GC trigger in ponyint_heap_startgc.
void ponyint_heap_used(heap_t* heap, size_t size)
{
  heap->used += size;
}
  602 
  603 bool ponyint_heap_startgc(heap_t* heap)
  604 {
  605   if(heap->used <= heap->next_gc)
  606     return false;
  607 
  608   for(int i = 0; i < HEAP_SIZECLASSES; i++)
  609   {
  610     uint32_t mark = sizeclass_empty[i];
  611     chunk_list(clear_chunk, heap->small_free[i], mark);
  612     chunk_list(clear_chunk, heap->small_full[i], mark);
  613   }
  614 
  615   chunk_list(clear_chunk, heap->large, 1);
  616 
  617   // reset used to zero
  618   heap->used = 0;
  619 #ifdef USE_MEMTRACK
  620   heap->mem_allocated = 0;
  621   heap->mem_used = 0;
  622 #endif
  623   return true;
  624 }
  625 
// Mark the object at `p`, returning true if it was already marked before
// this call. If it's an internal pointer, we shallow mark it instead. This
// will preserve the external pointer, but allow us to mark and recurse the
// external pointer in the same pass.
bool ponyint_heap_mark(chunk_t* chunk, void* p)
{
  bool marked;

  if(chunk->size >= HEAP_SIZECLASSES)
  {
    // Large chunk: a single object, already marked if slots is 0.
    marked = chunk->slots == 0;

    if(p == chunk->m)
      chunk->slots = 0;
    else
      chunk->shallow = 0;
  } else {
    // Calculate the external pointer.
    void* ext = EXTERNAL_PTR(p, chunk->size);

    // Shift to account for smallest allocation size.
    uint32_t slot = FIND_SLOT(ext, chunk->m);

    // Check if it was already marked.
    marked = (chunk->slots & slot) == 0;

    // A clear bit is in-use, a set bit is available.
    if(p == ext)
      chunk->slots &= ~slot;
    else
      chunk->shallow &= ~slot;
  }

  return marked;
}
  660 
  661 void ponyint_heap_mark_shallow(chunk_t* chunk, void* p)
  662 {
  663   if(chunk->size >= HEAP_SIZECLASSES)
  664   {
  665     chunk->shallow = 0;
  666   } else {
  667     // Calculate the external pointer.
  668     void* ext = EXTERNAL_PTR(p, chunk->size);
  669 
  670     // Shift to account for smallest allocation size.
  671     uint32_t slot = FIND_SLOT(ext, chunk->m);
  672 
  673     // A clear bit is in-use, a set bit is available.
  674     chunk->shallow &= ~slot;
  675   }
  676 }
  677 
  678 bool ponyint_heap_ismarked(chunk_t* chunk, void* p)
  679 {
  680   if(chunk->size >= HEAP_SIZECLASSES)
  681     return (chunk->slots & chunk->shallow) == 0;
  682 
  683   // Shift to account for smallest allocation size.
  684   uint32_t slot = FIND_SLOT(p, chunk->m);
  685 
  686   // Check if the slot is marked or shallow marked.
  687   return (chunk->slots & chunk->shallow & slot) == 0;
  688 }
  689 
// Explicitly free the object at `p`. Only exact external pointers are
// honoured; internal pointers are ignored. Any pending finaliser is run
// before the memory is released.
void ponyint_heap_free(chunk_t* chunk, void* p)
{
  if(chunk->size >= HEAP_SIZECLASSES)
  {
    if(p == chunk->m)
    {
      // run finaliser if needed
      final_large(chunk, 0);

      ponyint_pool_free_size(chunk->size, chunk->m);
      // Leave the header for GC: m == NULL flags the memory as released and
      // slots == 1 lets the next sweep discard the chunk header.
      chunk->m = NULL;
      chunk->slots = 1;
    }
    return;
  }

  // Calculate the external pointer.
  void* ext = EXTERNAL_PTR(p, chunk->size);

  if(p == ext)
  {
    // Shift to account for smallest allocation size.
    uint32_t slot = FIND_SLOT(ext, chunk->m);

    // check if there's a finaliser to run
    if((chunk->finalisers & slot) != 0)
    {
      // run finaliser
      (*(pony_type_t**)p)->final(p);

      // clear finaliser
      chunk->finalisers &= ~slot;
    }

    // free slot
    chunk->slots |= slot;
  }
}
  728 
// Finish a GC pass: sweep every small size class and the large chunk list,
// rebuild the used (and memtrack) counters from the survivors, and compute
// the next GC trigger point from the surviving heap size.
void ponyint_heap_endgc(heap_t* heap)
{
  size_t used = 0;
#ifdef USE_MEMTRACK
  size_t mem_allocated = 0;
  size_t mem_used = 0;
#endif

  for(int i = 0; i < HEAP_SIZECLASSES; i++)
  {
    // Detach both lists; sweep_small redistributes survivors back onto them.
    chunk_t* list1 = heap->small_free[i];
    chunk_t* list2 = heap->small_full[i];

    heap->small_free[i] = NULL;
    heap->small_full[i] = NULL;

    chunk_t** avail = &heap->small_free[i];
    chunk_t** full = &heap->small_full[i];

    size_t size = SIZECLASS_SIZE(i);
    uint32_t empty = sizeclass_empty[i];

#ifdef USE_MEMTRACK
    used += sweep_small(list1, avail, full, empty, size,
      &mem_allocated, &mem_used);
    used += sweep_small(list2, avail, full, empty, size,
      &mem_allocated, &mem_used);
#else
    used += sweep_small(list1, avail, full, empty, size);
    used += sweep_small(list2, avail, full, empty, size);
#endif
  }

#ifdef USE_MEMTRACK
  heap->large = sweep_large(heap->large, &used, &mem_allocated, &mem_used);
#else
  heap->large = sweep_large(heap->large, &used);
#endif

  // Foreign object sizes will have been added to heap->used already. Here we
  // add local object sizes as well and set the next gc point for when memory
  // usage has increased.
  heap->used += used;
#ifdef USE_MEMTRACK
  heap->mem_allocated += mem_allocated;
  heap->mem_used += mem_used;
#endif
  heap->next_gc = (size_t)((double)heap->used * heap_nextgc_factor);

  // Never drop the trigger below the configured initial threshold.
  if(heap->next_gc < heap_initialgc)
    heap->next_gc = heap_initialgc;
}
  781 
// Return the actor that owns a chunk's memory.
pony_actor_t* ponyint_heap_owner(chunk_t* chunk)
{
  // FIX: false sharing
  // reading from something that will never be written
  // but is on a cache line that will often be written
  // called during tracing
  // actual chunk only needed for GC tracing
  // all other tracing only needs the owner
  // so the owner needs the chunk and everyone else just needs the owner
  return chunk->actor;
}
  793 
  794 size_t ponyint_heap_size(chunk_t* chunk)
  795 {
  796   if(chunk->size >= HEAP_SIZECLASSES)
  797     return chunk->size;
  798 
  799   return SIZECLASS_SIZE(chunk->size);
  800 }