py: Put all global state together in state structures.

This patch consolidates all global variables of the py/ core into one
place, a single global state structure (mp_state_ctx).  Root pointers
are grouped together within that structure so that GC tracing is
simpler and more efficient.
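
For readers new to the layout, the sketch below shows the pattern this patch
introduces: every former file-level global becomes a member of a sub-structure
(here mp_state_mem_t for the memory manager), the sub-structures are gathered
into one global mp_state_ctx, and call sites go through an accessor macro.
This is a simplified, hypothetical approximation; the real definitions live in
py/mpstate.h and contain many more members, and the stack size of 64 and the
main() driver here are placeholders.

    /* Simplified sketch of the state-consolidation pattern (not the actual
     * contents of py/mpstate.h). */
    #include <stdint.h>
    #include <stdio.h>

    typedef unsigned long mp_uint_t;  // word-sized integer, as used by the core
    typedef unsigned char byte;

    // Memory-manager state: each former 'STATIC' global of gc.c becomes a member.
    typedef struct _mp_state_mem_t {
        byte *gc_alloc_table_start;
        mp_uint_t gc_alloc_table_byte_len;
        mp_uint_t *gc_pool_start;
        mp_uint_t *gc_pool_end;
        int gc_stack_overflow;
        mp_uint_t gc_stack[64];       // MICROPY_ALLOC_GC_STACK_SIZE in the real code
        mp_uint_t *gc_sp;
        uint16_t gc_lock_depth;
        uint16_t gc_auto_collect_enabled;
        mp_uint_t gc_last_free_atb_index;
    } mp_state_mem_t;

    // Top-level context: a single global instance holds all sub-structures.
    typedef struct _mp_state_ctx_t {
        mp_state_mem_t mem;
        // ... vm and thread state follow here in the real structure ...
    } mp_state_ctx_t;

    mp_state_ctx_t mp_state_ctx;

    // Accessor macro: call sites write MP_STATE_MEM(x) instead of a bare global x.
    #define MP_STATE_MEM(x) (mp_state_ctx.mem.x)

    int main(void) {
        MP_STATE_MEM(gc_lock_depth)++;  // roughly what gc_lock() now compiles to
        printf("lock depth = %u\n", (unsigned)MP_STATE_MEM(gc_lock_depth));
        return 0;
    }

Keeping all root pointers adjacent in one structure is what lets the new
gc_collect_start() below trace them with a single gc_collect_root() call
instead of registering each global individually.
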
diff --git a/py/gc.c b/py/gc.c
index 07f0b80..73a6ca8 100644
--- a/py/gc.c
+++ b/py/gc.c
@@ -28,6 +28,7 @@
 #include <stdio.h>
 #include <string.h>
 
+#include "py/mpstate.h"
 #include "py/gc.h"
 #include "py/obj.h"
 #include "py/runtime.h"
@@ -48,25 +49,6 @@
 #define WORDS_PER_BLOCK (4)
 #define BYTES_PER_BLOCK (WORDS_PER_BLOCK * BYTES_PER_WORD)
 
-STATIC byte *gc_alloc_table_start;
-STATIC mp_uint_t gc_alloc_table_byte_len;
-#if MICROPY_ENABLE_FINALISER
-STATIC byte *gc_finaliser_table_start;
-#endif
-// We initialise gc_pool_start to a dummy value so it stays out of the bss
-// section.  This makes sure we don't trace this pointer in a collect cycle.
-// If we did trace it, it would make the first block of the heap always
-// reachable, and hence we can never free that block.
-STATIC mp_uint_t *gc_pool_start = (void*)4;
-STATIC mp_uint_t *gc_pool_end;
-
-STATIC int gc_stack_overflow;
-STATIC mp_uint_t gc_stack[MICROPY_ALLOC_GC_STACK_SIZE];
-STATIC mp_uint_t *gc_sp;
-STATIC uint16_t gc_lock_depth;
-uint16_t gc_auto_collect_enabled;
-STATIC mp_uint_t gc_last_free_atb_index;
-
 // ATB = allocation table byte
 // 0b00 = FREE -- free block
 // 0b01 = HEAD -- head of a chain of blocks
@@ -90,15 +72,15 @@
 #define ATB_3_IS_FREE(a) (((a) & ATB_MASK_3) == 0)
 
 #define BLOCK_SHIFT(block) (2 * ((block) & (BLOCKS_PER_ATB - 1)))
-#define ATB_GET_KIND(block) ((gc_alloc_table_start[(block) / BLOCKS_PER_ATB] >> BLOCK_SHIFT(block)) & 3)
-#define ATB_ANY_TO_FREE(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] &= (~(AT_MARK << BLOCK_SHIFT(block))); } while (0)
-#define ATB_FREE_TO_HEAD(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_HEAD << BLOCK_SHIFT(block)); } while (0)
-#define ATB_FREE_TO_TAIL(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_TAIL << BLOCK_SHIFT(block)); } while (0)
-#define ATB_HEAD_TO_MARK(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_MARK << BLOCK_SHIFT(block)); } while (0)
-#define ATB_MARK_TO_HEAD(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] &= (~(AT_TAIL << BLOCK_SHIFT(block))); } while (0)
+#define ATB_GET_KIND(block) ((MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] >> BLOCK_SHIFT(block)) & 3)
+#define ATB_ANY_TO_FREE(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] &= (~(AT_MARK << BLOCK_SHIFT(block))); } while (0)
+#define ATB_FREE_TO_HEAD(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] |= (AT_HEAD << BLOCK_SHIFT(block)); } while (0)
+#define ATB_FREE_TO_TAIL(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] |= (AT_TAIL << BLOCK_SHIFT(block)); } while (0)
+#define ATB_HEAD_TO_MARK(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] |= (AT_MARK << BLOCK_SHIFT(block)); } while (0)
+#define ATB_MARK_TO_HEAD(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] &= (~(AT_TAIL << BLOCK_SHIFT(block))); } while (0)
 
-#define BLOCK_FROM_PTR(ptr) (((ptr) - (mp_uint_t)gc_pool_start) / BYTES_PER_BLOCK)
-#define PTR_FROM_BLOCK(block) (((block) * BYTES_PER_BLOCK + (mp_uint_t)gc_pool_start))
+#define BLOCK_FROM_PTR(ptr) (((ptr) - (mp_uint_t)MP_STATE_MEM(gc_pool_start)) / BYTES_PER_BLOCK)
+#define PTR_FROM_BLOCK(block) (((block) * BYTES_PER_BLOCK + (mp_uint_t)MP_STATE_MEM(gc_pool_start)))
 #define ATB_FROM_BLOCK(bl) ((bl) / BLOCKS_PER_ATB)
 
 #if MICROPY_ENABLE_FINALISER
@@ -107,9 +89,9 @@
 
 #define BLOCKS_PER_FTB (8)
 
-#define FTB_GET(block) ((gc_finaliser_table_start[(block) / BLOCKS_PER_FTB] >> ((block) & 7)) & 1)
-#define FTB_SET(block) do { gc_finaliser_table_start[(block) / BLOCKS_PER_FTB] |= (1 << ((block) & 7)); } while (0)
-#define FTB_CLEAR(block) do { gc_finaliser_table_start[(block) / BLOCKS_PER_FTB] &= (~(1 << ((block) & 7))); } while (0)
+#define FTB_GET(block) ((MP_STATE_MEM(gc_finaliser_table_start)[(block) / BLOCKS_PER_FTB] >> ((block) & 7)) & 1)
+#define FTB_SET(block) do { MP_STATE_MEM(gc_finaliser_table_start)[(block) / BLOCKS_PER_FTB] |= (1 << ((block) & 7)); } while (0)
+#define FTB_CLEAR(block) do { MP_STATE_MEM(gc_finaliser_table_start)[(block) / BLOCKS_PER_FTB] &= (~(1 << ((block) & 7))); } while (0)
 #endif
 
 // TODO waste less memory; currently requires that all entries in alloc_table have a corresponding block in pool
@@ -125,67 +107,67 @@
     // => T = A * (1 + BLOCKS_PER_ATB / BLOCKS_PER_FTB + BLOCKS_PER_ATB * BYTES_PER_BLOCK)
     mp_uint_t total_byte_len = (byte*)end - (byte*)start;
 #if MICROPY_ENABLE_FINALISER
-    gc_alloc_table_byte_len = total_byte_len * BITS_PER_BYTE / (BITS_PER_BYTE + BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB + BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
+    MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len * BITS_PER_BYTE / (BITS_PER_BYTE + BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB + BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
 #else
-    gc_alloc_table_byte_len = total_byte_len / (1 + BITS_PER_BYTE / 2 * BYTES_PER_BLOCK);
+    MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len / (1 + BITS_PER_BYTE / 2 * BYTES_PER_BLOCK);
 #endif
 
-    gc_alloc_table_start = (byte*)start;
+    MP_STATE_MEM(gc_alloc_table_start) = (byte*)start;
 
 #if MICROPY_ENABLE_FINALISER
-    mp_uint_t gc_finaliser_table_byte_len = (gc_alloc_table_byte_len * BLOCKS_PER_ATB + BLOCKS_PER_FTB - 1) / BLOCKS_PER_FTB;
-    gc_finaliser_table_start = gc_alloc_table_start + gc_alloc_table_byte_len;
+    mp_uint_t gc_finaliser_table_byte_len = (MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB + BLOCKS_PER_FTB - 1) / BLOCKS_PER_FTB;
+    MP_STATE_MEM(gc_finaliser_table_start) = MP_STATE_MEM(gc_alloc_table_start) + MP_STATE_MEM(gc_alloc_table_byte_len);
 #endif
 
-    mp_uint_t gc_pool_block_len = gc_alloc_table_byte_len * BLOCKS_PER_ATB;
-    gc_pool_start = (mp_uint_t*)((byte*)end - gc_pool_block_len * BYTES_PER_BLOCK);
-    gc_pool_end = (mp_uint_t*)end;
+    mp_uint_t gc_pool_block_len = MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB;
+    MP_STATE_MEM(gc_pool_start) = (mp_uint_t*)((byte*)end - gc_pool_block_len * BYTES_PER_BLOCK);
+    MP_STATE_MEM(gc_pool_end) = (mp_uint_t*)end;
 
 #if MICROPY_ENABLE_FINALISER
-    assert((byte*)gc_pool_start >= gc_finaliser_table_start + gc_finaliser_table_byte_len);
+    assert((byte*)MP_STATE_MEM(gc_pool_start) >= MP_STATE_MEM(gc_finaliser_table_start) + gc_finaliser_table_byte_len);
 #endif
 
     // clear ATBs
-    memset(gc_alloc_table_start, 0, gc_alloc_table_byte_len);
+    memset(MP_STATE_MEM(gc_alloc_table_start), 0, MP_STATE_MEM(gc_alloc_table_byte_len));
 
 #if MICROPY_ENABLE_FINALISER
     // clear FTBs
-    memset(gc_finaliser_table_start, 0, gc_finaliser_table_byte_len);
+    memset(MP_STATE_MEM(gc_finaliser_table_start), 0, gc_finaliser_table_byte_len);
 #endif
 
     // set last free ATB index to start of heap
-    gc_last_free_atb_index = 0;
+    MP_STATE_MEM(gc_last_free_atb_index) = 0;
 
     // unlock the GC
-    gc_lock_depth = 0;
+    MP_STATE_MEM(gc_lock_depth) = 0;
 
     // allow auto collection
-    gc_auto_collect_enabled = 1;
+    MP_STATE_MEM(gc_auto_collect_enabled) = 1;
 
     DEBUG_printf("GC layout:\n");
-    DEBUG_printf("  alloc table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", gc_alloc_table_start, gc_alloc_table_byte_len, gc_alloc_table_byte_len * BLOCKS_PER_ATB);
+    DEBUG_printf("  alloc table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", MP_STATE_MEM(gc_alloc_table_start), MP_STATE_MEM(gc_alloc_table_byte_len), MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB);
 #if MICROPY_ENABLE_FINALISER
-    DEBUG_printf("  finaliser table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", gc_finaliser_table_start, gc_finaliser_table_byte_len, gc_finaliser_table_byte_len * BLOCKS_PER_FTB);
+    DEBUG_printf("  finaliser table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", MP_STATE_MEM(gc_finaliser_table_start), gc_finaliser_table_byte_len, gc_finaliser_table_byte_len * BLOCKS_PER_FTB);
 #endif
-    DEBUG_printf("  pool at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", gc_pool_start, gc_pool_block_len * BYTES_PER_BLOCK, gc_pool_block_len);
+    DEBUG_printf("  pool at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", MP_STATE_MEM(gc_pool_start), gc_pool_block_len * BYTES_PER_BLOCK, gc_pool_block_len);
 }
 
 void gc_lock(void) {
-    gc_lock_depth++;
+    MP_STATE_MEM(gc_lock_depth)++;
 }
 
 void gc_unlock(void) {
-    gc_lock_depth--;
+    MP_STATE_MEM(gc_lock_depth)--;
 }
 
 bool gc_is_locked(void) {
-    return gc_lock_depth != 0;
+    return MP_STATE_MEM(gc_lock_depth) != 0;
 }
 
 #define VERIFY_PTR(ptr) ( \
         (ptr & (BYTES_PER_BLOCK - 1)) == 0          /* must be aligned on a block */ \
-        && ptr >= (mp_uint_t)gc_pool_start     /* must be above start of pool */ \
-        && ptr < (mp_uint_t)gc_pool_end        /* must be below end of pool */ \
+        && ptr >= (mp_uint_t)MP_STATE_MEM(gc_pool_start)     /* must be above start of pool */ \
+        && ptr < (mp_uint_t)MP_STATE_MEM(gc_pool_end)        /* must be below end of pool */ \
     )
 
 #define VERIFY_MARK_AND_PUSH(ptr) \
@@ -195,19 +177,19 @@
             if (ATB_GET_KIND(_block) == AT_HEAD) { \
                 /* an unmarked head, mark it, and push it on gc stack */ \
                 ATB_HEAD_TO_MARK(_block); \
-                if (gc_sp < &gc_stack[MICROPY_ALLOC_GC_STACK_SIZE]) { \
-                    *gc_sp++ = _block; \
+                if (MP_STATE_MEM(gc_sp) < &MP_STATE_MEM(gc_stack)[MICROPY_ALLOC_GC_STACK_SIZE]) { \
+                    *MP_STATE_MEM(gc_sp)++ = _block; \
                 } else { \
-                    gc_stack_overflow = 1; \
+                    MP_STATE_MEM(gc_stack_overflow) = 1; \
                 } \
             } \
         } \
     } while (0)
 
 STATIC void gc_drain_stack(void) {
-    while (gc_sp > gc_stack) {
+    while (MP_STATE_MEM(gc_sp) > MP_STATE_MEM(gc_stack)) {
         // pop the next block off the stack
-        mp_uint_t block = *--gc_sp;
+        mp_uint_t block = *--MP_STATE_MEM(gc_sp);
 
         // work out number of consecutive blocks in the chain starting with this one
         mp_uint_t n_blocks = 0;
@@ -225,15 +207,15 @@
 }
 
 STATIC void gc_deal_with_stack_overflow(void) {
-    while (gc_stack_overflow) {
-        gc_stack_overflow = 0;
-        gc_sp = gc_stack;
+    while (MP_STATE_MEM(gc_stack_overflow)) {
+        MP_STATE_MEM(gc_stack_overflow) = 0;
+        MP_STATE_MEM(gc_sp) = MP_STATE_MEM(gc_stack);
 
         // scan entire memory looking for blocks which have been marked but not their children
-        for (mp_uint_t block = 0; block < gc_alloc_table_byte_len * BLOCKS_PER_ATB; block++) {
+        for (mp_uint_t block = 0; block < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; block++) {
             // trace (again) if mark bit set
             if (ATB_GET_KIND(block) == AT_MARK) {
-                *gc_sp++ = block;
+                *MP_STATE_MEM(gc_sp)++ = block;
                 gc_drain_stack();
             }
         }
@@ -250,7 +232,7 @@
     #endif
     // free unmarked heads and their tails
     int free_tail = 0;
-    for (mp_uint_t block = 0; block < gc_alloc_table_byte_len * BLOCKS_PER_ATB; block++) {
+    for (mp_uint_t block = 0; block < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; block++) {
         switch (ATB_GET_KIND(block)) {
             case AT_HEAD:
 #if MICROPY_ENABLE_FINALISER
@@ -292,8 +274,13 @@
 
 void gc_collect_start(void) {
     gc_lock();
-    gc_stack_overflow = 0;
-    gc_sp = gc_stack;
+    MP_STATE_MEM(gc_stack_overflow) = 0;
+    MP_STATE_MEM(gc_sp) = MP_STATE_MEM(gc_stack);
+    // Trace root pointers.  This relies on the root pointers being organised
+    // correctly in the mp_state_ctx structure.  We scan nlr_top, dict_locals,
+    // dict_globals, then the root pointer section of mp_state_vm.
+    void **ptrs = (void**)(void*)&mp_state_ctx;
+    gc_collect_root(ptrs, offsetof(mp_state_ctx_t, vm.stack_top) / sizeof(mp_uint_t));
 }
 
 void gc_collect_root(void **ptrs, mp_uint_t len) {
@@ -307,18 +294,18 @@
 void gc_collect_end(void) {
     gc_deal_with_stack_overflow();
     gc_sweep();
-    gc_last_free_atb_index = 0;
+    MP_STATE_MEM(gc_last_free_atb_index) = 0;
     gc_unlock();
 }
 
 void gc_info(gc_info_t *info) {
-    info->total = (gc_pool_end - gc_pool_start) * sizeof(mp_uint_t);
+    info->total = (MP_STATE_MEM(gc_pool_end) - MP_STATE_MEM(gc_pool_start)) * sizeof(mp_uint_t);
     info->used = 0;
     info->free = 0;
     info->num_1block = 0;
     info->num_2block = 0;
     info->max_block = 0;
-    for (mp_uint_t block = 0, len = 0; block < gc_alloc_table_byte_len * BLOCKS_PER_ATB; block++) {
+    for (mp_uint_t block = 0, len = 0; block < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; block++) {
         mp_uint_t kind = ATB_GET_KIND(block);
         if (kind == AT_FREE || kind == AT_HEAD) {
             if (len == 1) {
@@ -361,7 +348,7 @@
     DEBUG_printf("gc_alloc(" UINT_FMT " bytes -> " UINT_FMT " blocks)\n", n_bytes, n_blocks);
 
     // check if GC is locked
-    if (gc_lock_depth > 0) {
+    if (MP_STATE_MEM(gc_lock_depth) > 0) {
         return NULL;
     }
 
@@ -374,12 +361,12 @@
     mp_uint_t end_block;
     mp_uint_t start_block;
     mp_uint_t n_free = 0;
-    int collected = !gc_auto_collect_enabled;
+    int collected = !MP_STATE_MEM(gc_auto_collect_enabled);
     for (;;) {
 
         // look for a run of n_blocks available blocks
-        for (i = gc_last_free_atb_index; i < gc_alloc_table_byte_len; i++) {
-            byte a = gc_alloc_table_start[i];
+        for (i = MP_STATE_MEM(gc_last_free_atb_index); i < MP_STATE_MEM(gc_alloc_table_byte_len); i++) {
+            byte a = MP_STATE_MEM(gc_alloc_table_start)[i];
             if (ATB_0_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 0; goto found; } } else { n_free = 0; }
             if (ATB_1_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 1; goto found; } } else { n_free = 0; }
             if (ATB_2_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 2; goto found; } } else { n_free = 0; }
@@ -407,7 +394,7 @@
     // before this one.  Also, whenever we free or shrink a block we must check
     // if this index needs adjusting (see gc_realloc and gc_free).
     if (n_free == 1) {
-        gc_last_free_atb_index = (i + 1) / BLOCKS_PER_ATB;
+        MP_STATE_MEM(gc_last_free_atb_index) = (i + 1) / BLOCKS_PER_ATB;
     }
 
     // mark first block as used head
@@ -420,7 +407,7 @@
     }
 
     // get pointer to first block
-    void *ret_ptr = (void*)(gc_pool_start + start_block * WORDS_PER_BLOCK);
+    void *ret_ptr = (void*)(MP_STATE_MEM(gc_pool_start) + start_block * WORDS_PER_BLOCK);
     DEBUG_printf("gc_alloc(%p)\n", ret_ptr);
 
     // zero out the additional bytes of the newly allocated blocks
@@ -458,7 +445,7 @@
 
 // force the freeing of a piece of memory
 void gc_free(void *ptr_in) {
-    if (gc_lock_depth > 0) {
+    if (MP_STATE_MEM(gc_lock_depth) > 0) {
         // TODO how to deal with this error?
         return;
     }
@@ -470,8 +457,8 @@
         mp_uint_t block = BLOCK_FROM_PTR(ptr);
         if (ATB_GET_KIND(block) == AT_HEAD) {
             // set the last_free pointer to this block if it's earlier in the heap
-            if (block / BLOCKS_PER_ATB < gc_last_free_atb_index) {
-                gc_last_free_atb_index = block / BLOCKS_PER_ATB;
+            if (block / BLOCKS_PER_ATB < MP_STATE_MEM(gc_last_free_atb_index)) {
+                MP_STATE_MEM(gc_last_free_atb_index) = block / BLOCKS_PER_ATB;
             }
 
             // free head and all of its tail blocks
@@ -540,7 +527,7 @@
 #else // Alternative gc_realloc impl
 
 void *gc_realloc(void *ptr_in, mp_uint_t n_bytes) {
-    if (gc_lock_depth > 0) {
+    if (MP_STATE_MEM(gc_lock_depth) > 0) {
         return NULL;
     }
 
@@ -581,7 +568,7 @@
     // efficiently shrink it (see below for shrinking code).
     mp_uint_t n_free   = 0;
     mp_uint_t n_blocks = 1; // counting HEAD block
-    mp_uint_t max_block = gc_alloc_table_byte_len * BLOCKS_PER_ATB;
+    mp_uint_t max_block = MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB;
     for (mp_uint_t bl = block + n_blocks; bl < max_block; bl++) {
         byte block_type = ATB_GET_KIND(bl);
         if (block_type == AT_TAIL) {
@@ -612,8 +599,8 @@
         }
 
         // set the last_free pointer to end of this block if it's earlier in the heap
-        if ((block + new_blocks) / BLOCKS_PER_ATB < gc_last_free_atb_index) {
-            gc_last_free_atb_index = (block + new_blocks) / BLOCKS_PER_ATB;
+        if ((block + new_blocks) / BLOCKS_PER_ATB < MP_STATE_MEM(gc_last_free_atb_index)) {
+            MP_STATE_MEM(gc_last_free_atb_index) = (block + new_blocks) / BLOCKS_PER_ATB;
         }
 
         #if EXTENSIVE_HEAP_PROFILING
@@ -675,22 +662,22 @@
     #if !EXTENSIVE_HEAP_PROFILING
     // When comparing heap output we don't want to print the starting
     // pointer of the heap because it changes from run to run.
-    printf("GC memory layout; from %p:", gc_pool_start);
+    printf("GC memory layout; from %p:", MP_STATE_MEM(gc_pool_start));
     #endif
-    for (mp_uint_t bl = 0; bl < gc_alloc_table_byte_len * BLOCKS_PER_ATB; bl++) {
+    for (mp_uint_t bl = 0; bl < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; bl++) {
         if (bl % DUMP_BYTES_PER_LINE == 0) {
             // a new line of blocks
             {
                 // check if this line contains only free blocks
                 mp_uint_t bl2 = bl;
-                while (bl2 < gc_alloc_table_byte_len * BLOCKS_PER_ATB && ATB_GET_KIND(bl2) == AT_FREE) {
+                while (bl2 < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB && ATB_GET_KIND(bl2) == AT_FREE) {
                     bl2++;
                 }
                 if (bl2 - bl >= 2 * DUMP_BYTES_PER_LINE) {
                     // there are at least 2 lines containing only free blocks, so abbreviate their printing
                     printf("\n       (" UINT_FMT " lines all free)", (bl2 - bl) / DUMP_BYTES_PER_LINE);
                     bl = bl2 & (~(DUMP_BYTES_PER_LINE - 1));
-                    if (bl >= gc_alloc_table_byte_len * BLOCKS_PER_ATB) {
+                    if (bl >= MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB) {
                         // got to end of heap
                         break;
                     }
@@ -736,7 +723,7 @@
             */
             /* this prints the uPy object type of the head block */
             case AT_HEAD: {
-                mp_uint_t *ptr = gc_pool_start + bl * WORDS_PER_BLOCK;
+                mp_uint_t *ptr = MP_STATE_MEM(gc_pool_start) + bl * WORDS_PER_BLOCK;
                 if (*ptr == (mp_uint_t)&mp_type_tuple) { c = 'T'; }
                 else if (*ptr == (mp_uint_t)&mp_type_list) { c = 'L'; }
                 else if (*ptr == (mp_uint_t)&mp_type_dict) { c = 'D'; }