/*
    Title:      Multi-Threaded Garbage Collector - Copy phase

    Copyright (c) 2010-12 David C. J. Matthews

    Based on the original garbage collector code
        Copyright 2000-2008
        Cambridge University Technical Services Limited

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation; either
    version 2.1 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
    Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA

*/
/*
This is the second, copy, phase of the garbage collector.  The previous,
mark, phase has identified all the live data and set the bits in the bit-maps.
This phase compacts the memory by copying cells from lower in the segment or
from other segments.  When a cell is copied the length-word is modified to be
a tomb-stone that gives the new location for the cell.  Cells can be copied to
areas of memory that are shown as free in the bit-maps and the destination area
is then marked as allocated.  Because there are tomb-stones left behind, the
original location of a cell must remain as allocated and its space cannot be
reused until the GC is complete.

We copy cells in a defined order to avoid copying loops.
The ordering on the addresses is:
    Immutable areas (for immutable cells) (highest)
    Mutable areas
    Allocation areas (lowest)
Within each group a lower position in gMem.lSpaces is higher.
MemMgr::AddLocalSpace enters spaces into gMem.lSpaces such that immutable
areas come before mutable areas, which come before allocation areas,
so the ordering reduces to the position in that table.
Within a space higher addresses are higher.
So we try to copy data out of the allocation areas and to copy any
cells that are now immutable out of the mutable areas.  We try to copy
data out of higher-numbered spaces in order to free them
completely, and to compact data towards the top of a space if we
can't.

Once a thread has started copying into or out of an area it takes
ownership of the area and no other thread can use it.  This avoids
any need to lock individual cells: two threads never copy into, or
out of, the same area at the same time.
*/
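
/*
An informal sketch of how the tomb-stones left by this phase are consumed:
once a cell has been copied its old length word contains a forwarding
pointer, which the later update phase follows to redirect every reference.
The helper below is hypothetical and for illustration only, though it uses
the real PolyObject operations (ContainsForwardingPtr/GetForwardingPtr):

    static void updateReference(PolyObject **ref)
    {
        PolyObject *obj = *ref;
        if (obj->ContainsForwardingPtr())   // The length word is a tomb-stone.
            *ref = obj->GetForwardingPtr(); // Redirect to the new location.
    }
*/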

#ifdef HAVE_CONFIG_H
#include "config.h"
#elif defined(_WIN32)
#include "winconfig.h"
#else
#error "No configuration file"
#endif

#ifdef HAVE_ASSERT_H
#include <assert.h>
#define ASSERT(x)   assert(x)
#else
#define ASSERT(x)
#endif

#ifdef HAVE_STRING_H
#include <string.h>
#endif

#include "globals.h"
#include "machine_dep.h"
#include "processes.h"
#include "gc.h"
#include "scanaddrs.h"
#include "bitmap.h"
#include "memmgr.h"
#include "gctaskfarm.h"
#include "locking.h"
#include "diagnostics.h"

static PLock copyLock("Copy");

// Search the area downwards looking for n consecutive free words.
// Return the address of the word if successful or 0 on failure.
// "limit" is the bit position of the bottom of the area or, if we're compacting an area,
// the bit position of the object we'd like to move to a higher address.
static inline PolyWord *FindFreeAndAllocate(LocalMemSpace *dst, uintptr_t limit, uintptr_t n)
{
    if (dst == 0) return 0; // No current space

    /* SPF's version of the start caching code. SPF 2/10/96 */
    // The idea is to avoid searching over an area that is already known
    // not to have any gaps large enough for an object of a given size.
    // Knowing that there is no space for an object of size n implies that
    // there is no space for anything larger than n.  SPF's idea is that
    // after finding space in the bitmap we update only the element for the
    // size we are looking for rather than every larger entry.
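    // As a worked example (sizes are illustrative): suppose an earlier search
    // for a 6-word gap failed, so start[6] is 0.  On a later request for 8
    // words the loop below propagates that 0 from start[6] up to start[8],
    // and the request then fails immediately at the "start <= limit" test
    // without rescanning the bitmap.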
    unsigned truncated_n = (unsigned)(n < NSTARTS ? n : NSTARTS - 1);

    // If we're looking for something larger than last time, update
    // all the entries between last time's size and this size.
    for (unsigned i = dst->start_index; i < truncated_n; i++)
    {
        if (dst->start[i] < dst->start[i+1])
            dst->start[i+1] = dst->start[i];
    }

    dst->start_index = truncated_n;
    uintptr_t start = dst->start[truncated_n];
    if (start <= limit)
        return 0;

#ifdef POLYML32IN64
    // This is complicated.  We need the eventual address to be on an even word boundary,
    // which means the length word is on an odd boundary.  We might find an exact match that
    // fits this or we may need to keep looking.
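    // For example, if FindFree returns an even bit position the length word
    // would be even-aligned.  If the bit just below it is also free we move
    // the allocation down one word, leaving the length word odd-aligned and
    // the object itself on the even word boundary that 32-in-64 requires.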
    uintptr_t free = start;
    uintptr_t m = n & 1 ? n + 1 : n; // If n is odd round up.
    for (;;)
    {
        uintptr_t lastFree = free;
        free = dst->bitmap.FindFree(limit, free, m);
        if (free == lastFree) { free = start;  break; } // Not there - return with free = start
        if (free & 1) break;  // It's odd-aligned - that's fine
        if (!dst->bitmap.TestBit(free - 1))
        {
            // The previous bit is free - we can use this.
            // This leaves a hole but we'll zero it during the update phase.
            free = free - 1;
            break;
        }
    }
#else
    // Look in the bitmap.  Returns "start" if it can't find space.
    uintptr_t free = dst->bitmap.FindFree(limit, start, n);
#endif

    // If we failed to allocate the space (free == start) we set this to
    // zero to indicate that there is no space for anything of this size
    // or larger.
    if (n < NSTARTS)
        dst->start[n] = free == start ? 0 : free;

    if (free == start)
        return 0;

    // Allocate the space.
    dst->bitmap.SetBits(free, n);

    PolyWord *newp = dst->wordAddr(free); /* New object address */

    // Update dst->upperAllocPtr, so the new object doesn't get trampled.
    if (newp < dst->upperAllocPtr)
        dst->upperAllocPtr = newp;

    return newp;
}
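
/*
A minimal usage sketch (not part of the compiled code; the values are
hypothetical).  To compact within a space "sp" we pass the object's own bit
position as the limit, so that only free runs at higher addresses are
considered; passing 0, as the cross-space case below does, searches the
whole destination area:

    PolyWord *newp = FindFreeAndAllocate(sp, bitno, n);
    if (newp == 0)
        ... // No run of n free words above bitno: leave the object in place.
*/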

// Copy a cell to its new address.
void CopyObjectToNewAddress(PolyObject *srcAddress, PolyObject *destAddress, POLYUNSIGNED L)
{
    destAddress->SetLengthWord(L); /* copy length word */

    POLYUNSIGNED n = OBJ_OBJECT_LENGTH(L);

    // Unroll the loop for the most common cases.  The cases deliberately
    // fall through, so that e.g. a 3-word object copies words 2, 1 and 0.
    switch (n)
    {
    default:
        memcpy(destAddress, srcAddress, n * sizeof(PolyWord));
        break;
    case 4:
        destAddress->Set(3, srcAddress->Get(3)); // Fall through
    case 3:
        destAddress->Set(2, srcAddress->Get(2)); // Fall through
    case 2:
        destAddress->Set(1, srcAddress->Get(1)); // Fall through
    case 1:
        destAddress->Set(0, srcAddress->Get(0));
    }
}

// Find the next space in the sequence.  It may return with the space unchanged if it
// is unable to find a suitable space.
static bool FindNextSpace(LocalMemSpace *src, LocalMemSpace **dst, bool isMutable, GCTaskId *id)
{
    std::vector<LocalMemSpace*>::iterator m = gMem.lSpaces.begin();
    // If we're compressing the space and it's full that's it.
    if (*dst == src)
        return false;
    if (*dst != 0)
    {
        // Find the next space after this.
        while (*m != *dst) m++;
        m++;
    }
    for (; m < gMem.lSpaces.end(); m++) {
        LocalMemSpace *lSpace = *m;
        if (lSpace == src)
        {
            // The only possibility is to compact this area.
            ASSERT(!isMutable || src->isMutable);
            *dst = src;
            return true; // We already own it
        }
        if (lSpace->isMutable == isMutable && !lSpace->allocationSpace && lSpace->spaceOwner == 0)
        {
            // Now acquire the lock.  We have to retest spaceOwner with the lock held.
            PLocker lock(&copyLock);
            if (lSpace->spaceOwner == 0)
            {
                // Change the space.
                lSpace->spaceOwner = id;
                *dst = lSpace; // Return the space
                if (debugOptions & DEBUG_GC_ENHANCED)
                    Log("GC: Copy: copying %s cells from %p to %p\n",
                                isMutable ? "mutable" : "immutable", src, lSpace);
                return true;
            }
        }
    }
    return false;
}
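
// Design note: spaceOwner is read once without copyLock purely as a cheap
// filter; the decisive test is repeated with the lock held, so a stale
// unlocked read can at worst cost a wasted lock acquisition, never a double
// claim.  copyAllData below uses the same pattern to claim source spaces.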

// Copy objects from the source space into an earlier space, or upwards within the
// current space.
static void copyAllData(GCTaskId *id, void * /*arg1*/, void * /*arg2*/)
{
    LocalMemSpace *mutableDest = 0, *immutableDest = 0;

    for (std::vector<LocalMemSpace*>::reverse_iterator i = gMem.lSpaces.rbegin(); i != gMem.lSpaces.rend(); i++)
    {
        LocalMemSpace *src = *i;

        // Claim ownership of the source space, retesting under the lock in
        // case another thread claimed it first.
        if (src->spaceOwner == 0)
        {
            PLocker lock(&copyLock);
            if (src->spaceOwner == 0)
                src->spaceOwner = id;
            else continue;
        }
        else if (src->spaceOwner != id)
            continue;

        if (debugOptions & DEBUG_GC_ENHANCED)
            Log("GC: Copy: copying area %p (thread %p) %s \n", src, id, src->spaceTypeString());

        // We start at fullGCLowerLimit, which is the lowest marked object in the heap.
        // N.B.  It's essential that the first set bit at or above this corresponds
        // to the length word of a real object.
        uintptr_t  bitno   = src->wordNo(src->fullGCLowerLimit);
        // Set the limit to the top so we won't rescan this.  That can
        // only happen if copying takes a very short time and the same
        // thread runs multiple tasks.
        src->fullGCLowerLimit = src->top;

        // "highest" is the bit position that corresponds to the top of
        // the generation we're copying.
        uintptr_t  highest = src->wordNo(src->top);

        for (;;)
        {
            if (bitno >= highest) break;

            /* SPF version; Invariant: 0 < highest - bitno */
            bitno += src->bitmap.CountZeroBits(bitno, highest - bitno);

            if (bitno >= highest) break;

            /* The first set bit corresponds to the length word. */
            PolyWord *old = src->wordAddr(bitno); /* Old object address */

            PolyObject *obj = (PolyObject*)(old+1);

            POLYUNSIGNED L = obj->LengthWord();
            ASSERT (OBJ_IS_LENGTH(L));

            POLYUNSIGNED n = OBJ_OBJECT_LENGTH(L) + 1; /* Length of allocation (including length word) */
            bitno += n;

            // Find a mutable space for the mutable objects and an immutable space for
            // the immutables.  We copy objects into earlier spaces or within their own
            // space, but we don't copy an object to a later space.  This avoids the
            // risk of copying an object multiple times.  Previously this copied objects
            // into later spaces but that doesn't work well if we have converted old
            // saved state segments into local areas.  It's much better to delete them
            // if possible.  For example, an immutable cell found in a mutable area is
            // moved, when space allows, into an immutable space earlier in gMem.lSpaces.
            bool isMutable = OBJ_IS_MUTABLE_OBJECT(L);
            LocalMemSpace *destSpace = isMutable || immutableDest == 0 ? mutableDest : immutableDest;
            PolyWord *newp = FindFreeAndAllocate(destSpace, (src == destSpace) ? bitno : 0, n);
            if (newp == 0 && src != destSpace)
            {
                // See if we can find a different space.
                // N.B.  FindNextSpace side-effects mutableDest/immutableDest to give the next space.
                if (FindNextSpace(src, isMutable ? &mutableDest : &immutableDest, isMutable, id))
                {
                    bitno -= n; // Redo this object
                    continue;
                }
                // else just leave it
            }

            if (newp == 0) /* no room */
            {
                // We're not going to move this object.
                // Update src->upperAllocPtr, so the old object doesn't get trampled.
                if (old < src->upperAllocPtr)
                    src->upperAllocPtr = old;

                // Previously this continued compressing to try to make space available
                // on the next GC.  Normally full GCs are infrequent so the chances are
                // that at the next GC other data will have been freed.  Just stop at
                // this point.
                // However if we're compressing a mutable area and there is immutable
                // data in it we should move those out because the mutable area is scanned
                // on every partial GC.
                if (! src->isMutable || src->i_marked == 0)
                    break;
            }
            else
            {
                PolyObject *destAddress = (PolyObject*)(newp+1);
                obj->SetForwardingPtr(destAddress);
                CopyObjectToNewAddress(obj, destAddress, L);

                if (debugOptions & DEBUG_GC_DETAIL)
                    Log("GC: Copy: %p %lu %u -> %p\n", obj, OBJ_OBJECT_LENGTH(L),
                                GetTypeBits(L), destAddress);
            }
        }

        // We've finished with this source space; stop using it as a destination.
        if (mutableDest == src)
            mutableDest = 0;
        if (immutableDest == src)
            immutableDest = 0;
    }
}

void GCCopyPhase()
{
    mainThreadPhase = MTP_GCPHASECOMPACT;

    for(std::vector<LocalMemSpace*>::iterator i = gMem.lSpaces.begin(); i < gMem.lSpaces.end(); i++)
    {
        LocalMemSpace *lSpace = *i;
        uintptr_t highest = lSpace->wordNo(lSpace->top);
        for (unsigned j = 0; j < NSTARTS; j++)
            lSpace->start[j] = highest;
        lSpace->start_index = NSTARTS - 1;
        lSpace->spaceOwner = 0;
        // Reset the allocation pointers.  This puts garbage (and real data) below them.
        // At the end of the compaction the allocation pointer will point below the
        // lowest real data.
        lSpace->upperAllocPtr = lSpace->top;
    }

    // Copy the mutable data into a lower area if possible.
    if (gpTaskFarm->ThreadCount() == 0)
        copyAllData(globalTask, 0, 0);
    else
    {
        // We start as many tasks as we have threads.  If the amount of work to
        // be done is very small one thread could process more than one task.
        // Have to be careful because we use the task ID to decide which space
        // to scan.
        for (unsigned j = 0; j < gpTaskFarm->ThreadCount(); j++)
            gpTaskFarm->AddWorkOrRunNow(&copyAllData, 0, 0);
    }

    gpTaskFarm->WaitForCompletion();
}