1/*
2    Title:      Quick copying garbage collector
3
4    Copyright (c) 2011-12, 2016 David C. J. Matthews
5
6    This library is free software; you can redistribute it and/or
7    modify it under the terms of the GNU Lesser General Public
8    License version 2.1 as published by the Free Software Foundation.
9
10    This library is distributed in the hope that it will be useful,
11    but WITHOUT ANY WARRANTY; without even the implied warranty of
12    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13    Lesser General Public License for more details.
14
15    You should have received a copy of the GNU Lesser General Public
16    License along with this library; if not, write to the Free Software
17    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
18
19*/
20/*
21This is a quick copying garbage collector that moves all the data out of
22the allocation areas and into the mutable and immutable areas.  If either of
23these has filled up it fails and a full garbage collection must be done.
24*/
25#ifdef HAVE_CONFIG_H
26#include "config.h"
27#elif defined(_WIN32)
28#include "winconfig.h"
29#else
30#error "No configuration file"
31#endif
32
33#ifdef HAVE_STDLIB_H
34#include <stdlib.h>
35#endif
36
37#ifdef HAVE_STRING_H
38#include <string.h>
39#endif
40
41#ifdef HAVE_ASSERT_H
42#include <assert.h>
43#define ASSERT(x)   assert(x)
44#else
45#define ASSERT(x)
46#endif
47
48#include "globals.h"
49#include "processes.h"
50#include "gc.h"
51#include "scanaddrs.h"
52#include "check_objects.h"
53#include "bitmap.h"
54#include "memmgr.h"
55#include "diagnostics.h"
56#include "heapsizing.h"
57#include "gctaskfarm.h"
58#include "statistics.h"
59
// This protects access to the gMem.lSpace table.  It is also held while
// a scanning thread claims ownership of a space (ThreadScanner::FindSpace
// / TakeOwnership) because another thread may reallocate gMem.lSpaces.
static PLock localTableLock("Minor GC tables");

// Cleared by any scanning thread that cannot find room to copy an object.
// Checked at the end of the GC: false means a full GC must be run.
static bool succeeded = true;
64
// Common base class for the minor-GC scanners.  It copies objects out of
// the allocation areas; the destination space is chosen by FindSpace,
// which is supplied by a subclass.
class QuickGCScanner: public ScanAddress
{
public:
    QuickGCScanner(bool r): rootScan(r) {}
    virtual ~QuickGCScanner() {}

    // Overrides for ScanAddress class
    virtual POLYUNSIGNED ScanAddressAt(PolyWord *pt);
    virtual PolyObject *ScanObjectAddress(PolyObject *base);
private:
    // Copy obj (with length word L) out of srcSpace.  Returns the new
    // address, or 0 if no destination space with room could be found.
    PolyObject *FindNewAddress(PolyObject *obj, POLYUNSIGNED L, LocalMemSpace *srcSpace);
    // Supplied by the subclass: find a space with room for "length" words.
    virtual LocalMemSpace *FindSpace(POLYUNSIGNED length, bool isMutable) = 0;
protected:
    bool objectCopied;  // Set by FindNewAddress: true if this call performed the copy.
    bool rootScan;      // True for the initial single-threaded root scan.
};
81
// Scanner used for the initial root scan.  Its FindSpace spreads the
// copied data across the mutable and immutable areas so that the
// parallel scanning threads get balanced work.
class RootScanner: public QuickGCScanner
{
public:
    RootScanner(): QuickGCScanner(true), mutableSpace(0), immutableSpace(0) {}
private:
    virtual LocalMemSpace *FindSpace(POLYUNSIGNED length, bool isMutable);
    // Most recently used destination space of each kind.
    LocalMemSpace *mutableSpace, *immutableSpace;
};
90
// Scanner used by each GC worker thread.  A thread takes exclusive
// ownership of the spaces it copies into so it can update their
// allocation pointers without further locking.
class ThreadScanner: public QuickGCScanner
{
public:
    ThreadScanner(GCTaskId* id): QuickGCScanner(false), taskID(id), mutableSpace(0), immutableSpace(0),
        spaceTable(0), nOwnedSpaces(0) {}
    virtual ~ThreadScanner() { free(spaceTable); }

    // Scan the spaces this thread owns until no unscanned data remains,
    // then release ownership of them.
    void ScanOwnedAreas(void);
private:
    virtual LocalMemSpace *FindSpace(POLYUNSIGNED length, bool isMutable);
    // Add a space to the owned set.  Must be called with localTableLock held.
    bool TakeOwnership(LocalMemSpace *space);

    GCTaskId *taskID;
    // Preferred destination space of each kind for small allocations.
    LocalMemSpace *mutableSpace, *immutableSpace;
    // realloc-grown table of the spaces this thread currently owns.
    LocalMemSpace **spaceTable;
    unsigned nOwnedSpaces;
};
108
// This is used when scanning code areas.  If there are no mutable cells left we can clear
// the mutable bit and we don't have to scan it again.
class CodeCheck: public ScanAddress
{
public:
    CodeCheck(): foundMutable(false) {}
    // Leave addresses unchanged: this scanner only inspects, never copies.
    virtual PolyObject *ScanObjectAddress(PolyObject *base) { return base; }
    // Record whether any object in the scanned region is still mutable.
    virtual void ScanAddressesInObject(PolyObject *base, POLYUNSIGNED lengthWord)
        { if (OBJ_IS_MUTABLE_OBJECT(lengthWord)) foundMutable = true;  }
    bool foundMutable;
};
120
121// This uses the conditional exchange instruction to check and update
122// the forwarding pointer.  It uses a lock prefix so that if another
123// thread has updated it in the meantime it will not set it.
124// Using the assembly code provides a very small speed-up so may not
125// be worth-while.
126#if defined(_MSC_VER) && (_MSC_VER >= 1600)
127// In later versions of MS C we can use the intrinsic.
128// 1600 is Visual Studio 2010.  It may well work in older versions
129#   include <intrin.h>
130#   pragma intrinsic(_InterlockedCompareExchange)
131#   if (SIZEOF_VOIDP == 8)
132#       define InterlockedCompareExchange64 _InterlockedCompareExchange64
133#   else
134#       define InterlockedCompareExchange   _InterlockedCompareExchange
135#   endif
136#endif
137
// Atomically install a forwarding pointer in an object's length word
// (which lives at pt[-1]), but only if the word still contains testVal.
// Returns true if this call made the update, false if another thread won
// the race.  "space" is only used by the lock-based fallback path.
static bool atomiclySetForwarding(LocalMemSpace *space, POLYUNSIGNED *pt,
                                  POLYUNSIGNED testVal, POLYUNSIGNED update)
{
#ifdef _MSC_VER
# if (SIZEOF_VOIDP == 8)
    // 64-bit Windows: use the 64-bit interlocked compare-exchange intrinsic.
    LONGLONG *address = (LONGLONG*)(pt-1);
    POLYUNSIGNED result = InterlockedCompareExchange64(address, update, testVal);
    return result == testVal;
# else
    LONG *address = (LONG*)(pt-1);
    POLYUNSIGNED result = InterlockedCompareExchange(address, update, testVal);
    return result == testVal;
# endif
#elif((defined(HOSTARCHITECTURE_X86) || defined(HOSTARCHITECTURE_X32)) && defined(__GNUC__))
    // 32-bit x86 GCC/Clang: lock cmpxchg.  %eax holds testVal on entry and
    // the previous memory value on exit, so result == testVal iff we swapped.
    POLYUNSIGNED result;
    __asm__ __volatile__ (
        "lock; cmpxchgl %1,%2"
        :"=a"(result)
        :"r"(update),"m"(pt[-1]),"0"(testVal)
        :"memory", "cc"
    );
    return result == testVal;
#elif(defined(HOSTARCHITECTURE_X86_64) && defined(__GNUC__))
    // 64-bit x86 GCC/Clang: same pattern with the quad-word instruction.
    POLYUNSIGNED result;
    __asm__ __volatile__ (
        "lock; cmpxchgq %1,%2"
        :"=a"(result)
        :"r"(update),"m"(pt[-1]),"0"(testVal)
        :"memory", "cc"
    );
    return result == testVal;
#else
    // Fallback on other targets: serialise through the space's lock
    // instead of a hardware compare-exchange.
    PLocker lock(&space->spaceLock);
    if (pt[-1] == testVal)
    {
        pt[-1] = update;
        return true;
    }
    return false;
#endif
}
180
// Copy an object out of an allocation area.  obj is its current address,
// L the length word as loaded by the caller and srcSpace the space that
// contains it.  Returns the new address of the object, or 0 if there was
// insufficient space.  Sets objectCopied to say whether this call
// performed the copy or found that another thread had already moved it.
PolyObject *QuickGCScanner::FindNewAddress(PolyObject *obj, POLYUNSIGNED L, LocalMemSpace *srcSpace)
{
    bool isMutable = OBJ_IS_MUTABLE_OBJECT(L);
    POLYUNSIGNED n = OBJ_OBJECT_LENGTH(L);
    LocalMemSpace *lSpace = FindSpace(n, isMutable);
    if (lSpace == 0)
        return 0; // Unable to move it.
    // The tentative new address: one word above the allocation pointer,
    // leaving room for the length word.
    PolyObject *newObject = (PolyObject*)(lSpace->lowerAllocPtr+1);

    // It's possible that another thread may have actually copied the
    // object since we loaded the length word so we check it again.
    // If this is a mutable we must ensure that checking the forwarding
    // pointer here and updating it if necessary is atomic.  We don't need
    // to do that for immutable data so there is a small chance that an
    // object may be copied twice.  That's not a problem for immutable data.
    // Also lock this if it's code.  This may not be necessary but code objects
    // are rare. Updating the addresses in code objects is complicated and
    // it's possible that there are assumptions somewhere that there's only one
    // copy.
    // Avoiding locking for immutables provides only a small speed-up so may not
    // be worth-while.
    if (isMutable || OBJ_IS_CODE_OBJECT(L))
    {
        if (! atomiclySetForwarding(srcSpace, (POLYUNSIGNED*)obj, L, OBJ_SET_POINTER(newObject)))
        {
            // Lost the race: another thread installed the forwarding pointer.
            newObject = obj->GetForwardingPtr();
            if (debugOptions & DEBUG_GC_DETAIL)
                Log("GC: Quick: %p %lu %u has already moved to %p\n", obj, n, GetTypeBits(L), newObject);
            objectCopied = false;
            return newObject;
        }
    }
    else
    {
        // Immutable, non-code: an unsynchronised check is sufficient (see above).
        if (obj->ContainsForwardingPtr())
        {
            newObject = obj->GetForwardingPtr();
            if (debugOptions & DEBUG_GC_DETAIL)
                Log("GC: Quick: %p %lu %u has already moved to %p\n", obj, n, GetTypeBits(L), newObject);
            objectCopied = false;
            return newObject;
        }
        else obj->SetForwardingPtr(newObject);
    }

    // We won the right to copy: commit the allocation and do the copy.
    lSpace->lowerAllocPtr += n+1;
    CopyObjectToNewAddress(obj, newObject, L);
    objectCopied = true;
    return newObject;
}
231
232// When scanning the roots we want to distribute the data among the immutable and mutable areas
233// so that the work is distributed for the scanning threads.
234LocalMemSpace *RootScanner::FindSpace(POLYUNSIGNED n, bool isMutable)
235{
236    LocalMemSpace *lSpace = isMutable ? mutableSpace : immutableSpace;
237
238    if (lSpace != 0)
239    {
240        // See if there's space in the existing area.
241        if (lSpace->freeSpace() > n /* At least n+1*/)
242            return lSpace;
243    }
244
245    // Find the space with the largest free area.
246    for (std::vector<LocalMemSpace*>::iterator i = gMem.lSpaces.begin(); i < gMem.lSpaces.end(); i++)
247    {
248        LocalMemSpace *sp = *i;
249        if (sp->isMutable == isMutable && !sp->allocationSpace &&
250                (lSpace == 0 || sp->freeSpace() > lSpace->freeSpace()))
251            lSpace = sp;
252    }
253
254    if (lSpace != 0 && lSpace->freeSpace() > n)
255    {
256        if (isMutable) mutableSpace = lSpace; else immutableSpace = lSpace;
257        return lSpace;
258    }
259
260    return gHeapSizeParameters.AddSpaceInMinorGC(n+1, isMutable);
261}
262
// When scanning within a thread we don't want to be searching the space table.
// Search order: the cached space, then the spaces this thread already owns,
// then an unowned space it can claim, and finally a newly created space.
// Returns 0 if every alternative fails.
LocalMemSpace *ThreadScanner::FindSpace(POLYUNSIGNED n, bool isMutable)
{
    LocalMemSpace *lSpace = isMutable ? mutableSpace : immutableSpace;

    if (lSpace != 0)
    {
        // See if there's space in the existing area.
        if (lSpace->freeSpace() > n /* At least n+1*/)
            return lSpace;
    }

    // Try the other spaces this thread already owns.
    for (unsigned i = 0; i < nOwnedSpaces; i++)
    {
        lSpace = spaceTable[i];
        if (lSpace->isMutable == isMutable &&
            ! lSpace->allocationSpace && lSpace->freeSpace() > n /* At least n+1*/)
        {
            if (n < 10)
            {
                // We use this space for further allocations unless we are trying to
                // allocate a "large" object.
                if (isMutable) mutableSpace = lSpace; else immutableSpace = lSpace;
            }
            return lSpace;
        }
    }

    // Another thread may allocate a new area, reallocating gMem.lSpaces,
    // so we need a lock here.
    PLocker l(&localTableLock);
    if (taskID != 0)
    {
        // See if we can take a space that is currently unused.
        for (std::vector<LocalMemSpace*>::iterator i = gMem.lSpaces.begin(); i < gMem.lSpaces.end(); i++)
        {
            lSpace = *i;
            if (lSpace->spaceOwner == 0 && lSpace->isMutable == isMutable &&
                ! lSpace->allocationSpace && lSpace->freeSpace() > n /* At least n+1*/)
            {
                if (debugOptions & DEBUG_GC_ENHANCED)
                    Log("GC: Quick: Thread %p is taking ownership of space %p\n", taskID, lSpace);
                if (! TakeOwnership(lSpace))
                    return 0; // Could not grow the ownership table.
                return lSpace;
            }
        }
    }

    // Last resort: create a brand new space and take ownership of it.
    lSpace = gHeapSizeParameters.AddSpaceInMinorGC(n+1, isMutable);
    if (lSpace != 0 && TakeOwnership(lSpace))
        return lSpace;
    return 0;
}
317
// Copy all the objects.  Called for an address within an object being
// scanned.  If the word points into an allocation area the target object
// is copied (or its forwarding pointer followed) and the word updated.
// For a plain immutable word object we just copied, we continue into its
// fields to preserve some locality rather than scanning breadth-first.
POLYUNSIGNED QuickGCScanner::ScanAddressAt(PolyWord *pt)
{
    // n/pt describe the words still to process; following a pointer
    // resets them to the fields of the newly copied object.
    POLYUNSIGNED n = 1; // Set up the loop to process one word at *pt
    pt++;

    while (n-- != 0)
    {
        PolyWord val = *(--pt);
        if (! val.IsTagged())
        {
            // NOTE(review): the -1 appears to handle an address exactly at a
            // space boundary — confirm against LocalSpaceForAddress's contract.
            LocalMemSpace *space = gMem.LocalSpaceForAddress(val.AsStackAddr()-1);

            // We only copy it if it is in a local allocation space and not in the
            // "overflow" area of data that could not copied by the last full GC.
            if (space != 0 && space->allocationSpace && val.AsAddress() <= space->upperAllocPtr)
            {
                // We shouldn't get code addresses since we handle code
                // segments separately so if this isn't an integer it must be an object address.
                ASSERT(OBJ_IS_DATAPTR(val));

                PolyObject *obj = val.AsObjPtr();
                // Load the length word without any interlock.  We can't assume that
                // another thread won't also copy this at the same time.
                POLYUNSIGNED L = obj->LengthWord();

                // Has it been moved already? N.B.  Another thread may be in the process of
                // moving it so the new object may not be fully copied.
                if (OBJ_IS_POINTER(L))
                    *pt = OBJ_GET_POINTER(L);
                else
                {
                    // We need to copy this object.
                    PolyObject *newObject = FindNewAddress(obj, L, space); // New address of object.

                    if (newObject == 0) { // Couldn't copy it - not enough space.
                        succeeded = false;

                        if (debugOptions & DEBUG_GC_DETAIL)
                            Log("GC: Quick: Insufficient space to move %p %lu %u\n",
                                obj, OBJ_OBJECT_LENGTH(L), GetTypeBits(L));

                        return 0;
                    }

                    *pt = newObject; // Update the pointer to the object
                    // N.B.  If another thread has just copied it "newObject" may actually
                    // be an address in another thread's space.  In that case "objectCopied"
                    // will be false.

                    if (debugOptions & DEBUG_GC_DETAIL)
                        Log("GC: Quick: %p %lu %u moved to %p\n", obj, OBJ_OBJECT_LENGTH(L), GetTypeBits(L), newObject);

                    // Stop now unless this is a simple word object we have been able to move.
                    // Also stop if we're just scanning the roots.
                    if (! rootScan && newObject != obj && ! OBJ_IS_MUTABLE_OBJECT(L) &&
                        GetTypeBits(L) == 0 && objectCopied)
                    {
                        // We can simply return zero in which case this performs a breadth-first scan.
                        // A breadth-first scan distributes the objects through the memory so
                        // to retain some degree of locality we try to copy some object pointed at
                        // by this one.  We work from the end back so that we follow the tail pointers
                        // for lists.
                        n = OBJ_OBJECT_LENGTH(L); // Object length
                        pt = (PolyWord*)newObject + n;
                    }
                }
            }
        }
    }
    // We've reached the end without finding a pointer to follow
    return 0;
}
391
// The initial entry to process the roots.  Also used when processing the addresses
// in objects that can't be handled by ScanAddressAt.
PolyObject *QuickGCScanner::ScanObjectAddress(PolyObject *base)
{
    // Wrap the object address in a local PolyWord so ScanAddressAt can
    // update it in place if the object is moved.
    PolyWord val = base;
    // Scan this as an address.
    (void)QuickGCScanner::ScanAddressAt(&val);
    // Ignore the result of ScanAddressAt which is always zero and
    // just return the updated address.
    return val.AsObjPtr();
}
403
404// Add this to the set of spaces we own.  Must be called with the
405// localTableLock held.
406bool ThreadScanner::TakeOwnership(LocalMemSpace *space)
407{
408    ASSERT(space->spaceOwner == 0);
409    LocalMemSpace **v = (LocalMemSpace**)realloc(spaceTable, (nOwnedSpaces+1)*sizeof(LocalMemSpace*));
410    if (v == 0)
411        return false;
412    spaceTable = v;
413    space->spaceOwner = taskID;
414    spaceTable[nOwnedSpaces++] = space;
415    return true;
416}
417
418// Thread function to scan an area.  It scans the addresses in the region
419// copying any objects from the allocation area into mutable or immutable
420// areas it owns.  It then processes all the areas it owns until there
421// are no further addresses to scan.
422static void scanArea(GCTaskId *id, void *arg1, void *arg2)
423{
424    ThreadScanner marker(id);
425    marker.ScanAddressesInRegion((PolyWord*)arg1, (PolyWord*)arg2);
426    marker.ScanOwnedAreas();
427}
428
// Process every space this thread owns until none of them has unscanned
// data.  Scanning an object may copy more objects into an owned space
// (advancing lowerAllocPtr), so we loop until partialGCScan has caught up
// with lowerAllocPtr in all of them.  Finally release ownership so other
// threads can use the spaces.
void ThreadScanner::ScanOwnedAreas()
{
    while (true)
    {
        bool allDone = true;
        // We're finished when there is no unscanned data in any space we own.
        for (unsigned k = 0; k < nOwnedSpaces && allDone; k++)
        {
            LocalMemSpace *space = spaceTable[k];
            allDone = space->partialGCScan == space->lowerAllocPtr;
        }
        if (allDone)
            break;

        // Scan each area that has had data added to it.
        for (unsigned l = 0; l < nOwnedSpaces; l++)
        {
            LocalMemSpace *space = spaceTable[l];
            // Scan the area.  This may well result in more data being added
            while (space->partialGCScan < space->lowerAllocPtr)
            {
                // Is the queue draining?  If so it's probably worth creating
                // some spare work.
                if (gpTaskFarm->Draining() && gpTaskFarm->ThreadCount() > 1)
                {
                    PolyWord *mid =
                        space->partialGCScan + (space->lowerAllocPtr - space->partialGCScan)/2;
                    // Split the space in two.  Walk object by object so the
                    // split point lands on an object boundary.
                    PolyWord *p = space->partialGCScan;
                    while (p < mid)
                    {
                        PolyObject *o = (PolyObject*)(p+1);
                        ASSERT(o->ContainsNormalLengthWord());
                        p += o->Length()+1;
                    }
                    // Start a new task to scan the area up to the half-way point.
                    // Because we round up to the end of the next object we may
                    // include the whole area but that's probably better because
                    // we may have other areas to scan.
                    if (gpTaskFarm->AddWork(scanArea, space->partialGCScan, p))
                    {
                        space->partialGCScan = p;
                        if (space->lowerAllocPtr == space->partialGCScan)
                            break;
                    }
                }
                // Scan the next object in place and advance the scan pointer.
                PolyObject *obj = (PolyObject*)(space->partialGCScan+1);
                ASSERT(obj->ContainsNormalLengthWord());
                POLYUNSIGNED length = obj->Length();
                ASSERT(space->partialGCScan+length+1 <= space->lowerAllocPtr);
                space->partialGCScan += length+1;
                if (length != 0)
                    ScanAddressesInObject(obj);
                // If any thread has run out of space we should stop.
                if (! succeeded)
                    return;
            }
        }
    }
    // Release the spaces we're holding in case another thread wants to use them.
    for (unsigned m = 0; m < nOwnedSpaces; m++)
    {
        LocalMemSpace *space = spaceTable[m];
        space->spaceOwner = 0;
    }
    nOwnedSpaces = 0;
}
496
// Main entry point for the quick (minor) GC.  Copies the live data out of
// the allocation areas into the mutable and immutable areas, first with a
// single-threaded root scan and then with parallel scanning tasks.
// Returns true on success (and the allocation areas are then empty);
// false means a full GC must be run instead.
bool RunQuickGC(const POLYUNSIGNED wordsRequiredToAllocate)
{
    // If the last minor GC took too long force a full GC.
    if (gHeapSizeParameters.RunMajorGCImmediately())
        return false;

    gHeapSizeParameters.RecordGCTime(HeapSizeParameters::GCTimeStart);
    globalStats.incCount(PSC_GC_PARTIALGC);
    mainThreadPhase = MTP_GCQUICK;
    succeeded = true; // Cleared by any scanner that runs out of room.

    if (debugOptions & DEBUG_GC)
        Log("GC: Beginning quick GC\n");

    if (debugOptions & DEBUG_HEAPSIZE)
        gMem.ReportHeapSizes("Minor GC (before)");

    POLYUNSIGNED spaceBeforeGC = 0;

    // Initialise the per-space GC markers.
    for(std::vector<LocalMemSpace*>::iterator i = gMem.lSpaces.begin(); i < gMem.lSpaces.end(); i++)
    {
        LocalMemSpace *lSpace = *i;
        ASSERT (lSpace->top >= lSpace->upperAllocPtr);
        ASSERT (lSpace->upperAllocPtr >= lSpace->lowerAllocPtr);
        ASSERT (lSpace->lowerAllocPtr >= lSpace->bottom);
        // Remember the top before we started this GC.  It's
        // only relevant for mutable areas.  It avoids us rescanning
        // objects that may have been added to the space as a result of
        // scanning another space.
        if (lSpace->isMutable)
            lSpace->partialGCTop = lSpace->upperAllocPtr;
        else lSpace->partialGCTop = lSpace->top;
        // If we're scanning a space this is where we start.
        // For immutable areas this only includes newly added
        // data but for mutable areas we have to scan data added
        // by previous partial GCs.
        if (lSpace->isMutable && ! lSpace->allocationSpace)
            lSpace->partialGCRootBase = lSpace->bottom;
        else lSpace->partialGCRootBase = lSpace->lowerAllocPtr;
        lSpace->spaceOwner = 0; // Not currently owned
        // Add up the space in the mutable and immutable areas
        if (! lSpace->allocationSpace)
            spaceBeforeGC += lSpace->allocatedSpace();
    }

    // First scan the roots, copying the data into the mutable and immutable areas.
    RootScanner rootScan;
    // Scan the permanent mutable areas.  This could be parallelised but it doesn't
    // appear to be worthwhile at the moment.
    for (std::vector<PermanentMemSpace*>::iterator i = gMem.pSpaces.begin(); i < gMem.pSpaces.end(); i++)
    {
        PermanentMemSpace *space = *i;
        if (space->isMutable && ! space->byteOnly)
            rootScan.ScanAddressesInRegion(space->bottom, space->top);
    }
    // Scan code spaces.
    for (std::vector<CodeSpace *>::iterator i = gMem.cSpaces.begin(); i < gMem.cSpaces.end(); i++)
    {
        CodeSpace *space = *i;
        // Spaces are mutable if any object has been added to the area since the last GC.
        if (space->isMutable)
        {
            rootScan.ScanAddressesInRegion(space->bottom, space->top);
            // Check to see if any of the objects are still mutable.  If they are
            // we are still building the code and must rescan it on the next GC.
            // If there aren't we don't need to unless another code object is added.
            CodeCheck codeCheck;
            codeCheck.ScanAddressesInRegion(space->bottom, space->top);
            space->isMutable = codeCheck.foundMutable;
        }
    }

    // Scan RTS addresses.  This will include the thread stacks.
    GCModules(&rootScan);

    // At this point the immutable and mutable areas will have some root objects
    // in the space between partialGCRootBase (the old value of lowerAllocPtr) and
    // lowerAllocPtr.  These will contain the addresses of objects in the allocation
    // areas.  We need to scan these root objects and then any new objects we copy
    // until there are no objects left to scan.
    // We also need to scan local mutable areas since these are roots as well.
    // They have data between partialGCTop and top.  Parallelising this appears
    // to be a significant gain.
    // We have to be careful about the pointers here.  AddWorkOrRunNow begins
    // a thread immediately and so the scanning threads may be running while
    // we are still creating new tasks.  To avoid tripping up we use separate
    // pointers to the root objects rather than using lowerAllocPtr and
    // partialGCScan because these can be modified by the scanning tasks.
    // It's also possible for new spaces to be added to the table by the scanning
    // tasks while we are still adding tasks.  It is important that the values of
    // partialGCRootBase, partialGCRootTop and partialGCTop are properly initialised
    // for these new spaces.
    for (std::vector<LocalMemSpace*>::iterator i = gMem.lSpaces.begin(); i < gMem.lSpaces.end(); i++)
    {
        LocalMemSpace *space = *i;
        space->partialGCRootTop = space->lowerAllocPtr; // Top of the roots
        space->partialGCScan = space->lowerAllocPtr; // Start of scanning for new data.
    }

    // Now start creating tasks.  From this point only a thread that owns a space
    // may read or modify lowerAllocPtr or partialGCScan.
    {
        unsigned l = 0;
        while (true)
        {
            LocalMemSpace *space;
            {
                // There is a chance that a thread that has already been forked may
                // allocate a new space and realloc gMem.lSpaces.  We have to drop
                // the lock before calling AddWorkOrRunNow in case we "run now".
                PLocker lock(&localTableLock);
                if (l >= gMem.lSpaces.size())
                    break;
                space = gMem.lSpaces[l++];
            }
            // One task for the root objects copied into this space and one
            // for the pre-existing mutable data (which is also a root set).
            if (space->partialGCRootBase != space->partialGCRootTop)
                gpTaskFarm->AddWorkOrRunNow(scanArea, space->partialGCRootBase, space->partialGCRootTop);
            if (space->partialGCTop != space->top)
                gpTaskFarm->AddWorkOrRunNow(scanArea, space->partialGCTop, space->top);
        }
    }

    gpTaskFarm->WaitForCompletion();

    POLYUNSIGNED spaceAfterGC = 0;

    if (succeeded)
    {
        globalStats.setSize(PSS_AFTER_LAST_GC, 0);
        globalStats.setSize(PSS_ALLOCATION, 0);
        globalStats.setSize(PSS_ALLOCATION_FREE, 0);
        // If it succeeded the allocation areas are now empty.
        for(std::vector<LocalMemSpace*>::iterator i = gMem.lSpaces.begin(); i < gMem.lSpaces.end(); i++)
        {
            LocalMemSpace *lSpace = *i;
            POLYUNSIGNED free;
            if (lSpace->allocationSpace)
            {
                lSpace->lowerAllocPtr = lSpace->bottom;
                free = lSpace->freeSpace();
#ifdef FILL_UNUSED_MEMORY
                // This provides extra checking if we have dangling pointers
                memset(lSpace->bottom, 0xaa, (char*)lSpace->upperAllocPtr - (char*)lSpace->bottom);
#endif
                globalStats.incSize(PSS_ALLOCATION, free*sizeof(PolyWord));
                globalStats.incSize(PSS_ALLOCATION_FREE, free*sizeof(PolyWord));
            }
            else free = lSpace->freeSpace();

            if (debugOptions & DEBUG_GC_ENHANCED)
                Log("GC: %s space %p %" PRI_SIZET " free in %" PRI_SIZET " words %2.1f%% full\n", lSpace->spaceTypeString(),
                    lSpace, lSpace->freeSpace(), lSpace->spaceSize(),
                    ((float)lSpace->allocatedSpace()) * 100 / (float)lSpace->spaceSize());
            globalStats.incSize(PSS_AFTER_LAST_GC, free*sizeof(PolyWord));
            spaceAfterGC += lSpace->allocatedSpace();
        }

        // Even though the copy succeeded the original request may still not fit.
        if (! gMem.CheckForAllocation(wordsRequiredToAllocate))
            succeeded = false;
    }

    if (succeeded)
    {
        gHeapSizeParameters.RecordGCTime(HeapSizeParameters::GCTimeEnd);

        if (! gHeapSizeParameters.AdjustSizeAfterMinorGC(spaceAfterGC, spaceBeforeGC)) // Adjust the allocation size.
            return false; // If necessary trigger a full GC immediately
        gHeapSizeParameters.resetMinorTimingData();
        // Remove allocation spaces that are larger than the default
        // and any excess over the current size of the allocation area.
        gMem.RemoveExcessAllocation();

        if (debugOptions & DEBUG_HEAPSIZE)
            gMem.ReportHeapSizes("Minor GC (after)");

        if (debugOptions & DEBUG_GC)
            Log("GC: Completed successfully\n");

        CheckMemory();
    }
    else
    {
        // There was insufficient room to copy everything.  We will need to
        // run a full GC.
        gHeapSizeParameters.RecordGCTime(HeapSizeParameters::GCTimeEnd);
        if (debugOptions & DEBUG_GC)
            Log("GC: Quick GC failed\n");
    }

    return succeeded;
}
688