/*
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *  Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 *
 */

#include "config.h"
#include "MarkedSpace.h"

#include "DelayedReleaseScope.h"
#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"
#include "JSLock.h"
#include "JSObject.h"
#include "JSCInlines.h"

namespace JSC {

class Structure;

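// Block functor handed to forEachBlock() during teardown and shrinking: depending on
// the mode it either frees every block outright (FreeAll) or frees empty blocks and
// shrinks the rest (FreeOrShrink), delegating to MarkedSpace::freeBlock/freeOrShrinkBlock.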
class Free {
public:
    typedef MarkedBlock* ReturnType;

    enum FreeMode { FreeOrShrink, FreeAll };

    Free(FreeMode, MarkedSpace*);
    void operator()(MarkedBlock*);
    ReturnType returnValue();

private:
    FreeMode m_freeMode;
    MarkedSpace* m_markedSpace;
    DoublyLinkedList<MarkedBlock> m_blocks;
};

inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
    : m_freeMode(freeMode)
    , m_markedSpace(newSpace)
{
}

inline void Free::operator()(MarkedBlock* block)
{
    if (m_freeMode == FreeOrShrink)
        m_markedSpace->freeOrShrinkBlock(block);
    else
        m_markedSpace->freeBlock(block);
}

inline Free::ReturnType Free::returnValue()
{
    return m_blocks.head();
}

struct VisitWeakSet : MarkedBlock::VoidFunctor {
    VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
    void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
private:
    HeapRootVisitor& m_heapRootVisitor;
};

struct ReapWeakSet : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->reapWeakSet(); }
};

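// Size classes up to preciseCutoff advance in preciseStep increments, and classes up to
// impreciseCutoff advance in impreciseStep increments. Each size class gets three
// allocators: no destructor, normal destructor, and immortal-structure destructor.
// Cells too big for any size class are handled by the per-subspace large allocators,
// which are initialized with a cell size of 0.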
MarkedSpace::MarkedSpace(Heap* heap)
    : m_heap(heap)
    , m_capacity(0)
    , m_isIterating(false)
    , m_currentDelayedReleaseScope(nullptr)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
    m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
    m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
}

MarkedSpace::~MarkedSpace()
{
    Free free(Free::FreeAll, this);
    forEachBlock(free);
    ASSERT(!m_blocks.set().size());
}

struct LastChanceToFinalize {
    void operator()(MarkedAllocator& allocator) { allocator.lastChanceToFinalize(); }
};

void MarkedSpace::lastChanceToFinalize()
{
    DelayedReleaseScope delayedReleaseScope(*this);
    stopAllocating();
    forEachAllocator<LastChanceToFinalize>();
}

void MarkedSpace::sweep()
{
    if (Options::logGC())
        dataLog("Eagerly sweeping...");
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<Sweep>();
}

void MarkedSpace::zombifySweep()
{
    if (Options::logGC())
        dataLog("Zombifying sweep...");
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<ZombifySweep>();
}

void MarkedSpace::resetAllocators()
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    m_normalSpace.largeAllocator.reset();
    m_normalDestructorSpace.largeAllocator.reset();
    m_immortalStructureDestructorSpace.largeAllocator.reset();

#if ENABLE(GGC)
    m_blocksWithNewObjects.clear();
#endif
}

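// During an Eden collection only blocks that received new objects since the last
// collection need their weak sets visited; a full collection walks every block.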
void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
{
    VisitWeakSet visitWeakSet(heapRootVisitor);
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            visitWeakSet(m_blocksWithNewObjects[i]);
    } else
        forEachBlock(visitWeakSet);
}

void MarkedSpace::reapWeakSets()
{
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            m_blocksWithNewObjects[i]->reapWeakSet();
    } else
        forEachBlock<ReapWeakSet>();
}

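// Applies the functor to every allocator: each precise and imprecise size class in all
// three subspaces, followed by the three large allocators.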
template <typename Functor>
void MarkedSpace::forEachAllocator()
{
    Functor functor;
    forEachAllocator(functor);
}

template <typename Functor>
void MarkedSpace::forEachAllocator(Functor& functor)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        functor(allocatorFor(cellSize));
        functor(normalDestructorAllocatorFor(cellSize));
        functor(immortalStructureDestructorAllocatorFor(cellSize));
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        functor(allocatorFor(cellSize));
        functor(normalDestructorAllocatorFor(cellSize));
        functor(immortalStructureDestructorAllocatorFor(cellSize));
    }

    functor(m_normalSpace.largeAllocator);
    functor(m_normalDestructorSpace.largeAllocator);
    functor(m_immortalStructureDestructorSpace.largeAllocator);
}

struct StopAllocatingFunctor {
    void operator()(MarkedAllocator& allocator) { allocator.stopAllocating(); }
};

void MarkedSpace::stopAllocating()
{
    ASSERT(!isIterating());
    forEachAllocator<StopAllocatingFunctor>();
}

struct ResumeAllocatingFunctor {
    void operator()(MarkedAllocator& allocator) { allocator.resumeAllocating(); }
};

void MarkedSpace::resumeAllocating()
{
    ASSERT(isIterating());
    forEachAllocator<ResumeAllocatingFunctor>();
}

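// Returns true if any allocator reports that its blocks appear to be paged out before
// the given deadline.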
bool MarkedSpace::isPagedOut(double deadline)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    if (m_normalSpace.largeAllocator.isPagedOut(deadline)
        || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
        || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
        return true;

    return false;
}

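// Detaches the block from its allocator and from this space's block set, then hands its
// memory back to the heap's block allocator. Standard-sized blocks return to the shared
// pool; custom-sized blocks are deallocated separately.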
void MarkedSpace::freeBlock(MarkedBlock* block)
{
    block->allocator()->removeBlock(block);
    m_capacity -= block->capacity();
    m_blocks.remove(block);
    if (block->capacity() == MarkedBlock::blockSize) {
        m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
        return;
    }
    m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
}

void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
{
    if (!block->isEmpty()) {
        block->shrink();
        return;
    }

    freeBlock(block);
}

struct Shrink : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->shrink(); }
};

void MarkedSpace::shrink()
{
    Free freeOrShrink(Free::FreeOrShrink, this);
    forEachBlock(freeOrShrink);
}

static void clearNewlyAllocatedInBlock(MarkedBlock* block)
{
    if (!block)
        return;
    block->clearNewlyAllocated();
}

struct ClearNewlyAllocated : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->clearNewlyAllocated(); }
};

#ifndef NDEBUG
struct VerifyNewlyAllocated : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { ASSERT(!block->clearNewlyAllocated()); }
};
#endif

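// Clears the per-block newly-allocated bits. For the sized allocators, only each
// allocator's last active block is cleared; the large allocators are iterated in full
// (see the comment below), and the debug-only check at the end verifies no bitmap was missed.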
void MarkedSpace::clearNewlyAllocated()
{
    for (size_t i = 0; i < preciseCount; ++i) {
        clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_normalDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
    }

    for (size_t i = 0; i < impreciseCount; ++i) {
        clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_normalDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
    }

    // We have to iterate all of the blocks in the large allocators because they are
    // canonicalized as they are used up (see MarkedAllocator::tryAllocateHelper)
    // which creates the m_newlyAllocated bitmap.
    ClearNewlyAllocated functor;
    m_normalSpace.largeAllocator.forEachBlock(functor);
    m_normalDestructorSpace.largeAllocator.forEachBlock(functor);
    m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor);

#ifndef NDEBUG
    VerifyNewlyAllocated verifyFunctor;
    forEachBlock(verifyFunctor);
#endif
}

#ifndef NDEBUG
struct VerifyMarkedOrRetired : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block)
    {
        switch (block->m_state) {
        case MarkedBlock::Marked:
        case MarkedBlock::Retired:
            return;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
};
#endif

void MarkedSpace::clearMarks()
{
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            m_blocksWithNewObjects[i]->clearMarks();
    } else
        forEachBlock<ClearMarks>();

#ifndef NDEBUG
    VerifyMarkedOrRetired verifyFunctor;
    forEachBlock(verifyFunctor);
#endif
}

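// Iterating the space requires allocation to be paused; these two calls bracket any such
// iteration and flip m_isIterating accordingly.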
void MarkedSpace::willStartIterating()
{
    ASSERT(!isIterating());
    stopAllocating();
    m_isIterating = true;
}

void MarkedSpace::didFinishIterating()
{
    ASSERT(isIterating());
    DelayedReleaseScope scope(*this);
    resumeAllocating();
    m_isIterating = false;
}

} // namespace JSC