1/*
2 * Copyright (c) 2004-2007 Apple Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24#include "objc-private.h"
25
26#include "objc-config.h"
27#include "objc-auto.h"
28#include "objc-accessors.h"
29
30#ifndef OBJC_NO_GC
31
32#include <stdint.h>
33#include <stdbool.h>
34#include <fcntl.h>
35#include <dlfcn.h>
36#include <mach/mach.h>
37#include <mach-o/dyld.h>
38#include <mach-o/nlist.h>
39#include <sys/types.h>
40#include <sys/mman.h>
41#include <libkern/OSAtomic.h>
42#include <auto_zone.h>
43
44#include <Block_private.h>
45#include <dispatch/private.h>
46
47#include "objc-private.h"
48#include "objc-references.h"
49#include "maptable.h"
50#include "message.h"
51#include "objc-gdb.h"
52
53#if !defined(NDEBUG)  &&  !__OBJC2__
54#include "objc-exception.h"
55#endif
56
57
// Forward declarations for GC-zone setup and registered-class checks (defined below).
static auto_zone_t *gc_zone_init(void);
static void gc_block_init(void);
static void registeredClassTableInit(void);
static BOOL objc_isRegisteredClass(Class candidate);

// GC state flag: starts at -1 (not yet decided); presumably set to 0/1 during
// runtime initialization elsewhere — code below treats nonzero as "GC on".
int8_t UseGC = -1;
// Becomes YES once any class asks for main-thread-only finalization
// (see objc_finalizeOnMainThread).
static BOOL WantsMainThreadFinalization = NO;

// The collected (auto) zone; nil until gc_zone_init() has run.
auto_zone_t *gc_zone = nil;
67
68
/* Method prototypes */
// Dummy interface declaring selectors used in this file (e.g. -description and
// -UTF8String in batchFinalize's exception logging) without importing Foundation.
// The class is never instantiated.
@interface DoesNotExist
- (const char *)UTF8String;
- (id)description;
@end
74
75
/***********************************************************************
* Break-on-error functions
* Empty functions that exist solely as breakpoint targets: the runtime
* calls them when it detects the corresponding error so a debugger can
* stop exactly at the fault.
**********************************************************************/

// Called when a write barrier target (base+offset) is outside the GC zone.
BREAKPOINT_FUNCTION(
    void objc_assign_ivar_error(id base, ptrdiff_t offset)
);

// Called when an already-collected object is stored into global memory.
BREAKPOINT_FUNCTION(
    void objc_assign_global_error(id value, id *slot)
);

// Called when a -finalize implementation throws an exception.
BREAKPOINT_FUNCTION(
    void objc_exception_during_finalize_error(void)
);
91
/***********************************************************************
* Utility exports
* Called by various libraries.
**********************************************************************/

// Old naming. Sets the allocation threshold that triggers a collection.
// No effect when GC is off.
OBJC_EXPORT void objc_set_collection_threshold(size_t threshold) {
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->collection_threshold = threshold;
}
102
// Sets the allocation threshold that triggers a collection. No-op without GC.
OBJC_EXPORT void objc_setCollectionThreshold(size_t threshold) {
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->collection_threshold = threshold;
}
108
// Sets how many generational collections run per full collection.
// No-op without GC.
void objc_setCollectionRatio(size_t ratio) {
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
}
114
// Old naming; same behavior as objc_setCollectionRatio.
void objc_set_collection_ratio(size_t ratio) {
    if (!UseGC) return;
    auto_collection_parameters(gc_zone)->full_vs_gen_frequency = ratio;
}
120
// Marks cls so its instances are finalized only on the main thread, and
// records globally that such classes exist. No-op without GC.
void objc_finalizeOnMainThread(Class cls) {
    if (!UseGC) return;
    WantsMainThreadFinalization = YES;
    cls->setShouldFinalizeOnMainThread();
}
127
// stack based data structure queued if/when there is main-thread-only finalization work TBD
// Each block lives on the enqueuing (collector) thread's stack; that thread
// blocks until `finished` is set, so the storage stays valid while queued.
typedef struct BatchFinalizeBlock {
    auto_zone_foreach_object_t foreach;   // collector-supplied enumerator
    auto_zone_cursor_t cursor;            // opaque position within the garbage list
    size_t cursor_size;
    volatile BOOL finished;               // set by main thread when its share is done
    volatile BOOL started;                // set by main thread when it begins this block
    struct BatchFinalizeBlock *next;      // singly-linked FIFO
} BatchFinalizeBlock_t;

// The Main Thread Finalization Work Queue Head
// All fields are protected by `mutex`; `condition` is used both to signal
// completed blocks and to wake the main thread for new work.
static struct {
    pthread_mutex_t mutex;
    pthread_cond_t condition;
    BatchFinalizeBlock_t *head;
    BatchFinalizeBlock_t *tail;
} MainThreadWorkQ;
145
146
// Intentionally empty; presumably retained for binary compatibility now that
// the collector no longer needs an explicit thread start — TODO confirm.
void objc_startCollectorThread(void) {
}
149
// Old naming; intentionally empty like objc_startCollectorThread.
void objc_start_collector_thread(void) {
}
152
static void batchFinalizeOnMainThread(void);

// Request a garbage collection. `options` selects the collection mode
// (ratio/generational/full/exhaustive in the low 2 bits) and modifiers
// (OBJC_COLLECT_IF_NEEDED, OBJC_WAIT_UNTIL_DONE). No-op without GC.
void objc_collect(unsigned long options) {
    if (!UseGC) return;
    BOOL onMainThread = pthread_main_np() ? YES : NO;

    // while we're here, sneak off and do some finalization work (if any)
    if (onMainThread) batchFinalizeOnMainThread();
    // now on with our normally scheduled programming
    auto_zone_options_t amode = AUTO_ZONE_COLLECT_NO_OPTIONS;
    if (!(options & OBJC_COLLECT_IF_NEEDED)) {
        // Translate the requested mode into the corresponding libauto mode.
        switch (options & 0x3) {
            case OBJC_RATIO_COLLECTION:        amode = AUTO_ZONE_COLLECT_RATIO_COLLECTION;        break;
            case OBJC_GENERATIONAL_COLLECTION: amode = AUTO_ZONE_COLLECT_GENERATIONAL_COLLECTION; break;
            case OBJC_FULL_COLLECTION:         amode = AUTO_ZONE_COLLECT_FULL_COLLECTION;         break;
            case OBJC_EXHAUSTIVE_COLLECTION:   amode = AUTO_ZONE_COLLECT_EXHAUSTIVE_COLLECTION;   break;
        }
        amode |= AUTO_ZONE_COLLECT_COALESCE;
        amode |= AUTO_ZONE_COLLECT_LOCAL_COLLECTION;
    }
    if (options & OBJC_WAIT_UNTIL_DONE) {
        __block BOOL done = NO;
        // If executing on the main thread, use the main thread work queue condition to block,
        // so main thread finalization can complete. Otherwise, use a thread-local condition.
        pthread_mutex_t localMutex = PTHREAD_MUTEX_INITIALIZER, *mutex = &localMutex;
        pthread_cond_t localCondition = PTHREAD_COND_INITIALIZER, *condition = &localCondition;
        if (onMainThread) {
            mutex = &MainThreadWorkQ.mutex;
            condition = &MainThreadWorkQ.condition;
        }
        pthread_mutex_lock(mutex);
        // The completion callback runs on a global dispatch queue and flips
        // `done` under the same mutex before signaling.
        auto_zone_collect_and_notify(gc_zone, amode, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            pthread_mutex_lock(mutex);
            done = YES;
            pthread_cond_signal(condition);
            pthread_mutex_unlock(mutex);
        });
        while (!done) {
            pthread_cond_wait(condition, mutex);
            // On the main thread the shared condition may also be signaled for
            // queued main-thread-only finalization work; service it so the
            // collection (which may be waiting on that work) can complete.
            if (onMainThread && MainThreadWorkQ.head) {
                pthread_mutex_unlock(mutex);
                batchFinalizeOnMainThread();
                pthread_mutex_lock(mutex);
            }
        }
        pthread_mutex_unlock(mutex);
    } else {
        // Asynchronous request; returns immediately.
        auto_zone_collect(gc_zone, amode);
    }
}
203
204
// USED BY CF & ONE OTHER
// Returns whether `object` is a pointer into the collected zone.
BOOL objc_isAuto(id object)
{
    if (!UseGC) return NO;
    return auto_zone_is_valid_pointer(gc_zone, object) != 0;
}
210
211
// Reports whether garbage collection is active.
BOOL objc_collectingEnabled(void)
{
    BOOL enabled = UseGC;
    return enabled;
}
216
// Old naming; same answer as objc_collectingEnabled.
BOOL objc_collecting_enabled(void)
{
    BOOL enabled = UseGC;
    return enabled;
}
221
// Exposes the collected zone as a malloc zone for callers that need it.
malloc_zone_t *objc_collectableZone(void) {
    malloc_zone_t *zone = gc_zone;
    return zone;
}
225
// Dump the GC heap to a generated filename and, if `filenamebuffer` is
// non-NULL, copy the filename back to the caller (truncating to empty if
// `length` is too small). Returns NO if the underlying dump fails.
// Note: `counter` makes successive dump filenames distinct but is not
// thread-safe; concurrent callers could observe the same value.
BOOL objc_dumpHeap(char *filenamebuffer, unsigned long length) {
    static int counter = 0;
    ++counter;
    char buffer[1024];
    // snprintf bounds the expansion; the previous sprintf could overflow
    // buffer if the format ever produced more than 1024 bytes.
    snprintf(buffer, sizeof(buffer), OBJC_HEAP_DUMP_FILENAME_FORMAT, getpid(), counter);
    if (!_objc_dumpHeap(gc_zone, buffer)) return NO;
    if (filenamebuffer) {
        unsigned long blen = strlen(buffer);
        if (blen < length) {
            memcpy(filenamebuffer, buffer, blen+1);  // +1 copies the NUL too
        } else if (length > 0) {
            filenamebuffer[0] = 0;  // give some answer
        }
    }
    return YES;
}
241
242
/***********************************************************************
* Memory management.
* Called by CF and Foundation.
**********************************************************************/

// Allocate an instance of cls (in the GC zone when GC is on) with `extra`
// additional bytes beyond the instance size.
id objc_allocate_object(Class cls, int extra)
{
    id instance = class_createInstance(cls, extra);
    return instance;
}
253
254
/***********************************************************************
* Write barrier implementations, optimized for when GC is known to be on
* Called by the write barrier exports only.
* These implementations assume GC is on. The exported function must
* either perform the check itself or be conditionally stomped at
* startup time.
**********************************************************************/

// Store `value` into `*slot` with a write barrier, for slots of unknown
// location (heap, stack, or global). Returns value.
id objc_assign_strongCast_gc(id value, id *slot) {
    if (!auto_zone_set_write_barrier(gc_zone, (void*)slot, value)) {    // stores & returns true if slot points into GC allocated memory
        // Slot is outside GC memory (e.g. a global); use the root barrier.
        auto_zone_root_write_barrier(gc_zone, slot, value);     // always stores
    }
    return value;
}
269
// Store `value` into global memory at `*slot`, registering the slot as a GC
// root when the value is GC-managed. Returns value.
id objc_assign_global_gc(id value, id *slot) {
    // use explicit root registration.
    if (value && auto_zone_is_valid_pointer(gc_zone, value)) {
        // Storing an object that has already been finalized indicates
        // resurrection; warn and hit the breakpoint function.
        if (auto_zone_is_finalized(gc_zone, value)) {
            _objc_inform("GC: storing an already collected object %p into global memory at %p, break on objc_assign_global_error to debug\n", (void*)value, slot);
            objc_assign_global_error(value, slot);
        }
        // auto_zone_add_root both registers the root and performs the store.
        auto_zone_add_root(gc_zone, slot, value);
    }
    else
        // nil or non-GC value: plain store, no root registration needed.
        *slot = value;

    return value;
}
284
// Store `value` into thread-local storage at `*slot`, registering the slot
// as a GC root when the value is GC-managed. Returns value.
id objc_assign_threadlocal_gc(id value, id *slot)
{
    if (value == nil || !auto_zone_is_valid_pointer(gc_zone, value)) {
        // nil or non-GC pointer: a plain store suffices.
        *slot = value;
        return value;
    }
    // auto_zone_add_root registers the root and performs the store.
    auto_zone_add_root(gc_zone, slot, value);
    return value;
}
296
// Store `value` into the instance variable at base+offset with a write
// barrier. Returns value.
id objc_assign_ivar_gc(id value, id base, ptrdiff_t offset)
{
    id *slot = (id*) ((char *)base + offset);

    if (value == nil) {
        // Storing nil needs no barrier.
        *slot = value;
        return value;
    }
    // The barrier stores and returns true only when the destination lies in
    // GC memory; otherwise report the stray ivar store.
    if (!auto_zone_set_write_barrier(gc_zone, (char *)base + offset, value)) {
        _objc_inform("GC: %p + %tu isn't in the auto_zone, break on objc_assign_ivar_error to debug.\n", (void*)base, offset);
        objc_assign_ivar_error(base, offset);
    }
    return value;
}
312
// Non-GC variant: a plain store, no barrier.
id objc_assign_strongCast_non_gc(id value, id *slot) {
    *slot = value;
    return value;
}
316
// Non-GC variant: a plain store, no root registration.
id objc_assign_global_non_gc(id value, id *slot) {
    *slot = value;
    return value;
}
320
// Non-GC variant: a plain store.
id objc_assign_threadlocal_non_gc(id value, id *slot) {
    *slot = value;
    return value;
}
324
// Non-GC variant: compute the ivar address and store directly.
id objc_assign_ivar_non_gc(id value, id base, ptrdiff_t offset) {
    id *slot = (id*) ((char *)base + offset);
    *slot = value;
    return value;
}
329
330
/***********************************************************************
* Non-trivial write barriers
**********************************************************************/

// memmove that keeps the collector informed of pointer stores when GC is on;
// falls back to plain memmove otherwise.
void *objc_memmove_collectable(void *dst, const void *src, size_t size)
{
    return UseGC ? auto_zone_write_barrier_memmove(gc_zone, dst, src, size)
                 : memmove(dst, src, size);
}
343
// Atomic CAS on an object slot (no memory barrier). GC path routes through
// libauto so the store is barrier-aware.
BOOL objc_atomicCompareAndSwapPtr(id predicate, id replacement, volatile id *objectLocation) {
    void *expected = (void *)predicate;
    void *desired = (void *)replacement;
    void * volatile *where = (void * volatile *)objectLocation;
    if (!UseGC)
        return OSAtomicCompareAndSwapPtr(expected, desired, where);
    return auto_zone_atomicCompareAndSwapPtr(gc_zone, expected, desired, where, NO /* no barrier */);
}
351
// Atomic CAS on an object slot, with a full memory barrier.
BOOL objc_atomicCompareAndSwapPtrBarrier(id predicate, id replacement, volatile id *objectLocation) {
    void *expected = (void *)predicate;
    void *desired = (void *)replacement;
    void * volatile *where = (void * volatile *)objectLocation;
    if (!UseGC)
        return OSAtomicCompareAndSwapPtrBarrier(expected, desired, where);
    return auto_zone_atomicCompareAndSwapPtr(gc_zone, expected, desired, where, YES /* barrier */);
}
359
// Atomic CAS on a global slot (registered as a root in the GC path),
// without a memory barrier.
BOOL objc_atomicCompareAndSwapGlobal(id predicate, id replacement, volatile id *objectLocation) {
    void *expected = (void *)predicate;
    void *desired = (void *)replacement;
    void * volatile *where = (void * volatile *)objectLocation;
    if (!UseGC)
        return OSAtomicCompareAndSwapPtr(expected, desired, where);
    return auto_zone_atomicCompareAndSwap(gc_zone, expected, desired, where, YES /* global */, NO /* no barrier */);
}
368
// Atomic CAS on a global slot, with a full memory barrier.
BOOL objc_atomicCompareAndSwapGlobalBarrier(id predicate, id replacement, volatile id *objectLocation) {
    void *expected = (void *)predicate;
    void *desired = (void *)replacement;
    void * volatile *where = (void * volatile *)objectLocation;
    if (!UseGC)
        return OSAtomicCompareAndSwapPtrBarrier(expected, desired, where);
    return auto_zone_atomicCompareAndSwap(gc_zone, expected, desired, where, YES /* global */, YES /* barrier */);
}
377
// Atomic CAS on an instance-variable slot (not a root), without a barrier.
BOOL objc_atomicCompareAndSwapInstanceVariable(id predicate, id replacement, volatile id *objectLocation) {
    void *expected = (void *)predicate;
    void *desired = (void *)replacement;
    void * volatile *where = (void * volatile *)objectLocation;
    if (!UseGC)
        return OSAtomicCompareAndSwapPtr(expected, desired, where);
    return auto_zone_atomicCompareAndSwap(gc_zone, expected, desired, where, NO /* not global */, NO /* no barrier */);
}
386
// Atomic CAS on an instance-variable slot, with a full memory barrier.
BOOL objc_atomicCompareAndSwapInstanceVariableBarrier(id predicate, id replacement, volatile id *objectLocation) {
    void *expected = (void *)predicate;
    void *desired = (void *)replacement;
    void * volatile *where = (void * volatile *)objectLocation;
    if (!UseGC)
        return OSAtomicCompareAndSwapPtrBarrier(expected, desired, where);
    return auto_zone_atomicCompareAndSwap(gc_zone, expected, desired, where, NO /* not global */, YES /* barrier */);
}
395
396
/***********************************************************************
* Weak ivar support
**********************************************************************/

// Read a weak reference through libauto so the referent (if still alive)
// is kept alive for the caller. A nil slot short-circuits.
id objc_read_weak_gc(id *location) {
    id current = *location;
    if (current == nil) return nil;
    return (id)auto_read_weak_reference(gc_zone, (void **)location);
}
408
// Non-GC variant: a plain load.
id objc_read_weak_non_gc(id *location) {
    id value = *location;
    return value;
}
412
// Register (or clear, when value is nil) a weak reference at `location`.
id objc_assign_weak_gc(id value, id *location) {
    const void **slot = (const void **)location;
    auto_assign_weak_reference(gc_zone, value, slot, nil);
    return value;
}
417
// Non-GC variant: a plain store.
id objc_assign_weak_non_gc(id value, id *location) {
    *location = value;
    return value;
}
421
422
// After copying an object's bytes from oldObject to newObject, re-register
// every weak ivar so libauto tracks the new locations instead of the old.
// Walks the class's weak ivar layout: each layout byte encodes a nibble pair
// (high nibble = word-sized slots to skip, low nibble = consecutive weak
// slots); a zero byte terminates the layout.
void gc_fixup_weakreferences(id newObject, id oldObject) {
    // fix up weak references if any.
    const unsigned char *weakLayout = (const unsigned char *)class_getWeakIvarLayout(newObject->ISA());
    if (weakLayout) {
        void **newPtr = (void **)newObject, **oldPtr = (void **)oldObject;
        unsigned char byte;
        while ((byte = *weakLayout++)) {
            unsigned skips = (byte >> 4);
            unsigned weaks = (byte & 0x0F);
            newPtr += skips, oldPtr += skips;
            while (weaks--) {
                // Clear the raw copied value, then re-establish the weak
                // reference from the old slot into the new slot.
                *newPtr = nil;
                auto_assign_weak_reference(gc_zone, auto_read_weak_reference(gc_zone, oldPtr), (const void **)newPtr, nil);
                ++newPtr, ++oldPtr;
            }
        }
    }
}
441
442
/***********************************************************************
* dyld resolver functions for basic GC write barriers
* dyld calls the resolver function to bind the symbol.
* We return the GC or non-GC variant as appropriate.
**********************************************************************/

// Emits a dyld symbol resolver for `name`: at bind time the resolver picks
// name_gc or name_non_gc based on UseGC, so callers pay no per-call check.
// The __asm__ lines give the resolver the exported symbol's name and mark it
// with the .symbol_resolver directive dyld recognizes.
#define GC_RESOLVER(name)                                       \
    OBJC_EXPORT void *name##_resolver(void) __asm__("_" #name); \
    void *name##_resolver(void)                                 \
    {                                                           \
        __asm__(".symbol_resolver _" #name);                    \
        if (UseGC) return (void*)name##_gc;                     \
        else return (void*)name##_non_gc;                       \
    }

GC_RESOLVER(objc_assign_ivar)
GC_RESOLVER(objc_assign_strongCast)
GC_RESOLVER(objc_assign_global)
GC_RESOLVER(objc_assign_threadlocal)
GC_RESOLVER(objc_read_weak)
GC_RESOLVER(objc_assign_weak)
GC_RESOLVER(objc_getProperty)
GC_RESOLVER(objc_setProperty)
GC_RESOLVER(objc_getAssociatedObject)
GC_RESOLVER(objc_setAssociatedObject)
GC_RESOLVER(_object_addExternalReference)
GC_RESOLVER(_object_readExternalReference)
GC_RESOLVER(_object_removeExternalReference)
471
472
/***********************************************************************
* Testing tools
* Used to isolate resurrection of garbage objects during finalization.
**********************************************************************/
// Returns whether ptr refers to an object currently being finalized.
BOOL objc_is_finalized(void *ptr) {
    if (ptr == nil || !UseGC) return NO;
    return auto_zone_is_finalized(gc_zone, ptr);
}
483
484
/***********************************************************************
* Stack clearing.
* Used by top-level thread loops to reduce false pointers from the stack.
**********************************************************************/
// NOTE(review): `options` is currently ignored — 0 is always passed to
// auto_zone_clear_stack; presumably reserved for future use.
void objc_clear_stack(unsigned long options) {
    if (!UseGC) return;
    auto_zone_clear_stack(gc_zone, 0);
}
493
494
495/***********************************************************************
496* Finalization support
497**********************************************************************/
498
// Finalizer crash debugging
// Holds the object currently being finalized so a crash during -finalize
// can be attributed; nil when no finalization is in flight.
static void *finalizing_object;

// finalize a single object without fuss
// When there are no main-thread-only classes this is used directly
// Otherwise, it is used indirectly by smarter code that knows main-thread-affinity requirements
static void finalizeOneObject(void *obj, void *ignored) {
    id object = (id)obj;
    finalizing_object = obj;

    Class cls = object->ISA();
    // Record the class name in the crash log in case -finalize crashes.
    CRSetCrashLogMessage2(class_getName(cls));

    // call -finalize method.
    ((void(*)(id, SEL))objc_msgSend)(object, @selector(finalize));

    // Call C++ destructors.
    // This would be objc_destructInstance() but for performance.
    if (cls->hasCxxDtor()) {
        object_cxxDestruct(object);
    }

    finalizing_object = nil;
    CRSetCrashLogMessage2(nil);
}
524
// finalize object only if it is a main-thread-only object.
// Called only from the main thread.
static void finalizeOneMainThreadOnlyObject(void *obj, void *arg) {
    id object = (id)obj;
    Class cls = object->ISA();
    if (cls == nil) {
        _objc_fatal("object with nil ISA passed to finalizeOneMainThreadOnlyObject:  %p\n", obj);
    }
    // Skip objects that may be finalized anywhere; another pass handles them.
    if (!cls->shouldFinalizeOnMainThread()) return;
    finalizeOneObject(obj, nil);
}
537
// finalize one object only if it is not a main-thread-only object
// called from any other thread than the main thread
// Important: if a main-thread-only object is passed, return that fact in the needsMain argument
static void finalizeOneAnywhereObject(void *obj, void *needsMain) {
    id object = (id)obj;
    Class cls = object->ISA();
    if (cls == nil) {
        _objc_fatal("object with nil ISA passed to finalizeOneAnywhereObject:  %p\n", obj);
    }
    if (cls->shouldFinalizeOnMainThread()) {
        // Leave it for the main thread; just report that such work exists.
        *(bool *)needsMain = true;
    } else {
        finalizeOneObject(obj, nil);
    }
}
555
556
// Utility workhorse.
// Set up the expensive @try block and ask the collector to hand the next object to
// our finalizeAnObject function.
// Track and return a boolean that records whether or not any main thread work is necessary.
// (When we know that there are no main thread only objects then the boolean isn't even computed)
static bool batchFinalize(auto_zone_t *zone,
                          auto_zone_foreach_object_t foreach,
                          auto_zone_cursor_t cursor,
                          size_t cursor_size,
                          void (*finalizeAnObject)(void *, void*))
{
#if !defined(NDEBUG)  &&  !__OBJC2__
    // debug: don't call try/catch before exception handlers are installed
    objc_exception_functions_t table = {};
    objc_exception_get_functions(&table);
    assert(table.throw_exc);
#endif

    bool needsMainThreadWork = false;
    // The cursor tracks position inside `foreach`, so after an exception the
    // loop re-enters and resumes with the next garbage object.
    for (;;) {
        @try {
            foreach(cursor, finalizeAnObject, &needsMainThreadWork);
            // non-exceptional return means finalization is complete.
            break;
        }
        @catch (id exception) {
            // whoops, note exception, then restart at cursor's position
            _objc_inform("GC: -finalize resulted in an exception (%p) being thrown, break on objc_exception_during_finalize_error to debug\n\t%s", exception, (const char*)[[exception description] UTF8String]);
            objc_exception_during_finalize_error();
        }
        @catch (...) {
            // whoops, note exception, then restart at cursor's position
            _objc_inform("GC: -finalize resulted in an exception being thrown, break on objc_exception_during_finalize_error to debug");
            objc_exception_during_finalize_error();
        }
    }
    return needsMainThreadWork;
}
595
// Called on main thread-only.
// Pick up work from global queue.
// called parasitically by anyone requesting a collection
// called explicitly when there is known to be main thread only finalization work
// In both cases we are on the main thread
// Guard against recursion by something called from a finalizer
static void batchFinalizeOnMainThread() {
    pthread_mutex_lock(&MainThreadWorkQ.mutex);
    if (!MainThreadWorkQ.head || MainThreadWorkQ.head->started) {
        // No work or we're already here
        pthread_mutex_unlock(&MainThreadWorkQ.mutex);
        return;
    }
    while (MainThreadWorkQ.head) {
        BatchFinalizeBlock_t *bfb = MainThreadWorkQ.head;
        // Mark started while holding the lock so a recursive entry bails out
        // above; drop the lock during the (potentially long) finalization.
        bfb->started = YES;
        pthread_mutex_unlock(&MainThreadWorkQ.mutex);

        batchFinalize(gc_zone, bfb->foreach, bfb->cursor, bfb->cursor_size, finalizeOneMainThreadOnlyObject);
        // signal the collector thread(s) that finalization has finished.
        pthread_mutex_lock(&MainThreadWorkQ.mutex);
        bfb->finished = YES;
        pthread_cond_broadcast(&MainThreadWorkQ.condition);
        // Unlink; bfb lives on the waiting collector thread's stack and is
        // released once that thread sees `finished`.
        MainThreadWorkQ.head = bfb->next;
    }
    MainThreadWorkQ.tail = nil;
    pthread_mutex_unlock(&MainThreadWorkQ.mutex);
}
624
625
// Knowing that we possibly have main thread only work to do, first process everything
// that is not main-thread-only.  If we discover main thread only work, queue a work block
// to the main thread that will do just the main thread only work.  Wait for it.
// Called from a non main thread.
static void batchFinalizeOnTwoThreads(auto_zone_t *zone,
                                         auto_zone_foreach_object_t foreach,
                                         auto_zone_cursor_t cursor,
                                         size_t cursor_size)
{
    // First, lets get rid of everything we can on this thread, then ask main thread to help if needed
    // Work on a copy of the cursor so the original can be replayed on the
    // main thread from the same starting position.
    char cursor_copy[cursor_size];
    memcpy(cursor_copy, cursor, cursor_size);
    bool needsMainThreadFinalization = batchFinalize(zone, foreach, (auto_zone_cursor_t)cursor_copy, cursor_size, finalizeOneAnywhereObject);

    if (! needsMainThreadFinalization)
        return;     // no help needed

    // set up the control block.  Either our ping of main thread with _callOnMainThread will get to it, or
    // an objc_collect(if_needed) will get to it.  Either way, this block will be processed on the main thread.
    // bfb is stack-allocated; safe because this thread blocks below until the
    // main thread sets bfb.finished.
    BatchFinalizeBlock_t bfb;
    bfb.foreach = foreach;
    bfb.cursor = cursor;
    bfb.cursor_size = cursor_size;
    bfb.started = NO;
    bfb.finished = NO;
    bfb.next = nil;
    pthread_mutex_lock(&MainThreadWorkQ.mutex);
    if (MainThreadWorkQ.tail) {

        // link to end so that ordering of finalization is preserved.
        MainThreadWorkQ.tail->next = &bfb;
        MainThreadWorkQ.tail = &bfb;
    }
    else {
        MainThreadWorkQ.head = &bfb;
        MainThreadWorkQ.tail = &bfb;
    }
    pthread_mutex_unlock(&MainThreadWorkQ.mutex);

    //printf("----->asking main thread to finalize\n");
    dispatch_async(dispatch_get_main_queue(), ^{ batchFinalizeOnMainThread(); });

    // wait for the main thread to finish finalizing instances of classes marked CLS_FINALIZE_ON_MAIN_THREAD.
    pthread_mutex_lock(&MainThreadWorkQ.mutex);
    while (!bfb.finished) {
        // the main thread might be blocked waiting for a synchronous collection to complete, so wake it here
        pthread_cond_signal(&MainThreadWorkQ.condition);
        pthread_cond_wait(&MainThreadWorkQ.condition, &MainThreadWorkQ.mutex);
    }
    pthread_mutex_unlock(&MainThreadWorkQ.mutex);
    //printf("<------ main thread finalize done\n");

}
679
680
681
// collector calls this with garbage ready
// thread collectors, too, so this needs to be thread-safe
static void BatchInvalidate(auto_zone_t *zone,
                                         auto_zone_foreach_object_t foreach,
                                         auto_zone_cursor_t cursor,
                                         size_t cursor_size)
{
    bool onMainThread = pthread_main_np() != 0;
    if (!onMainThread && WantsMainThreadFinalization) {
        // We're on the dedicated thread and main-thread-only classes exist:
        // split the work between this thread and the main thread.
        batchFinalizeOnTwoThreads(zone, foreach, cursor, cursor_size);
        return;
    }
    // Either we're on the main thread, or no main-thread-only objects have
    // been allocated — finalize everything right here.
    batchFinalize(zone, foreach, cursor, cursor_size, finalizeOneObject);
}
700
701
/*
 * Zombie support
 * Collector calls into this system when it finds resurrected objects.
 * This keeps them pitifully alive and leaked, even if they reference garbage.
 */

// idea:  keep a side table mapping resurrected object pointers to their original Class, so we don't
// need to smash anything. alternatively, could use associative references to track against a secondary
// object with information about the resurrection, such as a stack crawl, etc.

// The runtime-built zombie class installed on resurrected instances.
static Class _NSResurrectedObjectClass;
// object pointer -> original Class; guarded by _NSResurrectedObjectLock.
static NXMapTable *_NSResurrectedObjectMap = nil;
static pthread_mutex_t _NSResurrectedObjectLock = PTHREAD_MUTEX_INITIALIZER;
715
// Look up the class an object had before it was turned into a zombie.
static Class resurrectedObjectOriginalClass(id object) {
    pthread_mutex_lock(&_NSResurrectedObjectLock);
    Class originalClass = (Class) NXMapGet(_NSResurrectedObjectMap, object);
    pthread_mutex_unlock(&_NSResurrectedObjectLock);
    return originalClass;
}
723
// IMP installed for any class message sent to the zombie class: returns self.
static id _NSResurrectedObject_classMethod(id self, SEL selector) { return self; }
725
// IMP installed for any instance message sent to a zombie: log the message
// (with the object's original class) and return self.
static id _NSResurrectedObject_instanceMethod(id self, SEL name) {
    Class original = resurrectedObjectOriginalClass(self);
    _objc_inform("**resurrected** object %p of class %s being sent message '%s'\n", (void*)self, class_getName(original), sel_getName(name));
    return self;
}
730
// -finalize for zombies: drop the side-table entry, log, and forward to the
// root implementation.
static void _NSResurrectedObject_finalize(id self, SEL _cmd) {
    Class originalClass;
    pthread_mutex_lock(&_NSResurrectedObjectLock);
    originalClass = (Class) NXMapRemove(_NSResurrectedObjectMap, self);
    pthread_mutex_unlock(&_NSResurrectedObjectLock);
    if (originalClass) _objc_inform("**resurrected** object %p of class %s being finalized\n", (void*)self, class_getName(originalClass));
    _objc_rootFinalize(self);
}
739
// +resolveInstanceMethod: — lazily install the catch-all instance IMP for
// whatever selector was sent. "@@:" encodes id-returning, (id, SEL) args.
static BOOL _NSResurrectedObject_resolveInstanceMethod(id self, SEL _cmd, SEL name) {
    class_addMethod((Class)self, name, (IMP)_NSResurrectedObject_instanceMethod, "@@:");
    return YES;
}
744
// +resolveClassMethod: — lazily install the catch-all class IMP on the
// metaclass for whatever selector was sent.
static BOOL _NSResurrectedObject_resolveClassMethod(id self, SEL _cmd, SEL name) {
    class_addMethod(self->ISA(), name, (IMP)_NSResurrectedObject_classMethod, "@@:");
    return YES;
}
749
// One-time setup: create the side table and build/register the
// _NSResurrectedObject class (NSObject subclass) with its -finalize and the
// two dynamic-method resolvers ("c@::" = BOOL return, (id, SEL, SEL) args).
static void _NSResurrectedObject_initialize() {
    _NSResurrectedObjectMap = NXCreateMapTable(NXPtrValueMapPrototype, 128);
    _NSResurrectedObjectClass = objc_allocateClassPair(objc_getClass("NSObject"), "_NSResurrectedObject", 0);
    class_addMethod(_NSResurrectedObjectClass, @selector(finalize), (IMP)_NSResurrectedObject_finalize, "v@:");
    Class metaClass = _NSResurrectedObjectClass->ISA();
    class_addMethod(metaClass, @selector(resolveInstanceMethod:), (IMP)_NSResurrectedObject_resolveInstanceMethod, "c@::");
    class_addMethod(metaClass, @selector(resolveClassMethod:), (IMP)_NSResurrectedObject_resolveClassMethod, "c@::");
    objc_registerClassPair(_NSResurrectedObjectClass);
}
759
// Collector callback for a resurrected object: remember its original class in
// the side table, then swap its isa to the zombie class so future messages
// are logged instead of acting on garbage. Idempotent for already-zombied
// objects.
static void resurrectZombie(auto_zone_t *zone, void *ptr) {
    id object = (id) ptr;
    Class cls = object->ISA();
    if (cls != _NSResurrectedObjectClass) {
        // remember the original class for this instance.
        pthread_mutex_lock(&_NSResurrectedObjectLock);
        NXMapInsert(_NSResurrectedObjectMap, ptr, cls);
        pthread_mutex_unlock(&_NSResurrectedObjectLock);
        object_setClass(object, _NSResurrectedObjectClass);
    }
}
771
/***********************************************************************
* Pretty printing support
* For development purposes.
**********************************************************************/


static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount);

// Collector callback: describe an address, without retain-count info.
static char* objc_name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset)
{
    const int withRetainCount = false;
    return name_for_address(zone, base, offset, withRetainCount);
}
784
// Collector callback: the object's class name, or "" if its first word is
// not a registered class pointer.
static const char* objc_name_for_object(auto_zone_t *zone, void *object) {
    Class cls = *(Class *)object;
    return objc_isRegisteredClass(cls) ? class_getName(cls) : "";
}
790
/***********************************************************************
* Collection support
**********************************************************************/

static BOOL objc_isRegisteredClass(Class candidate);

// Collector callback: the strong-ivar layout for the object at `address`,
// or nil if its first word is not a registered class pointer.
static const unsigned char *objc_layout_for_address(auto_zone_t *zone, void *address) {
    id object = (id)address;
    // volatile read of the class pointer — presumably to force a single,
    // un-reordered load while the collector scans; verify before changing.
    volatile void *clsptr = (void*)object->ISA();
    Class cls = (Class)clsptr;
    return objc_isRegisteredClass(cls) ? _object_getIvarLayout(cls, object) : nil;
}
803
// Collector callback: the weak-ivar layout for the object at `address`,
// or nil if its first word is not a registered class pointer.
static const unsigned char *objc_weak_layout_for_address(auto_zone_t *zone, void *address) {
    id object = (id)address;
    // volatile read of the class pointer — same rationale as
    // objc_layout_for_address; verify before changing.
    volatile void *clsptr = (void*)object->ISA();
    Class cls = (Class)clsptr;
    return objc_isRegisteredClass(cls) ? class_getWeakIvarLayout(cls) : nil;
}
810
// Tell libauto about a newly loaded data segment so its globals are scanned.
void gc_register_datasegment(uintptr_t base, size_t size) {
    void *start = (void *)base;
    auto_zone_register_datasegment(gc_zone, start, size);
}
814
// Remove an unloaded data segment from libauto's scanned set.
void gc_unregister_datasegment(uintptr_t base, size_t size) {
    void *start = (void *)base;
    auto_zone_unregister_datasegment(gc_zone, start, size);
}
818
819
820/***********************************************************************
821* Initialization
822**********************************************************************/
823
// auto_zone callback fired when the heap is about to grow.
// Kicks off a coalescing ratio collection, unless a collection is
// already in flight.
static void objc_will_grow(auto_zone_t *zone, auto_heap_growth_info_t info) {
    if (!auto_zone_is_collecting(gc_zone)) {
        auto_zone_collect(gc_zone, AUTO_ZONE_COLLECT_COALESCE|AUTO_ZONE_COLLECT_RATIO_COLLECTION);
    }
}
832
833
// Create and configure the collector's zone.
// Side effect (first call only): initializes the queue used for batched
// main-thread finalization.
static auto_zone_t *gc_zone_init(void)
{
    auto_zone_t *result;
    static int didOnce = 0;
    if (!didOnce) {
        didOnce = 1;

        // initialize the batch finalization queue
        MainThreadWorkQ.head = nil;
        MainThreadWorkQ.tail = nil;
        pthread_mutex_init(&MainThreadWorkQ.mutex, nil);
        pthread_cond_init(&MainThreadWorkQ.condition, nil);
    }

    result = auto_zone_create("auto_zone");

    auto_zone_disable_compaction(result);

    // Install the runtime's callbacks into libauto's control structure.
    auto_collection_control_t *control = auto_collection_parameters(result);

    // set up the magic control parameters
    control->batch_invalidate = BatchInvalidate;
    control->will_grow = objc_will_grow;
    control->resurrect = resurrectZombie;
    control->layout_for_address = objc_layout_for_address;
    control->weak_layout_for_address = objc_weak_layout_for_address;
    control->name_for_address = objc_name_for_address;

    // name_for_object is a later addition to the control struct; only
    // install it when the libauto we linked against is new enough to
    // have that field (version is a size-in-bytes style check).
    if (control->version >= sizeof(auto_collection_control_t)) {
        control->name_for_object = objc_name_for_object;
    }

    return result;
}
868
869
870/* should be defined in /usr/local/include/libdispatch_private.h. */
871extern void (*dispatch_begin_thread_4GC)(void);
872extern void (*dispatch_end_thread_4GC)(void);
873
// Eagerly reclaim the calling thread's thread-local blocks.
// Handed to libdispatch as its end-of-thread hook; no-op when GC is off.
static void objc_reapThreadLocalBlocks()
{
    if (!UseGC) return;
    auto_zone_reap_all_local_blocks(gc_zone);
}
878
// Public API: register the calling thread with the collector so its
// stack and registers are scanned. No-op when GC is off.
void objc_registerThreadWithCollector()
{
    if (!UseGC) return;
    auto_zone_register_thread(gc_zone);
}
883
// Public API: unregister the calling thread from the collector.
// No-op when GC is off.
void objc_unregisterThreadWithCollector()
{
    if (!UseGC) return;
    auto_zone_unregister_thread(gc_zone);
}
888
// Public API: assert (via libauto) that the calling thread is registered
// with the collector. No-op when GC is off.
void objc_assertRegisteredThreadWithCollector()
{
    if (!UseGC) return;
    auto_zone_assert_thread_registered(gc_zone);
}
893
// Always called by _objcInit, even if GC is off.
// wantsGC: whether this process runs collected.  Must be called exactly
// once, before any GC-dependent runtime work (asserted below).
void gc_init(BOOL wantsGC)
{
    assert(UseGC == -1);   // -1 == "not yet decided"
    UseGC = wantsGC;

    if (PrintGC) {
        _objc_inform("GC: is %s", wantsGC ? "ON" : "OFF");
    }

    if (UseGC) {
        // Set up the GC zone
        gc_zone = gc_zone_init();

        // tell libdispatch to register its threads with the GC.
        dispatch_begin_thread_4GC = objc_registerThreadWithCollector;
        dispatch_end_thread_4GC = objc_reapThreadLocalBlocks;

        // set up the registered classes list
        // (must follow gc_zone_init: the table lives in the GC zone)
        registeredClassTableInit();

        // tell Blocks to use collectable memory.  CF will cook up the classes separately.
        gc_block_init();

        // Add GC state to crash log reports
        _objc_inform_on_crash("garbage collection is ON");
    }
}
922
923
// Called by NSObject +load to perform late GC setup
// This work must wait until after all of libSystem initializes.
// Requires gc_init(YES) to have run already (asserted below).
void gc_init2(void)
{
    assert(UseGC);

    // create the _NSResurrectedObject class used to track resurrections.
    _NSResurrectedObject_initialize();

    // tell libauto to set up its dispatch queues
    // (deferred to here because dispatch isn't usable any earlier)
    auto_collect_multithreaded(gc_zone);
}
936
// Called by Foundation.
// Historically initialized NSObject machinery; today it only exposes the
// GC zone (nil when GC is off). The callback parameter is ignored.
malloc_zone_t *objc_collect_init(int (*callback)(void) __unused)
{
    malloc_zone_t *zone = (malloc_zone_t *)gc_zone;
    return zone;
}
943
944/*
945 * Support routines for the Block implementation
946 */
947
948
// The Block runtime now needs to sometimes allocate a Block that is an Object - namely
// when it needs to have a finalizer which, for now, is only if there are C++ destructors
// in the helper function.  Hence the isObject parameter.
// Under GC a -copy message should allocate a refcount 0 block, ergo the isOne parameter.
// Allocate backing store for a Block from the collected heap.
// isObject: the Block needs finalization (it has C++ destructors in its
//           helper), so allocate it as an AUTO_OBJECT.
// isOne:    start at refcount 1 (new Block) rather than 0 (GC -copy).
static void *block_gc_alloc5(const unsigned long size, const bool isOne, const bool isObject) {
    auto_memory_type_t type = (isObject
                               ? (AUTO_OBJECT|AUTO_MEMORY_SCANNED)
                               : AUTO_MEMORY_SCANNED);
    return auto_zone_allocate_object(gc_zone, size, type, isOne, false);
}
957
// The Blocks runtime tracks refcounts above 1 itself; it only reports the
// 0->1 transition (retain) and the 1->0 transition (release) to us.
static void block_gc_setHasRefcount(const void *block, const bool hasRefcount) {
    void *ptr = (void *)block;
    if (!hasRefcount) {
        auto_zone_release(gc_zone, ptr);
        return;
    }
    auto_zone_retain(gc_zone, ptr);
}
966
// memmove variant with a GC write barrier, so the collector sees any
// pointers copied into scanned memory.
static void block_gc_memmove(void *dst, void *src, unsigned long size) {
    size_t byteCount = (size_t)size;
    auto_zone_write_barrier_memmove(gc_zone, dst, src, byteCount);
}
970
// Install GC entry points into the Blocks runtime so Block_copy and
// friends allocate from, and write-barrier into, the collected heap.
static void gc_block_init(void) {
    _Block_use_GC(
                  block_gc_alloc5,
                  block_gc_setHasRefcount,
                  // strong and weak assignment barriers; the casts adapt
                  // the runtime's id-based signatures to the Blocks ABI.
                  (void (*)(void *, void **))objc_assign_strongCast_gc,
                  (void (*)(const void *, void *))objc_assign_weak,
                  block_gc_memmove
    );
}
980
981
982/***********************************************************************
983* Track classes.
984* In addition to the global class hashtable (set) indexed by name, we
985* also keep one based purely by pointer when running under Garbage Collection.
986* This allows the background collector to race against objects recycled from TLC.
987* Specifically, the background collector can read the admin byte and see that
988* a thread local object is an object, get scheduled out, and the TLC recovers it,
989* linking it into the cache, then the background collector reads the isa field and
990* finds linkage info.  By qualifying all isa fields read we avoid this.
991**********************************************************************/
992
// This is a self-contained hash table of all classes.  The first two elements contain the (size-1) and count.
// Readers (objc_isRegisteredClass) are lock-free; writers hold the class lock.
static volatile Class *AllClasses = nil;

#define SHIFT 3          // hash = address >> SHIFT (classes are >= 8-byte aligned)
#define INITIALSIZE 512  // table entries, including the two header slots
#define REMOVED ~0ul     // tombstone marking a removed class's slot
999
// Allocate the side table used to recognize class pointers.
static void registeredClassTableInit() {
    assert(UseGC);
    // Zeroed, unscanned hunk from the GC zone for the table storage.
    // NOTE(review): the refcount argument is `true` here, yet the old
    // comment called this "refcount 0"; the grow path in
    // objc_addRegisteredClass releases the superseded table — confirm
    // the exact refcount semantics against auto_zone.h.
    uintptr_t *slots = (uintptr_t *)auto_zone_allocate_object(gc_zone, INITIALSIZE*sizeof(void *), AUTO_MEMORY_UNSCANNED, true, true);
    slots[0] = INITIALSIZE - 1;   // slot 0: capacity mask (size is a power of 2)
    slots[1] = 0;                 // slot 1: live entry count
    AllClasses = (Class *)slots;
}
1011
// Verify that a particular pointer is to a class.
// Safe from any thread anytime: lock-free linear probe over a snapshot
// of the table pointer.
static BOOL objc_isRegisteredClass(Class candidate) {
    assert(UseGC);
    // nil is never a valid ISA.
    if (candidate == nil) return NO;
    // We don't care about a race with another thread adding a class to which we randomly might have a pointer
    // Get local copy of classes so that we're immune from updates.
    // We keep the size of the list as the first element so there is no race as the list & size get updated.
    uintptr_t *allClasses = (uintptr_t *)AllClasses;
    // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
    // Slot 1 is count
    uintptr_t slot = (((uintptr_t)candidate) >> SHIFT) & allClasses[0];
    // avoid slot 0 and 1
    if (slot < 2) slot = 2;
    for(;;) {
        long int slotValue = allClasses[slot];
        if (slotValue == (long int)candidate) {
            return YES;
        }
        if (slotValue == 0) {
            // Probe chain ended at a true empty slot: not present.
            // (REMOVED tombstones do NOT terminate the chain, so entries
            // inserted past a later-removed class are still found.)
            return NO;
        }
        ++slot;
        if (slot > allClasses[0])
            slot = 2;   // skip size, count
    }
}
1040
// Utility used when growing: insert `candidate` into a fresh table via
// linear probing. Only called while rehashing into a table that is at
// most half full, so an empty slot always exists.
// Assumes the class lock is held by the caller.
static void addClassHelper(uintptr_t *table, uintptr_t candidate) {
    const uintptr_t mask = table[0];
    uintptr_t probe = (((long int)candidate) >> SHIFT) & mask;
    if (probe < 2) probe = 2;              // slots 0/1 hold mask and count
    while (table[probe] != 0) {
        ++probe;
        if (probe > mask) probe = 2;       // wrap, skipping header slots
    }
    table[probe] = candidate;
    ++table[1];                            // bump live count
}
1058
// lock held by callers
// Insert `candidate` into the AllClasses side table, growing (and
// republishing) the table when it crosses 50% utilization.
void objc_addRegisteredClass(Class candidate) {
    if (!UseGC) return;
    uintptr_t *table = (uintptr_t *)AllClasses;
    // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
    // Slot 1 is count - always non-zero
    uintptr_t slot = (((long int)candidate) >> SHIFT) & table[0];
    if (slot < 2) slot = 2;
    for(;;) {
        uintptr_t slotValue = table[slot];
        assert(slotValue != (uintptr_t)candidate);
        if (slotValue == REMOVED) {
            // Reuse a tombstone. Count is not incremented: removal never
            // decremented it, so the pair stays balanced.
            table[slot] = (long)candidate;
            return;
        }
        else if (slotValue == 0) {
            table[slot] = (long)candidate;
            if (2*++table[1] > table[0]) {  // add to count; check if we cross 50% utilization
                // grow
                uintptr_t oldSize = table[0]+1;
                uintptr_t *newTable = (uintptr_t *)auto_zone_allocate_object(gc_zone, oldSize*2*sizeof(void *), AUTO_MEMORY_UNSCANNED, true, true);
                uintptr_t i;
                newTable[0] = 2*oldSize - 1;
                // Count restarts at 0; addClassHelper re-increments it as
                // live entries (tombstones dropped) are rehashed.
                newTable[1] = 0;
                for (i = 2; i < oldSize; ++i) {
                    if (table[i] && table[i] != REMOVED)
                        addClassHelper(newTable, table[i]);
                }
                // Publish the new table; lock-free readers pick it up on
                // their next snapshot of AllClasses.
                AllClasses = (Class *)newTable;
                // let the old table be collected when other threads are no longer reading it.
                auto_zone_release(gc_zone, (void *)table);
            }
            return;
        }
        ++slot;
        if (slot > table[0])
            slot = 2;   // skip size, count
    }
}
1098
// lock held by callers
// Replace `candidate`'s slot with a tombstone so later probe chains stay
// intact. The candidate is expected to be present (asserted below).
void objc_removeRegisteredClass(Class candidate) {
    if (!UseGC) return;
    uintptr_t *table = (uintptr_t *)AllClasses;
    // Slot 0 is always the size of the list in log 2 masked terms (e.g. size - 1) where size is always power of 2
    // Slot 1 is count - always non-zero
    uintptr_t slot = (((uintptr_t)candidate) >> SHIFT) & table[0];
    if (slot < 2) slot = 2;
    for(;;) {
        uintptr_t slotValue = table[slot];
        if (slotValue == (uintptr_t)candidate) {
            table[slot] = REMOVED;  // if next slot == 0 we could set to 0 here and decr count
            return;
        }
        // Hitting an empty slot would mean candidate was never added.
        assert(slotValue != 0);
        ++slot;
        if (slot > table[0])
            slot = 2;   // skip size, count
    }
}
1119
1120
1121/***********************************************************************
1122* Debugging - support for smart printouts when errors occur
1123**********************************************************************/
1124
1125
1126static malloc_zone_t *objc_debug_zone(void)
1127{
1128    static malloc_zone_t *z = nil;
1129    if (!z) {
1130        z = malloc_create_zone(PAGE_MAX_SIZE, 0);
1131        malloc_set_zone_name(z, "objc-auto debug");
1132    }
1133    return z;
1134}
1135
// Render `value` in the given base (digits 0-9 then a-z) at `head`,
// most-significant digit first, without NUL-terminating.
// Returns the position just past the last character written.
// Iterative replacement for the original recursive formulation: digits
// are collected least-significant-first, then emitted in reverse.
static char *_malloc_append_unsigned(uintptr_t value, unsigned base, char *head) {
    char digits[sizeof(uintptr_t) * 8];   // worst case: base 2
    unsigned count = 0;
    do {
        uintptr_t digit = value % base;
        digits[count++] = (digit < 10) ? (char)('0' + digit) : (char)('a' + digit - 10);
        value /= base;
    } while (value);
    while (count > 0) {
        *head++ = digits[--count];
    }
    return head;
}
1146
// Append the decimal representation of `value` to the NUL-terminated
// string in `str`, whose buffer holds `bufSize` bytes.
// Silently does nothing when fewer than 30 bytes remain (a uintptr_t
// needs at most 20 decimal digits plus the NUL; 30 leaves margin).
static void strlcati(char *str, uintptr_t value, size_t bufSize)
{
    size_t used = strlen(str);
    // Rearranged guard: the original computed `bufSize - strlen(str)`,
    // which wraps to a huge unsigned value (and lets the append overrun
    // the buffer) whenever str already fills or exceeds bufSize.
    if (bufSize < used + 30)
        return;
    str = _malloc_append_unsigned(value, 10, str + used);
    str[0] = '\0';
}
1154
1155
// Best-effort search for the ivar of cls (or a superclass) whose storage
// covers byte `offset` within an instance. Returns nil only when cls is
// nil; otherwise returns the nearest preceding ivar (exact match when
// offsets line up, superclass's candidate when offset precedes our first
// ivar, last ivar when offset is past all of ours).
static Ivar ivar_for_offset(Class cls, vm_address_t offset)
{
    unsigned i;
    vm_address_t ivar_offset;
    Ivar super_ivar, result;
    Ivar *ivars;
    unsigned int ivar_count;

    if (!cls) return nil;

    // scan base classes FIRST
    super_ivar = ivar_for_offset(cls->superclass, offset);
    // result is best-effort; our ivars may be closer

    ivars = class_copyIvarList(cls, &ivar_count);
    if (ivars && ivar_count) {
        // Try our first ivar. If it's too big, use super's best ivar.
        // (lose 64-bit precision)
        ivar_offset = ivar_getOffset(ivars[0]);
        if (ivar_offset > offset) result = super_ivar;
        else if (ivar_offset == offset) result = ivars[0];
        else result = nil;

        // Try our other ivars. If any is too big, use the previous.
        // (relies on class_copyIvarList returning ivars in offset order)
        for (i = 1; result == nil && i < ivar_count; i++) {
            ivar_offset = ivar_getOffset(ivars[i]);
            if (ivar_offset == offset) {
                result = ivars[i];
            } else if (ivar_offset > offset) {
                result = ivars[i - 1];
            }
        }

        // Found nothing. Return our last ivar.
        if (result == nil)
            result = ivars[ivar_count - 1];

        free(ivars);
    } else {
        // No ivars of our own: fall back to the superclass's answer.
        result = super_ivar;
    }

    return result;
}
1200
// Append a ".ivarName[+delta]" suffix to buf describing which ivar of
// cls covers byte `offset` within an instance. All appends are bounded
// by bufSize via strlcat/strlcati.
static void append_ivar_at_offset(char *buf, Class cls, vm_address_t offset, size_t bufSize)
{
    if (offset == 0) return;  // the isa slot needs no annotation

    // Beyond the declared instance size: label as extra trailing bytes.
    if (offset >= class_getInstanceSize(cls)) {
        strlcat(buf, ".<extra>+", bufSize);
        strlcati(buf, offset, bufSize);
        return;
    }

    Ivar ivar = ivar_for_offset(cls, offset);
    if (ivar == nil) {
        strlcat(buf, ".<?>", bufSize);
        return;
    }

    // fixme doesn't handle structs etc.

    strlcat(buf, ".", bufSize);
    const char *name = ivar_getName(ivar);
    strlcat(buf, name ? name : "<anonymous ivar>", bufSize);

    // Interior pointer: show how far into the ivar the offset lands.
    vm_address_t delta = offset - ivar_getOffset(ivar);
    if (delta > 0) {
        strlcat(buf, "+", bufSize);
        strlcati(buf, delta, bufSize);
    }
}
1231
1232
// Look up the CoreFoundation class name for a CF object without linking
// against CF: grab the already-loaded CF image (if any) via dlopen and
// call its type-ID functions through dlsym'd pointers.
// Returns "anonymous_NSCFType" when CF isn't loaded or lookup fails.
static const char *cf_class_for_object(void *cfobj)
{
    // ick - we don't link against CF anymore

    // Hand-mirrored prefix of CF's runtime class struct.
    // NOTE(review): assumes CF's class record begins with a version word
    // followed by the className pointer — confirm against CFRuntime.h if
    // CF's layout ever changes.
    struct fake_cfclass {
        size_t version;
        const char *className;
        // don't care about the rest
    };

    const char *result;
    void *dlh;
    size_t (*CFGetTypeID)(void *);
    fake_cfclass * (*_CFRuntimeGetClassWithTypeID)(size_t);

    result = "anonymous_NSCFType";

    // RTLD_NOLOAD: only succeed if CF is already loaded in this process.
    dlh = dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", RTLD_LAZY | RTLD_NOLOAD | RTLD_FIRST);
    if (!dlh) return result;

    CFGetTypeID = (size_t(*)(void*)) dlsym(dlh, "CFGetTypeID");
    _CFRuntimeGetClassWithTypeID = (fake_cfclass*(*)(size_t)) dlsym(dlh, "_CFRuntimeGetClassWithTypeID");

    if (CFGetTypeID  &&  _CFRuntimeGetClassWithTypeID) {
        size_t cfid = (*CFGetTypeID)(cfobj);
        result = (*_CFRuntimeGetClassWithTypeID)(cfid)->className;
    }

    // Balance the dlopen refcount; the image stays loaded.
    dlclose(dlh);
    return result;
}
1264
1265
// Build a human-readable description of the GC block containing `base`
// ("ClassName.ivar+delta[size]", "{conservative-block}[size]", ...),
// optionally annotated with its retain count.
// The result is allocated from objc_debug_zone(); NOTE(review): caller
// presumably owns (and frees) it — confirm at call sites.
static char *name_for_address(auto_zone_t *zone, vm_address_t base, vm_address_t offset, int withRetainCount)
{
// Append "[<s>]" to buf; all appends are bounded by sizeof(buf).
#define APPEND_SIZE(s) \
    strlcat(buf, "[", sizeof(buf)); \
    strlcati(buf, s, sizeof(buf)); \
    strlcat(buf, "]", sizeof(buf));

    char buf[1500];
    char *result;

    buf[0] = '\0';

    // size == 0 means `base` isn't a block in this zone; treat it as
    // uncollectable memory below.
    size_t size =
        auto_zone_size(zone, (void *)base);
    auto_memory_type_t type = size ?
        auto_zone_get_layout_type(zone, (void *)base) : AUTO_TYPE_UNKNOWN;
    unsigned int refcount = size ?
        auto_zone_retain_count(zone, (void *)base) : 0;

    switch (type) {
    case AUTO_OBJECT_SCANNED:
    case AUTO_OBJECT_UNSCANNED:
    case AUTO_OBJECT_ALL_POINTERS: {
        // A real object: name it by class, resolving CF's generic
        // NSCFType wrapper to the underlying CF class when possible.
        const char *class_name = object_getClassName((id)base);
        if ((0 == strcmp(class_name, "__NSCFType")) || (0 == strcmp(class_name, "NSCFType"))) {
            strlcat(buf, cf_class_for_object((void *)base), sizeof(buf));
        } else {
            strlcat(buf, class_name, sizeof(buf));
        }
        if (offset) {
            // Interior pointer: identify which ivar it lands in.
            append_ivar_at_offset(buf, ((id)base)->ISA(), offset, sizeof(buf));
        }
        APPEND_SIZE(size);
        break;
    }
    case AUTO_MEMORY_SCANNED:
        strlcat(buf, "{conservative-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_MEMORY_UNSCANNED:
        strlcat(buf, "{no-pointers-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_MEMORY_ALL_POINTERS:
        strlcat(buf, "{all-pointers-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_MEMORY_ALL_WEAK_POINTERS:
        strlcat(buf, "{all-weak-pointers-block}", sizeof(buf));
        APPEND_SIZE(size);
        break;
    case AUTO_TYPE_UNKNOWN:
        strlcat(buf, "{uncollectable-memory}", sizeof(buf));
        break;
    default:
        strlcat(buf, "{unknown-memory-type}", sizeof(buf));
    }

    if (withRetainCount  &&  refcount > 0) {
        strlcat(buf, " [[refcount=", sizeof(buf));
        strlcati(buf, refcount, sizeof(buf));
        strlcat(buf, "]]", sizeof(buf));
    }

    // Copy the finished string into the debug zone and return it.
    size_t len = 1 + strlen(buf);
    result = (char *)malloc_zone_malloc(objc_debug_zone(), len);
    memcpy(result, buf, len);
    return result;

#undef APPEND_SIZE
}
1337
1338
1339
1340
1341
1342#endif
1343