1/*
2 * Copyright (c) 2010-2012 Apple Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. Please obtain a copy of the License at
10 * http://www.opensource.apple.com/apsl/ and read it before using this
11 * file.
12 *
13 * The Original Code and all software distributed under the License are
14 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
15 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
16 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
18 * Please see the License for the specific language governing rights and
19 * limitations under the License.
20 *
21 * @APPLE_LICENSE_HEADER_END@
22 */
23
24#include "objc-private.h"
25#include "NSObject.h"
26
27#include "objc-weak.h"
28#include "llvm-DenseMap.h"
29#include "NSObject.h"
30
31#include <malloc/malloc.h>
32#include <stdint.h>
33#include <stdbool.h>
34#include <mach/mach.h>
35#include <mach-o/dyld.h>
36#include <mach-o/nlist.h>
37#include <sys/types.h>
38#include <sys/mman.h>
39#include <libkern/OSAtomic.h>
40#include <Block.h>
41#include <map>
42#include <execinfo.h>
43
// Minimal redeclaration so the runtime can ask an NSInvocation for its
// selector without linking against Foundation.
@interface NSInvocation
- (SEL)selector;
@end

// HACK -- the use of these functions must be after the @implementation
// These C declarations alias the NSObject method implementations by mangled
// symbol name, letting C code call them without going through objc_msgSend.
id bypass_msgSend_retain(NSObject *obj) asm("-[NSObject retain]");
void bypass_msgSend_release(NSObject *obj) asm("-[NSObject release]");
id bypass_msgSend_autorelease(NSObject *obj) asm("-[NSObject autorelease]");
52
53
#if TARGET_OS_MAC

// NSObject used to be in Foundation/CoreFoundation.
// The "$ld$hide$os<vers>$<sym>" magic symbols below tell the static linker
// to hide libobjc's NSObject symbols when targeting those older OS versions,
// so binaries deployed there resolve NSObject from its old home instead.

// Innermost macro: emit one zero-byte marker symbol with the magic name.
// The __COUNTER__-derived suffix n keeps each C identifier unique.
#define SYMBOL_ELSEWHERE_IN_3(sym, vers, n)                             \
    OBJC_EXPORT const char elsewhere_ ##n __asm__("$ld$hide$os" #vers "$" #sym); const char elsewhere_ ##n = 0
// Extra expansion level so __COUNTER__ is expanded before pasting.
#define SYMBOL_ELSEWHERE_IN_2(sym, vers, n)     \
    SYMBOL_ELSEWHERE_IN_3(sym, vers, n)
#define SYMBOL_ELSEWHERE_IN(sym, vers)                  \
    SYMBOL_ELSEWHERE_IN_2(sym, vers, __COUNTER__)

#if __OBJC2__
// Modern ABI: hide the class, metaclass, and isa ivar symbols.
# define NSOBJECT_ELSEWHERE_IN(vers)                       \
    SYMBOL_ELSEWHERE_IN(_OBJC_CLASS_$_NSObject, vers);     \
    SYMBOL_ELSEWHERE_IN(_OBJC_METACLASS_$_NSObject, vers); \
    SYMBOL_ELSEWHERE_IN(_OBJC_IVAR_$_NSObject.isa, vers)
#else
// Legacy ABI: a single class-name symbol covers NSObject.
# define NSOBJECT_ELSEWHERE_IN(vers)                       \
    SYMBOL_ELSEWHERE_IN(.objc_class_name_NSObject, vers)
#endif

// Enumerate every OS release that shipped NSObject outside libobjc.
#if TARGET_OS_IPHONE
    NSOBJECT_ELSEWHERE_IN(5.1);
    NSOBJECT_ELSEWHERE_IN(5.0);
    NSOBJECT_ELSEWHERE_IN(4.3);
    NSOBJECT_ELSEWHERE_IN(4.2);
    NSOBJECT_ELSEWHERE_IN(4.1);
    NSOBJECT_ELSEWHERE_IN(4.0);
    NSOBJECT_ELSEWHERE_IN(3.2);
    NSOBJECT_ELSEWHERE_IN(3.1);
    NSOBJECT_ELSEWHERE_IN(3.0);
    NSOBJECT_ELSEWHERE_IN(2.2);
    NSOBJECT_ELSEWHERE_IN(2.1);
    NSOBJECT_ELSEWHERE_IN(2.0);
#else
    NSOBJECT_ELSEWHERE_IN(10.7);
    NSOBJECT_ELSEWHERE_IN(10.6);
    NSOBJECT_ELSEWHERE_IN(10.5);
    NSOBJECT_ELSEWHERE_IN(10.4);
    NSOBJECT_ELSEWHERE_IN(10.3);
    NSOBJECT_ELSEWHERE_IN(10.2);
    NSOBJECT_ELSEWHERE_IN(10.1);
    NSOBJECT_ELSEWHERE_IN(10.0);
#endif

// TARGET_OS_MAC
#endif
101
#if SUPPORT_RETURN_AUTORELEASE
// We cannot peek at where we are returning to unless we always inline this:
// Forward declaration. ra0 is the caller's return address; the definition
// (elsewhere in this file) inspects the code there to decide whether the
// autorelease-return optimization is safe.
__attribute__((always_inline))
static bool callerAcceptsFastAutorelease(const void * const ra0);
#endif
107
108
109/***********************************************************************
110* Weak ivar support
111**********************************************************************/
112
// Default response to a failed allocation: abort with a diagnostic.
// Declared as returning id to match the handler signature, but _objc_fatal
// does not return.
static id defaultBadAllocHandler(Class cls)
{
    _objc_fatal("attempt to allocate object of class '%s' failed",
                class_getName(cls));
}

// Current bad-alloc handler; replaceable via _objc_setBadAllocHandler().
static id(*badAllocHandler)(Class) = &defaultBadAllocHandler;

// Invoke the installed handler after class_createInstance returned nil.
static id callBadAllocHandler(Class cls)
{
    // fixme add re-entrancy protection in case allocation fails inside handler
    return (*badAllocHandler)(cls);
}

// Install a replacement handler for allocation failure.
// NOTE(review): no synchronization here — presumably expected to be called
// once during startup; confirm before calling from multiple threads.
void _objc_setBadAllocHandler(id(*newHandler)(Class))
{
    badAllocHandler = newHandler;
}
131
132
namespace {

// Number of independent side tables. Pointers are striped across them to
// reduce lock contention on multi-core targets; embedded gets a single table.
#if TARGET_OS_EMBEDDED
#   define SIDE_TABLE_STRIPE 1
#else
#   define SIDE_TABLE_STRIPE 8
#endif

// should be a multiple of cache line size (64)
#define SIDE_TABLE_SIZE 128

// The order of these bits is important.
// Layout of a refcount-table value: bit 0 = weakly referenced,
// bit 1 = deallocating, bits 2+ = extra retain count.
#define SIDE_TABLE_WEAKLY_REFERENCED (1<<0)
#define SIDE_TABLE_DEALLOCATING      (1<<1)  // MSB-ward of weak bit
#define SIDE_TABLE_RC_ONE            (1<<2)  // MSB-ward of deallocating bit

// Shift that converts a table value to the actual extra retain count.
#define SIDE_TABLE_RC_SHIFT 2


// Map from disguised object pointer to packed refcount bits.
typedef objc::DenseMap<id,size_t,true> RefcountMap;

// One stripe of the global side-table array: a spinlock guarding a
// refcount map and a weak-reference table for the objects that hash here.
class SideTable {
private:
    // Raw backing storage for all stripes; objects are constructed into it
    // with placement new by init().
    static uint8_t table_buf[SIDE_TABLE_STRIPE * SIDE_TABLE_SIZE];

public:
    spinlock_t slock;
    RefcountMap refcnts;
    weak_table_t weak_table;

    SideTable() : slock(SPINLOCK_INITIALIZER)
    {
        memset(&weak_table, 0, sizeof(weak_table));
    }

    ~SideTable()
    {
        // never delete side_table in case other threads retain during exit
        assert(0);
    }

    // Return the stripe responsible for pointer p. Mixing two shifted copies
    // of the address spreads nearby allocations across stripes.
    static SideTable *tableForPointer(const void *p)
    {
#     if SIDE_TABLE_STRIPE == 1
        return (SideTable *)table_buf;
#     else
        uintptr_t a = (uintptr_t)p;
        int index = ((a >> 4) ^ (a >> 9)) & (SIDE_TABLE_STRIPE - 1);
        return (SideTable *)&table_buf[index * SIDE_TABLE_SIZE];
#     endif
    }

    static void init() {
        // use placement new instead of static ctor to avoid dtor at exit
        for (int i = 0; i < SIDE_TABLE_STRIPE; i++) {
            new (&table_buf[i * SIDE_TABLE_SIZE]) SideTable;
        }
    }
};

// Each SideTable must fit in its fixed-size, cache-aligned slot.
STATIC_ASSERT(sizeof(SideTable) <= SIDE_TABLE_SIZE);
__attribute__((aligned(SIDE_TABLE_SIZE))) uint8_t
SideTable::table_buf[SIDE_TABLE_STRIPE * SIDE_TABLE_SIZE];

// Avoid false-negative reports from tools like "leaks"
// (keys stored in refcnts are bit-flipped so they don't look like pointers).
#define DISGUISE(x) ((id)~(uintptr_t)(x))

// anonymous namespace
};
202
203
//
// The -fobjc-arc flag causes the compiler to issue calls to objc_{retain/release/autorelease/retain_block}
//

// ARC entry point for retaining a block: defers to the Blocks runtime,
// which copies a stack block to the heap (or retains a heap block).
id objc_retainBlock(id x) {
    return (id)_Block_copy(x);
}
211
//
// The following SHOULD be called by the compiler directly, but the request hasn't been made yet :-)
//

// Hook for opting objects out of deallocation; this implementation never does.
BOOL objc_should_deallocate(id object) {
    return YES;
}

// Combined retain+autorelease, emitted by ARC for certain return patterns.
id
objc_retain_autorelease(id obj)
{
    return objc_autorelease(objc_retain(obj));
}
225
/**
 * This function stores a new value into a __weak variable. It would
 * be used anywhere a __weak variable is the target of an assignment.
 *
 * @param location The address of the weak pointer itself
 * @param newObj The new object this weak ptr should now point to
 *
 * @return \e newObj
 */
id
objc_storeWeak(id *location, id newObj)
{
    id oldObj;
    SideTable *oldTable;
    SideTable *newTable;
    spinlock_t *lock1;
#if SIDE_TABLE_STRIPE > 1
    spinlock_t *lock2;
#endif

    // Acquire locks for old and new values.
    // Order by lock address to prevent lock ordering problems.
    // Retry if the old value changes underneath us.
 retry:
    oldObj = *location;

    // tableForPointer accepts nil; it just hashes the pointer bits.
    oldTable = SideTable::tableForPointer(oldObj);
    newTable = SideTable::tableForPointer(newObj);

    lock1 = &newTable->slock;
#if SIDE_TABLE_STRIPE > 1
    lock2 = &oldTable->slock;
    // Sort the two locks by address so every thread takes them in the same
    // order; skip the second lock when both pointers hash to one stripe.
    if (lock1 > lock2) {
        spinlock_t *temp = lock1;
        lock1 = lock2;
        lock2 = temp;
    }
    if (lock1 != lock2) spinlock_lock(lock2);
#endif
    spinlock_lock(lock1);

    // Another thread may have stored to *location between the unlocked read
    // above and taking the locks; if so, drop the locks and start over.
    if (*location != oldObj) {
        spinlock_unlock(lock1);
#if SIDE_TABLE_STRIPE > 1
        if (lock1 != lock2) spinlock_unlock(lock2);
#endif
        goto retry;
    }

    // Move this weak reference from the old object's entry to the new one's.
    weak_unregister_no_lock(&oldTable->weak_table, oldObj, location);
    newObj = weak_register_no_lock(&newTable->weak_table, newObj, location);
    // weak_register_no_lock returns nil if weak store should be rejected

    // Set is-weakly-referenced bit in refcount table.
    if (newObj  &&  !newObj->isTaggedPointer()) {
        newTable->refcnts[DISGUISE(newObj)] |= SIDE_TABLE_WEAKLY_REFERENCED;
    }

    // Do not set *location anywhere else. That would introduce a race.
    *location = newObj;

    spinlock_unlock(lock1);
#if SIDE_TABLE_STRIPE > 1
    if (lock1 != lock2) spinlock_unlock(lock2);
#endif

    return newObj;
}
294
// Load the object from a __weak variable and retain it, returning nil if
// the variable is nil or the referent is already deallocating.
// The retain happens inside weak_read_no_lock while the table lock is held,
// so the result cannot be deallocated out from under the caller.
id
objc_loadWeakRetained(id *location)
{
    id result;

    SideTable *table;
    spinlock_t *lock;

 retry:
    result = *location;
    if (!result) return nil;

    table = SideTable::tableForPointer(result);
    lock = &table->slock;

    spinlock_lock(lock);
    // The variable may have been re-assigned between the unlocked read and
    // taking the lock; if so, start over so we lock the right stripe.
    if (*location != result) {
        spinlock_unlock(lock);
        goto retry;
    }

    result = weak_read_no_lock(&table->weak_table, location);

    spinlock_unlock(lock);
    return result;
}
321
/**
 * This loads the object referenced by a weak pointer and returns it, after
 * retaining and autoreleasing the object to ensure that it stays alive
 * long enough for the caller to use it. This function would be used
 * anywhere a __weak variable is used in an expression.
 *
 * @param location The weak pointer address
 *
 * @return The object pointed to by \e location, or \c nil if \e location is \c nil.
 */
id
objc_loadWeak(id *location)
{
    id current = *location;
    if (current == nil) return nil;
    id retained = objc_loadWeakRetained(location);
    return objc_autorelease(retained);
}
338
/**
 * Initialize a fresh weak pointer to some object location.
 * It would be used for code like:
 *
 * (The nil case)
 * __weak id weakPtr;
 * (The non-nil case)
 * NSObject *o = ...;
 * __weak id weakPtr = o;
 *
 * @param addr Address of __weak ptr.
 * @param val Object ptr.
 */
id
objc_initWeak(id *addr, id val)
{
    // Start from a zeroed slot so objc_storeWeak sees no previous referent.
    *addr = 0;
    return val ? objc_storeWeak(addr, val) : nil;
}
359
// Out-of-line path for objc_destroyWeak(): unregister *addr from its
// referent's weak table. Called only when *addr is (was) non-nil.
__attribute__((noinline, used)) void
objc_destroyWeak_slow(id *addr)
{
    SideTable *oldTable;
    spinlock_t *lock;
    id oldObj;

    // No need to see weak refs, we are destroying

    // Acquire lock for old value only
    // retry if the old value changes underneath us
 retry:
    oldObj = *addr;
    oldTable = SideTable::tableForPointer(oldObj);

    lock = &oldTable->slock;
    spinlock_lock(lock);

    // *addr may have been re-assigned before we got the lock; if so, the
    // stripe we locked may be wrong, so start over.
    if (*addr != oldObj) {
        spinlock_unlock(lock);
        goto retry;
    }

    weak_unregister_no_lock(&oldTable->weak_table, oldObj, addr);

    spinlock_unlock(lock);
}
387
/**
 * Destroys the relationship between a weak pointer
 * and the object it is referencing in the internal weak
 * table. If the weak pointer is not referencing anything,
 * there is no need to edit the weak table.
 *
 * @param addr The weak pointer address.
 */
void
objc_destroyWeak(id *addr)
{
    // Fast path: a nil weak variable has no weak-table entry to remove.
    id referent = *addr;
    if (referent) {
        objc_destroyWeak_slow(addr);
    }
}
402
/**
 * This function copies a weak pointer from one location to another,
 * when the destination doesn't already contain a weak pointer. It
 * would be used for code like:
 *
 *  __weak id weakPtr1 = ...;
 *  __weak id weakPtr2 = weakPtr1;
 *
 * @param to weakPtr2 in this ex
 * @param from weakPtr1
 */
void
objc_copyWeak(id *to, id *from)
{
    // Retain the referent first so it cannot deallocate between reading
    // *from and registering *to; balance the retain with a release at the end.
    id val = objc_loadWeakRetained(from);
    objc_initWeak(to, val);
    objc_release(val);
}
421
/**
 * Move a weak pointer from one location to another.
 * Before the move, the destination must be uninitialized.
 * After the move, the source is nil.
 */
void
objc_moveWeak(id *to, id *from)
{
    // Implemented as copy-then-clear: storeWeak(from, 0) unregisters the
    // source so only *to remains in the weak table.
    objc_copyWeak(to, from);
    objc_storeWeak(from, 0);
}
433
434
/* Autorelease pool implementation
   A thread's autorelease pool is a stack of pointers.
   Each pointer is either an object to release, or POOL_SENTINEL which is
     an autorelease pool boundary.
   A pool token is a pointer to the POOL_SENTINEL for that pool. When
     the pool is popped, every object hotter than the sentinel is released.
   The stack is divided into a doubly-linked list of pages. Pages are added
     and deleted as necessary.
   Thread-local storage points to the hot page, where newly autoreleased
     objects are stored.
 */

// Debugger hook: set a breakpoint on objc_autoreleaseNoPool to catch
// objects autoreleased with no pool in place (see autoreleaseSlow below).
BREAKPOINT_FUNCTION(void objc_autoreleaseNoPool(id obj));
448
449namespace {
450
// Corruption canary stamped at the start of every autorelease pool page:
// 16 bytes holding one fixed word followed by the string "AUTORELEASE!".
struct magic_t {
    static const uint32_t M0 = 0xA1A1A1A1;
#   define M1 "AUTORELEASE!"
    static const size_t M1_len = 12;
    uint32_t m[4];

    magic_t() {
        // The string must exactly fill m[1..3] (3 x 4 bytes, no terminator).
        assert(M1_len == strlen(M1));
        assert(M1_len == 3 * sizeof(m[1]));

        m[0] = M0;
        strncpy((char *)&m[1], M1, M1_len);
    }

    ~magic_t() {
        // Scrub the pattern so a dead page no longer looks valid.
        m[0] = m[1] = m[2] = m[3] = 0;
    }

    // Full check: fixed word plus the complete 12-byte string.
    bool check() const {
        return (m[0] == M0 && 0 == strncmp((char *)&m[1], M1, M1_len));
    }

    // Cheap hot-path check in release builds; full check when assertions are on.
    bool fastcheck() const {
#ifdef NDEBUG
        return (m[0] == M0);
#else
        return check();
#endif
    }

#   undef M1
};
483
484
485// Set this to 1 to mprotect() autorelease pool contents
486#define PROTECT_AUTORELEASEPOOL 0
487
// One page of a thread's autorelease pool stack. Pages are SIZE-byte,
// SIZE-aligned allocations linked into a doubly-linked list (parent = colder,
// child = hotter). Object pointers are stored in the space following the
// member variables; the thread's current ("hot") page lives in TLS.
class AutoreleasePoolPage
{

// Pool boundary marker pushed by push(); releaseUntil() skips it.
#define POOL_SENTINEL nil
    // TLS key holding this thread's hot page.
    static pthread_key_t const key = AUTORELEASE_POOL_KEY;
    static uint8_t const SCRIBBLE = 0xA3;  // 0xA3A3A3A3 after releasing
    static size_t const SIZE =
#if PROTECT_AUTORELEASEPOOL
        PAGE_SIZE;  // must be multiple of vm page size
#else
        PAGE_SIZE;  // size and alignment, power of 2
#endif
    // Page capacity measured in pointers (header included in SIZE).
    static size_t const COUNT = SIZE / sizeof(id);

    magic_t const magic;                 // corruption canary (see magic_t)
    id *next;                            // next free content slot
    pthread_t const thread;              // owning thread; pages are thread-local
    AutoreleasePoolPage * const parent;  // colder neighbor, nil at the coldest page
    AutoreleasePoolPage *child;          // hotter neighbor, nil at the hottest page
    uint32_t const depth;                // number of ancestors (0 = coldest)
    uint32_t hiwat;                      // high-water mark of pending autoreleases

    // SIZE-sizeof(*this) bytes of contents follow

    static void * operator new(size_t size) {
        // SIZE alignment lets pageForPointer() recover the page from any
        // interior pointer with a simple modulo.
        return malloc_zone_memalign(malloc_default_zone(), SIZE, SIZE);
    }
    static void operator delete(void * p) {
        return free(p);
    }

    // Make this page read-only (no-op unless PROTECT_AUTORELEASEPOOL).
    inline void protect() {
#if PROTECT_AUTORELEASEPOOL
        mprotect(this, SIZE, PROT_READ);
        check();
#endif
    }

    // Make this page writable again (no-op unless PROTECT_AUTORELEASEPOOL).
    inline void unprotect() {
#if PROTECT_AUTORELEASEPOOL
        check();
        mprotect(this, SIZE, PROT_READ | PROT_WRITE);
#endif
    }

    // Construct an empty page and link it as newParent's child.
    AutoreleasePoolPage(AutoreleasePoolPage *newParent)
        : magic(), next(begin()), thread(pthread_self()),
          parent(newParent), child(nil),
          depth(parent ? 1+parent->depth : 0),
          hiwat(parent ? parent->hiwat : 0)
    {
        if (parent) {
            parent->check();
            assert(!parent->child);
            parent->unprotect();
            parent->child = this;
            parent->protect();
        }
        protect();
    }

    ~AutoreleasePoolPage()
    {
        check();
        unprotect();
        assert(empty());

        // Not recursive: we don't want to blow out the stack
        // if a thread accumulates a stupendous amount of garbage
        assert(!child);
    }


    // Report a corrupted page; fatal unless die is false.
    void busted(bool die = true)
    {
        (die ? _objc_fatal : _objc_inform)
            ("autorelease pool page %p corrupted\n"
             "  magic 0x%08x 0x%08x 0x%08x 0x%08x\n  pthread %p\n",
             this, magic.m[0], magic.m[1], magic.m[2], magic.m[3],
             this->thread);
    }

    // Full validation: magic pattern intact and page owned by this thread.
    void check(bool die = true)
    {
        if (!magic.check() || !pthread_equal(thread, pthread_self())) {
            busted(die);
        }
    }

    // Cheaper validation used on hot paths.
    void fastcheck(bool die = true)
    {
        if (! magic.fastcheck()) {
            busted(die);
        }
    }


    // First content slot, just past the member variables.
    id * begin() {
        return (id *) ((uint8_t *)this+sizeof(*this));
    }

    // One past the last content slot.
    id * end() {
        return (id *) ((uint8_t *)this+SIZE);
    }

    bool empty() {
        return next == begin();
    }

    bool full() {
        return next == end();
    }

    bool lessThanHalfFull() {
        return (next - begin() < (end() - begin()) / 2);
    }

    // Append obj and return its slot. Caller must ensure the page isn't full.
    id *add(id obj)
    {
        assert(!full());
        unprotect();
        *next++ = obj;
        protect();
        return next-1;
    }

    void releaseAll()
    {
        releaseUntil(begin());
    }

    // Release every object hotter than stop, walking back through pages.
    void releaseUntil(id *stop)
    {
        // Not recursive: we don't want to blow out the stack
        // if a thread accumulates a stupendous amount of garbage

        while (this->next != stop) {
            // Restart from hotPage() every time, in case -release
            // autoreleased more objects
            AutoreleasePoolPage *page = hotPage();

            // fixme I think this `while` can be `if`, but I can't prove it
            while (page->empty()) {
                page = page->parent;
                setHotPage(page);
            }

            page->unprotect();
            id obj = *--page->next;
            // Scribble over the released slot to catch use-after-pop.
            memset((void*)page->next, SCRIBBLE, sizeof(*page->next));
            page->protect();

            if (obj != POOL_SENTINEL) {
                objc_release(obj);
            }
        }

        setHotPage(this);

#ifndef NDEBUG
        // we expect any children to be completely empty
        for (AutoreleasePoolPage *page = child; page; page = page->child) {
            assert(page->empty());
        }
#endif
    }

    // Delete this page and every hotter child page, coldest-last.
    void kill()
    {
        // Not recursive: we don't want to blow out the stack
        // if a thread accumulates a stupendous amount of garbage
        AutoreleasePoolPage *page = this;
        while (page->child) page = page->child;

        AutoreleasePoolPage *deathptr;
        do {
            deathptr = page;
            page = page->parent;
            if (page) {
                page->unprotect();
                page->child = nil;
                page->protect();
            }
            delete deathptr;
        } while (deathptr != this);
    }

    // TLS destructor run at thread exit: drain and destroy the whole stack.
    static void tls_dealloc(void *p)
    {
        // reinstate TLS value while we work
        setHotPage((AutoreleasePoolPage *)p);
        pop(0);
        setHotPage(nil);
    }

    static AutoreleasePoolPage *pageForPointer(const void *p)
    {
        return pageForPointer((uintptr_t)p);
    }

    // Recover the owning page from any pointer into its contents by masking
    // off the offset within the SIZE-aligned allocation.
    static AutoreleasePoolPage *pageForPointer(uintptr_t p)
    {
        AutoreleasePoolPage *result;
        uintptr_t offset = p % SIZE;

        // A valid content pointer can never land inside the page header.
        assert(offset >= sizeof(AutoreleasePoolPage));

        result = (AutoreleasePoolPage *)(p - offset);
        result->fastcheck();

        return result;
    }


    // This thread's current page (from TLS), or nil if no pool exists.
    static inline AutoreleasePoolPage *hotPage()
    {
        AutoreleasePoolPage *result = (AutoreleasePoolPage *)
            tls_get_direct(key);
        if (result) result->fastcheck();
        return result;
    }

    static inline void setHotPage(AutoreleasePoolPage *page)
    {
        if (page) page->fastcheck();
        tls_set_direct(key, (void *)page);
    }

    // The oldest page in this thread's stack (root of the parent chain).
    static inline AutoreleasePoolPage *coldPage()
    {
        AutoreleasePoolPage *result = hotPage();
        if (result) {
            while (result->parent) {
                result = result->parent;
                result->fastcheck();
            }
        }
        return result;
    }


    // Common case: append to the hot page if it exists and has room.
    static inline id *autoreleaseFast(id obj)
    {
        AutoreleasePoolPage *page = hotPage();
        if (page && !page->full()) {
            return page->add(obj);
        } else {
            return autoreleaseSlow(obj);
        }
    }

    // Slow path: no pool yet, or the hot page is full. Creates pages/pools
    // as needed, then appends obj.
    static __attribute__((noinline))
    id *autoreleaseSlow(id obj)
    {
        AutoreleasePoolPage *page;
        page = hotPage();

        // The code below assumes some cases are handled by autoreleaseFast()
        assert(!page || page->full());

        if (!page) {
            // No pool. Silently push one.
            assert(obj != POOL_SENTINEL);

            if (DebugMissingPools) {
                _objc_inform("MISSING POOLS: Object %p of class %s "
                             "autoreleased with no pool in place - "
                             "just leaking - break on "
                             "objc_autoreleaseNoPool() to debug",
                             (void*)obj, object_getClassName(obj));
                objc_autoreleaseNoPool(obj);
                return nil;
            }

            push();
            page = hotPage();
        }

        // Walk or grow toward a page with free space.
        do {
            if (page->child) page = page->child;
            else page = new AutoreleasePoolPage(page);
        } while (page->full());

        setHotPage(page);
        return page->add(obj);
    }

public:
    // Record obj for later release when its pool is popped. Returns obj.
    static inline id autorelease(id obj)
    {
        assert(obj);
        assert(!obj->isTaggedPointer());
        id *dest __unused = autoreleaseFast(obj);
        assert(!dest  ||  *dest == obj);
        return obj;
    }


    // Begin a new pool. Returns the token (sentinel slot) to pass to pop().
    static inline void *push()
    {
        if (!hotPage()) {
            setHotPage(new AutoreleasePoolPage(nil));
        }
        id *dest = autoreleaseFast(POOL_SENTINEL);
        assert(*dest == POOL_SENTINEL);
        return dest;
    }

    // End the pool identified by token: release everything hotter than it,
    // then trim now-empty pages. Token 0 drains and destroys everything.
    static inline void pop(void *token)
    {
        AutoreleasePoolPage *page;
        id *stop;

        if (token) {
            page = pageForPointer(token);
            stop = (id *)token;
            assert(*stop == POOL_SENTINEL);
        } else {
            // Token 0 is top-level pool
            page = coldPage();
            assert(page);
            stop = page->begin();
        }

        if (PrintPoolHiwat) printHiwat();

        page->releaseUntil(stop);

        // memory: delete empty children
        // hysteresis: keep one empty child if this page is more than half full
        // special case: delete everything for pop(0)
        // special case: delete everything for pop(top) with DebugMissingPools
        if (!token  ||
            (DebugMissingPools  &&  page->empty()  &&  !page->parent))
        {
            page->kill();
            setHotPage(nil);
        } else if (page->child) {
            if (page->lessThanHalfFull()) {
                page->child->kill();
            }
            else if (page->child->child) {
                page->child->child->kill();
            }
        }
    }

    // One-time setup: register the TLS destructor that drains pools at
    // thread exit.
    static void init()
    {
        int r __unused = pthread_key_init_np(AutoreleasePoolPage::key,
                                             AutoreleasePoolPage::tls_dealloc);
        assert(r == 0);
    }

    // Debug dump of this page's contents.
    void print()
    {
        _objc_inform("[%p]  ................  PAGE %s %s %s", this,
                     full() ? "(full)" : "",
                     this == hotPage() ? "(hot)" : "",
                     this == coldPage() ? "(cold)" : "");
        check(false);
        for (id *p = begin(); p < next; p++) {
            if (*p == POOL_SENTINEL) {
                _objc_inform("[%p]  ################  POOL %p", p, p);
            } else {
                _objc_inform("[%p]  %#16lx  %s",
                             p, (unsigned long)*p, object_getClassName(*p));
            }
        }
    }

    // Debug dump of the calling thread's entire pool stack.
    static void printAll()
    {
        _objc_inform("##############");
        _objc_inform("AUTORELEASE POOLS for thread %p", pthread_self());

        AutoreleasePoolPage *page;
        ptrdiff_t objects = 0;
        for (page = coldPage(); page; page = page->child) {
            objects += page->next - page->begin();
        }
        _objc_inform("%llu releases pending.", (unsigned long long)objects);

        for (page = coldPage(); page; page = page->child) {
            page->print();
        }

        _objc_inform("##############");
    }

    // Track and report a new high-water mark of pending autoreleases,
    // including a backtrace of the site that set it.
    static void printHiwat()
    {
        // Check and propagate high water mark
        // Ignore high water marks under 256 to suppress noise.
        AutoreleasePoolPage *p = hotPage();
        uint32_t mark = p->depth*COUNT + (uint32_t)(p->next - p->begin());
        if (mark > p->hiwat  &&  mark > 256) {
            // Propagate the new mark to every colder page.
            for( ; p; p = p->parent) {
                p->unprotect();
                p->hiwat = mark;
                p->protect();
            }

            _objc_inform("POOL HIGHWATER: new high water mark of %u "
                         "pending autoreleases for thread %p:",
                         mark, pthread_self());

            void *stack[128];
            int count = backtrace(stack, sizeof(stack)/sizeof(stack[0]));
            char **sym = backtrace_symbols(stack, count);
            for (int i = 0; i < count; i++) {
                _objc_inform("POOL HIGHWATER:     %s", sym[i]);
            }
            free(sym);
        }
    }

#undef POOL_SENTINEL
};
907
908// anonymous namespace
909};
910
// API to only be called by root classes like NSObject or NSProxy

// Forward declarations of the out-of-line contention paths so the fast
// paths below can tail into them. The attributes keep them un-inlined and
// present in the binary even though they are static.
extern "C" {
__attribute__((used,noinline,nothrow))
static id _objc_rootRetain_slow(id obj, SideTable *table);
__attribute__((used,noinline,nothrow))
static bool _objc_rootReleaseWasZero_slow(id obj, SideTable *table);
};
919
// Contention path for root retain: take the side table's spinlock (blocking)
// and bump obj's extra retain count. Returns obj for call chaining.
id
_objc_rootRetain_slow(id obj, SideTable *table)
{
    spinlock_lock(&table->slock);
    table->refcnts[DISGUISE(obj)] += SIDE_TABLE_RC_ONE;
    spinlock_unlock(&table->slock);

    return obj;
}
929
// Retain obj only if it is not already deallocating.
// Returns true if the retain was taken; false if obj is deallocating.
// Tagged pointers are never deallocated, so they always succeed.
bool
_objc_rootTryRetain(id obj)
{
    assert(obj);
    assert(!UseGC);

    if (obj->isTaggedPointer()) return true;

    SideTable *table = SideTable::tableForPointer(obj);

    // NO SPINLOCK HERE
    // _objc_rootTryRetain() is called exclusively by _objc_loadWeak(),
    // which already acquired the lock on our behalf.

    // fixme can't do this efficiently with os_lock_handoff_s
    // if (table->slock == 0) {
    //     _objc_fatal("Do not call -_tryRetain.");
    // }

    bool result = true;
    RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
    if (it == table->refcnts.end()) {
        // No entry yet: create one holding a single extra retain.
        table->refcnts[DISGUISE(obj)] = SIDE_TABLE_RC_ONE;
    } else if (it->second & SIDE_TABLE_DEALLOCATING) {
        // Object is already being torn down; refuse the retain.
        result = false;
    } else {
        it->second += SIDE_TABLE_RC_ONE;
    }

    return result;
}
961
// Report whether obj has begun deallocation. Tagged pointers never
// deallocate. Like _objc_rootTryRetain, this relies on the caller
// (_objc_storeWeak) already holding the side table's lock.
bool
_objc_rootIsDeallocating(id obj)
{
    assert(obj);
    assert(!UseGC);

    if (obj->isTaggedPointer()) return false;

    SideTable *table = SideTable::tableForPointer(obj);

    // NO SPINLOCK HERE
    // _objc_rootIsDeallocating() is called exclusively by _objc_storeWeak(),
    // which already acquired the lock on our behalf.


    // fixme can't do this efficiently with os_lock_handoff_s
    // if (table->slock == 0) {
    //     _objc_fatal("Do not call -_isDeallocating.");
    // }

    RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
    if (it == table->refcnts.end()) {
        // No side table entry: deallocation has not been recorded.
        return false;
    }
    return (it->second & SIDE_TABLE_DEALLOCATING) != 0;
}
985
986
// Called during deallocation: zero out every weak reference to obj and
// discard its side table entry (extra retain count + flag bits).
void
objc_clear_deallocating(id obj)
{
    assert(obj);
    assert(!UseGC);

    SideTable *table = SideTable::tableForPointer(obj);

    // clear any weak table items
    // clear extra retain count and deallocating bit
    // (fixme warn or abort if extra retain count == 0 ?)
    spinlock_lock(&table->slock);
    RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
    if (it != table->refcnts.end()) {
        // Only touch the weak table if a weak reference was ever recorded.
        if (it->second & SIDE_TABLE_WEAKLY_REFERENCED) {
            weak_clear_no_lock(&table->weak_table, obj);
        }
        table->refcnts.erase(it);
    }
    spinlock_unlock(&table->slock);
}
1008
1009
// Decrement obj's side table refcount by one, recording deallocation if the
// count hits zero. table->slock must be held by the caller.
// Returns true if the caller should deallocate obj.
//
// Factored out so the trylock fast path and the blocking slow path below
// cannot drift apart (they previously duplicated this logic verbatim).
static bool
_objc_rootReleaseWasZeroLocked(id obj, SideTable *table)
{
    RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
    if (it == table->refcnts.end()) {
        // No entry: the implicit retain count of +1 just reached zero.
        table->refcnts[DISGUISE(obj)] = SIDE_TABLE_DEALLOCATING;
        return true;
    }
    if (it->second < SIDE_TABLE_DEALLOCATING) {
        // SIDE_TABLE_WEAKLY_REFERENCED may be set. Don't change it.
        it->second |= SIDE_TABLE_DEALLOCATING;
        return true;
    }
    it->second -= SIDE_TABLE_RC_ONE;
    return false;
}

// Contention path for release: block on the side table's spinlock, then
// perform the shared decrement. Returns true if obj should be deallocated.
bool
_objc_rootReleaseWasZero_slow(id obj, SideTable *table)
{
    spinlock_lock(&table->slock);
    bool do_dealloc = _objc_rootReleaseWasZeroLocked(obj, table);
    spinlock_unlock(&table->slock);
    return do_dealloc;
}

// Release obj once. Returns true if the retain count reached zero and the
// caller should deallocate obj. Tagged pointers are never deallocated.
// Fast path uses a trylock; on contention it falls back to the out-of-line
// slow path so this function stays compact.
bool
_objc_rootReleaseWasZero(id obj)
{
    assert(obj);
    assert(!UseGC);

    if (obj->isTaggedPointer()) return false;

    SideTable *table = SideTable::tableForPointer(obj);

    if (spinlock_trylock(&table->slock)) {
        bool do_dealloc = _objc_rootReleaseWasZeroLocked(obj, table);
        spinlock_unlock(&table->slock);
        return do_dealloc;
    }

    return _objc_rootReleaseWasZero_slow(obj, table);
}
1061
// Out-of-line core of root autorelease: tagged pointers are returned
// unchanged (they are never released); everything else is pushed onto the
// thread's autorelease pool.
__attribute__((noinline,used))
static id _objc_rootAutorelease2(id obj)
{
    if (obj->isTaggedPointer()) return obj;
    return AutoreleasePoolPage::autorelease(obj);
}
1068
// Return obj's apparent retain count: 1 (the implicit retain) plus any
// extra count stored in the side table. Tagged pointers report their own
// pointer value.
uintptr_t
_objc_rootRetainCount(id obj)
{
    assert(obj);
    assert(!UseGC);

    // XXX -- There is no way that anybody can use this API race free in a
    // threaded environment because the result is immediately stale by the
    // time the caller receives it.

    if (obj->isTaggedPointer()) return (uintptr_t)obj;

    SideTable *table = SideTable::tableForPointer(obj);

    size_t refcnt_result = 1;

    spinlock_lock(&table->slock);
    RefcountMap::iterator it = table->refcnts.find(DISGUISE(obj));
    if (it != table->refcnts.end()) {
        // Shift away the flag bits to get the stored extra count.
        refcnt_result += it->second >> SIDE_TABLE_RC_SHIFT;
    }
    spinlock_unlock(&table->slock);
    return refcnt_result;
}
1093
// Root implementation of -init: returns the receiver unchanged.
id
_objc_rootInit(id obj)
{
    // In practice, it will be hard to rely on this function.
    // Many classes do not properly chain -init calls.
    return obj;
}
1101
// Root implementation of +allocWithZone:. Creates an instance of cls,
// invoking the bad-alloc handler if instance creation returns nil.
// Under __OBJC2__ the zone argument is ignored.
id
_objc_rootAllocWithZone(Class cls, malloc_zone_t *zone)
{
    id obj;

#if __OBJC2__
    // allocWithZone under __OBJC2__ ignores the zone parameter
    (void)zone;
    obj = class_createInstance(cls, 0);
#else
    // Legacy runtime: honor the zone unless it is nil or GC is on.
    if (!zone || UseGC) {
        obj = class_createInstance(cls, 0);
    }
    else {
        obj = class_createInstanceFromZone(cls, 0, zone);
    }
#endif

    // nil here means allocation failed; give the handler a chance to
    // recover (or abort).
    if (!obj) obj = callBadAllocHandler(cls);
    return obj;
}
1123
// Root implementation of +alloc. If the class does not override
// +allocWithZone:, allocate directly and skip the message send;
// otherwise dispatch +allocWithZone: with a nil zone.
id
_objc_rootAlloc(Class cls)
{
#if __OBJC2__
    // Skip over the +allocWithZone: call if the class doesn't override it.
    if (! cls->ISA()->hasCustomAWZ()) {
        id obj = class_createInstance(cls, 0);
        if (!obj) obj = callBadAllocHandler(cls);
        return obj;
    }
#endif
    return [cls allocWithZone: nil];
}
1137
// Compiler-emitted fast allocation entry point for [Cls alloc].
// Allocates directly when cls is non-nil, its metaclass is already
// initialized, and it does not override +alloc/+allocWithZone:;
// otherwise falls back to sending +alloc (which also handles nil
// and triggers +initialize).
id
objc_alloc(Class cls)
{
#if __OBJC2__
    // Skip over +alloc and +allocWithZone: if the class doesn't override them.
    if (cls  &&
        cls->ISA()->isInitialized_meta()  &&
        ! cls->ISA()->hasCustomAWZ())
    {
        id obj = class_createInstance(cls, 0);
        if (!obj) obj = callBadAllocHandler(cls);
        return obj;
    }
#endif
    return [cls alloc];
}
1154
// Compiler-emitted fast entry point for [Cls allocWithZone:nil].
// Same fast-path conditions as objc_alloc(); the slow path sends
// +allocWithZone: with a nil zone (the zone is ignored under __OBJC2__).
id
objc_allocWithZone(Class cls)
{
#if __OBJC2__
    // Skip over the +allocWithZone: call if the class doesn't override it.
    if (cls  &&
        cls->ISA()->isInitialized_meta()  &&
        ! cls->ISA()->hasCustomAWZ())
    {
        id obj = class_createInstance(cls, 0);
        if (!obj) obj = callBadAllocHandler(cls);
        return obj;
    }
#endif
    return [cls allocWithZone: nil];
}
1171
// Root implementation of -dealloc: destroy the instance and free its
// memory. Tagged pointers own no storage and are ignored.
void
_objc_rootDealloc(id obj)
{
    assert(obj);
    assert(!UseGC);

    if (obj->isTaggedPointer()) return;

    object_dispose(obj);
}
1182
// Root implementation of -finalize. A no-op under GC; calling it with
// GC off is a fatal programmer error.
void
_objc_rootFinalize(id obj __unused)
{
    assert(obj);
    assert(UseGC);

    if (UseGC) {
        return;
    }
    _objc_fatal("_objc_rootFinalize called with garbage collection off");
}
1194
// Root implementation of -zone: report the malloc zone the object
// belongs to. Returns the GC zone when one exists; under __OBJC2__
// everything else reports the default zone.
malloc_zone_t *
_objc_rootZone(id obj)
{
    (void)obj;
    if (gc_zone) {
        return gc_zone;
    }
#if __OBJC2__
    // allocWithZone under __OBJC2__ ignores the zone parameter
    return malloc_default_zone();
#else
    // Legacy runtime: ask malloc which zone actually owns the pointer.
    malloc_zone_t *rval = malloc_zone_from_ptr(obj);
    return rval ? rval : malloc_default_zone();
#endif
}
1210
// Root implementation of -hash. With GC enabled the hash comes from the
// external hash table (_object_getExternalHash); otherwise the object's
// address serves as its hash.
uintptr_t
_objc_rootHash(id obj)
{
    return UseGC ? _object_getExternalHash(obj) : (uintptr_t)obj;
}
1219
1220// make CF link for now
// Underscore-prefixed alias so CoreFoundation can link against it.
void *_objc_autoreleasePoolPush(void) { return objc_autoreleasePoolPush(); }
// Underscore-prefixed alias so CoreFoundation can link against it.
void _objc_autoreleasePoolPop(void *ctxt) { objc_autoreleasePoolPop(ctxt); }
1223
// Begin a new autorelease pool scope; returns an opaque token to pass
// to objc_autoreleasePoolPop(). Pools are unused under GC (returns nil).
void *
objc_autoreleasePoolPush(void)
{
    if (UseGC) return nil;
    return AutoreleasePoolPage::push();
}
1230
// End the autorelease pool scope identified by ctxt (a token from
// objc_autoreleasePoolPush), releasing the objects added since then.
// No-op under GC or when ctxt is nil.
void
objc_autoreleasePoolPop(void *ctxt)
{
    if (UseGC) return;

    // fixme rdar://9167170
    if (!ctxt) return;

    AutoreleasePoolPage::pop(ctxt);
}
1241
// Debugging aid: dump the contents of the current thread's autorelease
// pool pages. No-op under GC.
void
_objc_autoreleasePoolPrint(void)
{
    if (UseGC) return;
    AutoreleasePoolPage::printAll();
}
1248
1249#if SUPPORT_RETURN_AUTORELEASE
1250
1251/*
1252  Fast handling of returned autoreleased values.
1253  The caller and callee cooperate to keep the returned object
1254  out of the autorelease pool.
1255
1256  Caller:
1257    ret = callee();
1258    objc_retainAutoreleasedReturnValue(ret);
1259    // use ret here
1260
1261  Callee:
1262    // compute ret
1263    [ret retain];
1264    return objc_autoreleaseReturnValue(ret);
1265
  objc_autoreleaseReturnValue() examines the caller's instructions following
  the return. If the caller's instructions immediately call
  objc_retainAutoreleasedReturnValue, then the callee omits the -autorelease
  and saves the result in thread-local storage. If the caller does not look
  like it cooperates, then the callee calls -autorelease as usual.

  objc_retainAutoreleasedReturnValue checks if the returned value is the same
  as the one in thread-local storage. If it is, the value is used directly.
  If not, the value is assumed to be truly autoreleased and is retained
  again.  In either case, the caller now has a retained reference to the
  value.
1276
1277  Tagged pointer objects do participate in the fast autorelease scheme,
1278  because it saves message sends. They are not entered in the autorelease
1279  pool in the slow case.
1280*/
1281
1282# if __x86_64__
1283
// Decide whether the caller (whose return address is ra0) cooperates
// with the fast autorelease handoff. Returns true only if the code at
// ra0 is "movq %rax,%rdi" followed by a call whose eventual target
// symbol, resolved through the stub as decoded below, is
// objc_retainAutoreleasedReturnValue.
static bool callerAcceptsFastAutorelease(const void * const ra0)
{
    // Views of the same return address at byte / 2-byte / 4-byte widths.
    const uint8_t *ra1 = (const uint8_t *)ra0;
    const uint16_t *ra2;
    const uint32_t *ra4 = (const uint32_t *)ra1;
    const void **sym;

#define PREFER_GOTPCREL 0
#if PREFER_GOTPCREL
    // Expect an indirect call through the GOT:
    // 48 89 c7    movq  %rax,%rdi
    // ff 15       callq *symbol@GOTPCREL(%rip)
    if (*ra4 != 0xffc78948) {
        return false;
    }
    if (ra1[4] != 0x15) {
        return false;
    }
    ra1 += 3;
#else
    // Expect a direct call to a stub:
    // 48 89 c7    movq  %rax,%rdi
    // e8          callq symbol
    if (*ra4 != 0xe8c78948) {
        return false;
    }
    // Follow the rel32 call displacement to the stub.
    ra1 += (long)*(const int32_t *)(ra1 + 4) + 8l;
    ra2 = (const uint16_t *)ra1;
    // The stub must be an indirect jump:
    // ff 25       jmpq *symbol@DYLDMAGIC(%rip)
    if (*ra2 != 0x25ff) {
        return false;
    }
#endif
    // Resolve the RIP-relative slot the jump/call goes through and
    // compare its contents against the expected function pointer.
    ra1 += 6l + (long)*(const int32_t *)(ra1 + 2);
    sym = (const void **)ra1;
    if (*sym != objc_retainAutoreleasedReturnValue)
    {
        return false;
    }

    return true;
}
1324
1325// __x86_64__
1326# elif __arm__
1327
// ARM variant: the compiler marks cooperating call sites with a
// "mov r7, r7" marker NOP at the return address. Check for the Thumb
// or ARM encoding of that instruction depending on the mode bit.
static bool callerAcceptsFastAutorelease(const void *ra)
{
    // if the low bit is set, we're returning to thumb mode
    if ((uintptr_t)ra & 1) {
        // 3f 46          mov r7, r7
        // we mask off the low bit via subtraction
        if (*(uint16_t *)((uint8_t *)ra - 1) == 0x463f) {
            return true;
        }
    } else {
        // 07 70 a0 e1    mov r7, r7
        if (*(uint32_t *)ra == 0xe1a07007) {
            return true;
        }
    }
    return false;
}
1345
1346// __arm__
1347# elif __i386__  &&  TARGET_IPHONE_SIMULATOR
1348
// i386 simulator: the fast autorelease handoff is not implemented here,
// so no caller ever qualifies.
static bool callerAcceptsFastAutorelease(const void *ra)
{
    return false;
}
1353
1354// __i386__  &&  TARGET_IPHONE_SIMULATOR
1355# else
1356
1357#warning unknown architecture
1358
// Fallback for architectures without a recognizer: always take the
// slow (real autorelease) path.
static bool callerAcceptsFastAutorelease(const void *ra)
{
    return false;
}
1363
1364# endif
1365
1366// SUPPORT_RETURN_AUTORELEASE
1367#endif
1368
1369
// Callee side of the return-value handoff (see block comment above).
// If the caller's code at the return address cooperates, stash obj in
// thread-local storage instead of autoreleasing it; otherwise perform
// a normal autorelease.
id
objc_autoreleaseReturnValue(id obj)
{
#if SUPPORT_RETURN_AUTORELEASE
    // The reclaim slot must be empty — handoffs cannot nest.
    assert(tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY) == nil);

    if (callerAcceptsFastAutorelease(__builtin_return_address(0))) {
        tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, obj);
        return obj;
    }
#endif

    return objc_autorelease(obj);
}
1384
// Retain obj, then return it through the fast handoff machinery
// (equivalent to [[obj retain] autorelease] at a return site).
id
objc_retainAutoreleaseReturnValue(id obj)
{
    return objc_autoreleaseReturnValue(objc_retain(obj));
}
1390
// Caller side of the return-value handoff. If obj is the value the
// callee stashed in thread-local storage, claim it directly (clearing
// the slot); otherwise it was genuinely autoreleased, so retain it.
// Either way the caller ends up owning a +1 reference.
id
objc_retainAutoreleasedReturnValue(id obj)
{
#if SUPPORT_RETURN_AUTORELEASE
    if (obj == tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY)) {
        tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, 0);
        return obj;
    }
#endif
    return objc_retain(obj);
}
1402
// Store obj into *location with strong (retain/release) semantics.
// The new value is retained before the old one is released so that
// storing a value already held by *location never over-releases it;
// equal old/new values are left untouched entirely.
void
objc_storeStrong(id *location, id obj)
{
    // XXX FIXME -- GC support?
    id oldValue = *location;
    if (obj != oldValue) {
        objc_retain(obj);
        *location = obj;
        objc_release(oldValue);
    }
}
1415
// Equivalent of [[obj retain] autorelease] via the runtime entry points.
id
objc_retainAutorelease(id obj)
{
    return objc_autorelease(objc_retain(obj));
}
1421
// Trampoline with a void*-context signature: sends -dealloc to the
// object passed as context. NOTE(review): the signature matches a
// dispatch_function_t, presumably for deallocating on the main queue —
// confirm against callers.
void
_objc_deallocOnMainThreadHelper(void *context)
{
    id obj = (id)context;
    [obj dealloc];
}
1428
1429#undef objc_retainedObject
1430#undef objc_unretainedObject
1431#undef objc_unretainedPointer
1432
// convert objc_objectptr_t to id, callee must take ownership:
// the caller hands over a +1 reference along with the pointer.
id objc_retainedObject(objc_objectptr_t pointer) { return (id)pointer; }
1435
// convert objc_objectptr_t to id, without ownership transfer:
// the caller keeps whatever reference it already holds.
id objc_unretainedObject(objc_objectptr_t pointer) { return (id)pointer; }
1438
// convert id to objc_objectptr_t, no ownership transfer.
objc_objectptr_t objc_unretainedPointer(id object) { return object; }
1441
1442
// One-time startup for the automatic retain/release machinery:
// initialize the autorelease pool pages and the side tables.
void arr_init(void)
{
    AutoreleasePoolPage::init();
    SideTable::init();
}
1448
// Root class. Several methods below are annotated "Replaced by CF" or
// "Replaced by ObjectAlloc": per those annotations, other components
// install their own implementations at runtime, and the bodies here are
// the fallbacks used when that replacement has not happened.
@implementation NSObject

// Runs at image load; kicks off the second phase of GC initialization
// when garbage collection is enabled.
+ (void)load {
    if (UseGC) gc_init2();
}

// Default +initialize does nothing.
+ (void)initialize {
}

// ----- identity -----

+ (id)self {
    return (id)self;
}

- (id)self {
    return self;
}

// For a class receiver, the class object itself.
+ (Class)class {
    return self;
}

// For an instance, its dynamic class.
- (Class)class {
    return object_getClass(self);
}

+ (Class)superclass {
    return self->superclass;
}

- (Class)superclass {
    return [self class]->superclass;
}

// Exact-class check: compares against the receiver's metaclass here.
+ (BOOL)isMemberOfClass:(Class)cls {
    return object_getClass((id)self) == cls;
}

// Exact-class check for instances.
- (BOOL)isMemberOfClass:(Class)cls {
    return [self class] == cls;
}

// Walk the metaclass hierarchy looking for cls.
+ (BOOL)isKindOfClass:(Class)cls {
    for (Class tcls = object_getClass((id)self); tcls; tcls = tcls->superclass) {
        if (tcls == cls) return YES;
    }
    return NO;
}

// Walk the instance's class hierarchy looking for cls.
- (BOOL)isKindOfClass:(Class)cls {
    for (Class tcls = [self class]; tcls; tcls = tcls->superclass) {
        if (tcls == cls) return YES;
    }
    return NO;
}

// YES if the receiver is cls or a (transitive) subclass of it.
+ (BOOL)isSubclassOfClass:(Class)cls {
    for (Class tcls = self; tcls; tcls = tcls->superclass) {
        if (tcls == cls) return YES;
    }
    return NO;
}

// YES if the receiver class appears anywhere in obj's class hierarchy.
+ (BOOL)isAncestorOfObject:(NSObject *)obj {
    for (Class tcls = [obj class]; tcls; tcls = tcls->superclass) {
        if (tcls == self) return YES;
    }
    return NO;
}

// ----- selector / protocol introspection -----

+ (BOOL)instancesRespondToSelector:(SEL)sel {
    if (!sel) return NO;
    return class_respondsToSelector(self, sel);
}

// Class receiver: look the selector up on the metaclass.
+ (BOOL)respondsToSelector:(SEL)sel {
    if (!sel) return NO;
    return class_respondsToSelector(object_getClass((id)self), sel);
}

- (BOOL)respondsToSelector:(SEL)sel {
    if (!sel) return NO;
    return class_respondsToSelector([self class], sel);
}

// Conformance anywhere in the class hierarchy counts.
+ (BOOL)conformsToProtocol:(Protocol *)protocol {
    if (!protocol) return NO;
    for (Class tcls = self; tcls; tcls = tcls->superclass) {
        if (class_conformsToProtocol(tcls, protocol)) return YES;
    }
    return NO;
}

- (BOOL)conformsToProtocol:(Protocol *)protocol {
    if (!protocol) return NO;
    for (Class tcls = [self class]; tcls; tcls = tcls->superclass) {
        if (class_conformsToProtocol(tcls, protocol)) return YES;
    }
    return NO;
}

// ----- hashing and equality (identity-based by default) -----

+ (NSUInteger)hash {
    return _objc_rootHash(self);
}

- (NSUInteger)hash {
    return _objc_rootHash(self);
}

+ (BOOL)isEqual:(id)obj {
    return obj == (id)self;
}

- (BOOL)isEqual:(id)obj {
    return obj == self;
}


// Root objects are neither faults nor proxies.
+ (BOOL)isFault {
    return NO;
}

- (BOOL)isFault {
    return NO;
}

+ (BOOL)isProxy {
    return NO;
}

- (BOOL)isProxy {
    return NO;
}


// ----- IMP lookup (a nil selector is a programmer error) -----

+ (IMP)instanceMethodForSelector:(SEL)sel {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return class_getMethodImplementation(self, sel);
}

+ (IMP)methodForSelector:(SEL)sel {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return object_getMethodImplementation((id)self, sel);
}

- (IMP)methodForSelector:(SEL)sel {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return object_getMethodImplementation(self, sel);
}

// Dynamic method resolution hooks; the defaults resolve nothing.
+ (BOOL)resolveClassMethod:(SEL)sel {
    return NO;
}

+ (BOOL)resolveInstanceMethod:(SEL)sel {
    return NO;
}

// Replaced by CF (throws an NSException)
// NOTE(review): the message says "instance" even though self is a class
// here — wording appears copied from the instance variant; confirm
// whether "class" was intended.
+ (void)doesNotRecognizeSelector:(SEL)sel {
    _objc_fatal("+[%s %s]: unrecognized selector sent to instance %p",
                class_getName(self), sel_getName(sel), self);
}

// Replaced by CF (throws an NSException)
- (void)doesNotRecognizeSelector:(SEL)sel {
    _objc_fatal("-[%s %s]: unrecognized selector sent to instance %p",
                object_getClassName(self), sel_getName(sel), self);
}


// ----- performSelector family: direct objc_msgSend with a cast -----

+ (id)performSelector:(SEL)sel {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return ((id(*)(id, SEL))objc_msgSend)((id)self, sel);
}

+ (id)performSelector:(SEL)sel withObject:(id)obj {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return ((id(*)(id, SEL, id))objc_msgSend)((id)self, sel, obj);
}

+ (id)performSelector:(SEL)sel withObject:(id)obj1 withObject:(id)obj2 {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return ((id(*)(id, SEL, id, id))objc_msgSend)((id)self, sel, obj1, obj2);
}

- (id)performSelector:(SEL)sel {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return ((id(*)(id, SEL))objc_msgSend)(self, sel);
}

- (id)performSelector:(SEL)sel withObject:(id)obj {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return ((id(*)(id, SEL, id))objc_msgSend)(self, sel, obj);
}

- (id)performSelector:(SEL)sel withObject:(id)obj1 withObject:(id)obj2 {
    if (!sel) [self doesNotRecognizeSelector:sel];
    return ((id(*)(id, SEL, id, id))objc_msgSend)(self, sel, obj1, obj2);
}


// ----- forwarding: unusable without CoreFoundation -----

// Replaced by CF (returns an NSMethodSignature)
+ (NSMethodSignature *)instanceMethodSignatureForSelector:(SEL)sel {
    _objc_fatal("+[NSObject instanceMethodSignatureForSelector:] "
                "not available without CoreFoundation");
}

// Replaced by CF (returns an NSMethodSignature)
+ (NSMethodSignature *)methodSignatureForSelector:(SEL)sel {
    _objc_fatal("+[NSObject methodSignatureForSelector:] "
                "not available without CoreFoundation");
}

// Replaced by CF (returns an NSMethodSignature)
- (NSMethodSignature *)methodSignatureForSelector:(SEL)sel {
    _objc_fatal("-[NSObject methodSignatureForSelector:] "
                "not available without CoreFoundation");
}

// Default forwarding: treat the invocation's selector as unrecognized.
+ (void)forwardInvocation:(NSInvocation *)invocation {
    [self doesNotRecognizeSelector:(invocation ? [invocation selector] : 0)];
}

- (void)forwardInvocation:(NSInvocation *)invocation {
    [self doesNotRecognizeSelector:(invocation ? [invocation selector] : 0)];
}

// No fast-forwarding target by default.
+ (id)forwardingTargetForSelector:(SEL)sel {
    return nil;
}

- (id)forwardingTargetForSelector:(SEL)sel {
    return nil;
}


// ----- description -----

// Replaced by CF (returns an NSString)
+ (NSString *)description {
    return nil;
}

// Replaced by CF (returns an NSString)
- (NSString *)description {
    return nil;
}

+ (NSString *)debugDescription {
    return [self description];
}

- (NSString *)debugDescription {
    return [self description];
}


// ----- lifecycle -----

+ (id)new {
    return [[self alloc] init];
}

// Classes are never retained; return the receiver unchanged.
+ (id)retain {
    return (id)self;
}

// Replaced by ObjectAlloc
// Instance -retain: bump the side-table extra retain count. Tries the
// spinlock once and falls back to the out-of-line slow path on
// contention. aligned(16) keeps the entry point on a 16-byte boundary.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmismatched-method-attributes"
- (id)retain
__attribute__((aligned(16)))
{
    // Tagged pointers are not refcounted.
    if (((id)self)->isTaggedPointer()) return self;

    SideTable *table = SideTable::tableForPointer(self);

    if (spinlock_trylock(&table->slock)) {
        table->refcnts[DISGUISE(self)] += SIDE_TABLE_RC_ONE;
        spinlock_unlock(&table->slock);
        return self;
    }
    return _objc_rootRetain_slow(self, table);
}
#pragma clang diagnostic pop


// Classes always "succeed" at tryRetain and never deallocate.
+ (BOOL)_tryRetain {
    return YES;
}

// Replaced by ObjectAlloc
- (BOOL)_tryRetain {
    return _objc_rootTryRetain(self);
}

+ (BOOL)_isDeallocating {
    return NO;
}

- (BOOL)_isDeallocating {
    return _objc_rootIsDeallocating(self);
}

// Weak-reference support: classes always allow and retain weak refs;
// instances refuse once deallocation has begun.
+ (BOOL)allowsWeakReference {
    return YES;
}

+ (BOOL)retainWeakReference {
    return YES;
}

- (BOOL)allowsWeakReference {
    return ! [self _isDeallocating];
}

- (BOOL)retainWeakReference {
    return [self _tryRetain];
}

// Releasing a class is a no-op.
+ (oneway void)release {
}

// Replaced by ObjectAlloc
// Instance -release: decrement via _objc_rootReleaseWasZero() and
// dealloc when the count hits zero.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmismatched-method-attributes"
- (oneway void)release
__attribute__((aligned(16)))
{
    // tagged pointer check is inside _objc_rootReleaseWasZero().

    if (_objc_rootReleaseWasZero(self) == false) {
        return;
    }
    [self dealloc];
}
#pragma clang diagnostic pop

// Autoreleasing a class is a no-op.
+ (id)autorelease {
    return (id)self;
}

// Replaced by ObjectAlloc
// Instance -autorelease: try the fast return-value handoff first
// (stash self in TLS when the caller cooperates); otherwise fall
// through to the real autorelease pool.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmismatched-method-attributes"
- (id)autorelease
__attribute__((aligned(16)))
{
    // no tag check here: tagged pointers DO use fast autoreleasing

#if SUPPORT_RETURN_AUTORELEASE
    assert(tls_get_direct(AUTORELEASE_POOL_RECLAIM_KEY) == nil);

    if (callerAcceptsFastAutorelease(__builtin_return_address(0))) {
        tls_set_direct(AUTORELEASE_POOL_RECLAIM_KEY, self);
        return self;
    }
#endif
    return _objc_rootAutorelease2(self);
}
#pragma clang diagnostic pop

// Classes report an effectively-infinite retain count.
+ (NSUInteger)retainCount {
    return ULONG_MAX;
}

- (NSUInteger)retainCount {
    return _objc_rootRetainCount(self);
}

+ (id)alloc {
    return _objc_rootAlloc(self);
}

// Replaced by ObjectAlloc
+ (id)allocWithZone:(struct _NSZone *)zone {
    return _objc_rootAllocWithZone(self, (malloc_zone_t *)zone);
}

// Replaced by CF (throws an NSException)
+ (id)init {
    return (id)self;
}

- (id)init {
    return _objc_rootInit(self);
}

// Replaced by CF (throws an NSException)
+ (void)dealloc {
}

// Replaced by NSZombies
- (void)dealloc {
    _objc_rootDealloc(self);
}

// Replaced by CF (throws an NSException)
+ (void)finalize {
}

- (void)finalize {
    _objc_rootFinalize(self);
}

+ (struct _NSZone *)zone {
    return (struct _NSZone *)_objc_rootZone(self);
}

- (struct _NSZone *)zone {
    return (struct _NSZone *)_objc_rootZone(self);
}

// ----- copying: classes copy to themselves; instances delegate to the
// zone-taking variants (which subclasses must implement) -----

+ (id)copy {
    return (id)self;
}

+ (id)copyWithZone:(struct _NSZone *)zone {
    return (id)self;
}

- (id)copy {
    return [(id)self copyWithZone:nil];
}

+ (id)mutableCopy {
    return (id)self;
}

+ (id)mutableCopyWithZone:(struct _NSZone *)zone {
    return (id)self;
}

- (id)mutableCopy {
    return [(id)self mutableCopyWithZone:nil];
}

@end
1883
// Runtime entry point for retain. nil and tagged pointers pass through
// unchanged; classes with custom retain/release get a real message
// send, everything else jumps straight to the root -retain
// implementation via the asm-aliased bypass function.
__attribute__((aligned(16)))
id
objc_retain(id obj)
{
    if (!obj || obj->isTaggedPointer()) {
        goto out_slow;
    }
#if __OBJC2__
    if (((Class)obj->isa)->hasCustomRR()) {
        return [obj retain];
    }
    return bypass_msgSend_retain(obj);
#else
    return [obj retain];
#endif
 out_slow:
    // clang really wants to reorder the "mov %rdi, %rax" early
    // force better code gen with a data barrier
    asm volatile("");
    return obj;
}
1905
// Runtime entry point for release. nil and tagged pointers are no-ops;
// classes with custom retain/release get a real message send, everything
// else goes directly to the root -release implementation.
__attribute__((aligned(16)))
void
objc_release(id obj)
{
    if (!obj || obj->isTaggedPointer()) {
        return;
    }
#if __OBJC2__
    if (((Class)obj->isa)->hasCustomRR()) {
        return (void)[obj release];
    }
    return bypass_msgSend_release(obj);
#else
    [obj release];
#endif
}
1922
// Runtime entry point for autorelease. nil and tagged pointers pass
// through unchanged; classes with custom retain/release get a real
// message send, everything else goes directly to the root -autorelease
// implementation.
__attribute__((aligned(16)))
id
objc_autorelease(id obj)
{
    if (!obj || obj->isTaggedPointer()) {
        goto out_slow;
    }
#if __OBJC2__
    if (((Class)obj->isa)->hasCustomRR()) {
        return [obj autorelease];
    }
    return bypass_msgSend_autorelease(obj);
#else
    return [obj autorelease];
#endif
 out_slow:
    // clang really wants to reorder the "mov %rdi, %rax" early
    // force better code gen with a data barrier
    asm volatile("");
    return obj;
}
1944
// Retain obj using the root class implementation directly, bypassing
// any method dispatch. Tagged pointers are returned unchanged.
id
_objc_rootRetain(id obj)
{
    assert(obj);
    assert(!UseGC);

    if (obj->isTaggedPointer()) return obj;

    return bypass_msgSend_retain(obj);
}
1955
// Release obj using the root class implementation directly, bypassing
// any method dispatch. No-op for tagged pointers.
void
_objc_rootRelease(id obj)
{
    assert(obj);
    assert(!UseGC);

    if (obj->isTaggedPointer()) return;

    bypass_msgSend_release(obj);
}
1966
// Autorelease obj using the root class implementation directly,
// bypassing any method dispatch. Under GC this is a no-op and obj is
// returned unchanged.
id
_objc_rootAutorelease(id obj)
{
    assert(obj); // root classes shouldn't get here, since objc_msgSend ignores nil
    // assert(!UseGC);

    if (UseGC) {
        return obj;
    }

    // no tag check here: tagged pointers DO use fast autoreleasing

    return bypass_msgSend_autorelease(obj);
}
1981