/*
 * Copyright (c) 2010-2011 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

#include "objc-private.h"
#include "objc-weak.h"

#include <stdint.h>
#include <stdbool.h>
#include <sys/types.h>
#include <libkern/OSAtomic.h>

#define TABLE_SIZE(entry) (entry->mask ? entry->mask + 1 : 0)

static void append_referrer(weak_entry_t *entry, id *new_referrer);

/**
 * Unique hash function for object pointers only.
 *
 * @param key The object pointer
 *
 * @return Size unrestricted hash of pointer.
 */
static inline uintptr_t hash_pointer(id key) {
    uintptr_t k = (uintptr_t)key;
    return (k >> 4) ^ (k >> 9);
}

/**
 * Unique hash function for weak object pointers only.
 *
 * @param key The weak object pointer.
 *
 * @return Size unrestricted hash of pointer.
 */
static inline uintptr_t w_hash_pointer(id *key) {
    uintptr_t k = (uintptr_t)key;
    return (sizeof(size_t) == 8) ? (k >> 3) : (k >> 2);
}

/**
 * Grow the entry's hash table of referrers. Rehashes each
 * of the referrers.
 *
 * @param entry Weak pointer hash set for a particular object.
 */
__attribute__((noinline, used))
static void grow_refs_and_insert(weak_entry_t *entry, id *new_referrer)
{
    assert(entry->out_of_line);

    size_t old_size = TABLE_SIZE(entry);
    size_t new_size = old_size ? old_size * 2 : 8;

    size_t num_refs = entry->num_refs;
    weak_referrer_t *old_refs = entry->referrers;
    entry->mask = new_size - 1;

    entry->referrers = (weak_referrer_t *)
        _calloc_internal(TABLE_SIZE(entry), sizeof(weak_referrer_t));
    entry->num_refs = 0;
    entry->max_hash_displacement = 0;

    for (size_t i = 0; i < old_size && num_refs > 0; i++) {
        if (old_refs[i] != nil) {
            append_referrer(entry, old_refs[i]);
            num_refs--;
        }
    }
    // Insert
    append_referrer(entry, new_referrer);
    if (old_refs) _free_internal(old_refs);
}

/**
 * Add the given referrer to set of weak pointers in this entry.
 * Does not perform duplicate checking (b/c weak pointers are never
 * added to a set twice).
 *
 * @param entry The entry holding the set of weak pointers.
 * @param new_referrer The new weak pointer to be added.
 */
static void append_referrer(weak_entry_t *entry, id *new_referrer)
{
    if (! entry->out_of_line) {
        // Try to insert inline.
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            if (entry->inline_referrers[i] == nil) {
                entry->inline_referrers[i] = new_referrer;
                return;
            }
        }

        // Couldn't insert inline. Allocate out of line.
        weak_referrer_t *new_referrers = (weak_referrer_t *)
            _calloc_internal(WEAK_INLINE_COUNT, sizeof(weak_referrer_t));
        // This constructed table is invalid, but grow_refs_and_insert
        // will fix it and rehash it.
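        // (Invalid because the copied referrers keep their inline slot
        // positions rather than their hashed positions, and the table is
        // already completely full, so the 3/4 check below immediately sends
        // the new referrer through grow_refs_and_insert, which reallocates
        // and rehashes everything.)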
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            new_referrers[i] = entry->inline_referrers[i];
        }
        entry->referrers = new_referrers;
        entry->num_refs = WEAK_INLINE_COUNT;
        entry->out_of_line = 1;
        entry->mask = WEAK_INLINE_COUNT-1;
        entry->max_hash_displacement = 0;
    }

    assert(entry->out_of_line);

    if (entry->num_refs >= TABLE_SIZE(entry) * 3/4) {
        return grow_refs_and_insert(entry, new_referrer);
    }
    size_t index = w_hash_pointer(new_referrer) & (entry->mask);
    size_t hash_displacement = 0;
    while (entry->referrers[index] != NULL) {
        index = (index+1) & entry->mask;
        hash_displacement++;
    }
    if (hash_displacement > entry->max_hash_displacement) {
        entry->max_hash_displacement = hash_displacement;
    }
    weak_referrer_t &ref = entry->referrers[index];
    ref = new_referrer;
    entry->num_refs++;
}

/**
 * Remove old_referrer from set of referrers, if it's present.
 * Does not remove duplicates, because duplicates should not exist.
 *
 * @todo this is slow if old_referrer is not present. But, is this ever the case?
 *
 * @param entry The entry holding the referrers.
 * @param old_referrer The referrer to remove.
 */
static void remove_referrer(weak_entry_t *entry, id *old_referrer)
{
    if (! entry->out_of_line) {
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            if (entry->inline_referrers[i] == old_referrer) {
                entry->inline_referrers[i] = nil;
                return;
            }
        }
        _objc_inform("attempted to remove unregistered weak referrer %p\n",
                     old_referrer);
        // An inline entry has no out-of-line table to probe.
        return;
    }

    size_t index = w_hash_pointer(old_referrer) & (entry->mask);
    size_t hash_displacement = 0;
    while (entry->referrers[index] != old_referrer) {
        index = (index+1) & entry->mask;
        hash_displacement++;
        if (hash_displacement > entry->max_hash_displacement) {
            _objc_inform("attempted to remove unregistered weak referrer %p\n",
                         old_referrer);
            return;
        }
    }
    entry->referrers[index] = nil;
    entry->num_refs--;
}

/**
 * Add new_entry to the object's table of weak references.
 * Does not check whether the referent is already in the table.
 */
static void weak_entry_insert(weak_table_t *weak_table, weak_entry_t *new_entry)
{
    weak_entry_t *weak_entries = weak_table->weak_entries;
    assert(weak_entries != nil);

    size_t index = hash_pointer(new_entry->referent) & (weak_table->mask);
    size_t hash_displacement = 0;
    while (weak_entries[index].referent != nil) {
        index = (index+1) & weak_table->mask;
        hash_displacement++;
    }

    weak_entries[index] = *new_entry;
    weak_table->num_entries++;

    if (hash_displacement > weak_table->max_hash_displacement) {
        weak_table->max_hash_displacement = hash_displacement;
    }
}


static void weak_resize(weak_table_t *weak_table, size_t new_size)
{
    size_t old_size = TABLE_SIZE(weak_table);

    weak_entry_t *old_entries = weak_table->weak_entries;
    weak_entry_t *new_entries = (weak_entry_t *)
        _calloc_internal(new_size, sizeof(weak_entry_t));

    weak_table->mask = new_size - 1;
    weak_table->weak_entries = new_entries;
    weak_table->max_hash_displacement = 0;
    weak_table->num_entries = 0;  // restored by weak_entry_insert below

    if (old_entries) {
        weak_entry_t *entry;
        weak_entry_t *end = old_entries + old_size;
        for (entry = old_entries; entry < end; entry++) {
            if (entry->referent) {
                weak_entry_insert(weak_table, entry);
            }
        }
        _free_internal(old_entries);
    }
}

// Grow the given zone's table of weak references if it is full.
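// (For example, a 64-bucket table is doubled to 128 buckets by the insert
// that finds it already holding 48 entries, i.e. 3/4 full.)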
static void weak_grow_maybe(weak_table_t *weak_table)
{
    size_t old_size = TABLE_SIZE(weak_table);

    // Grow if at least 3/4 full.
    if (weak_table->num_entries >= old_size * 3 / 4) {
        weak_resize(weak_table, old_size ? old_size*2 : 64);
    }
}

// Shrink the table if it is mostly empty.
static void weak_compact_maybe(weak_table_t *weak_table)
{
    size_t old_size = TABLE_SIZE(weak_table);

    // Shrink if larger than 1024 buckets and at most 1/16 full.
    if (old_size >= 1024  &&  old_size / 16 >= weak_table->num_entries) {
        weak_resize(weak_table, old_size / 8);
        // leaves new table no more than 1/2 full
    }
}

/**
 * Remove entry from the zone's table of weak references.
 */
static void weak_entry_remove(weak_table_t *weak_table, weak_entry_t *entry)
{
    // remove entry
    if (entry->out_of_line) _free_internal(entry->referrers);
    bzero(entry, sizeof(*entry));

    weak_table->num_entries--;

    weak_compact_maybe(weak_table);
}


/**
 * Return the weak reference table entry for the given referent.
 * If there is no entry for referent, return NULL.
 * Performs a lookup.
 *
 * @param weak_table
 * @param referent The object. Must not be nil.
 *
 * @return The table of weak referrers to this object.
 */
static weak_entry_t *
weak_entry_for_referent(weak_table_t *weak_table, id referent)
{
    assert(referent);

    weak_entry_t *weak_entries = weak_table->weak_entries;

    if (!weak_entries) return nil;

    size_t index = hash_pointer(referent) & weak_table->mask;
    size_t hash_displacement = 0;
    while (weak_table->weak_entries[index].referent != referent) {
        index = (index+1) & weak_table->mask;
        hash_displacement++;
        if (hash_displacement > weak_table->max_hash_displacement) {
            return nil;
        }
    }

    return &weak_table->weak_entries[index];
}

/**
 * Unregister an already-registered weak reference.
 * This is used when referrer's storage is about to go away, but referent
 * isn't dead yet. (Otherwise, zeroing referrer later would be a
 * bad memory access.)
 * Does nothing if referent/referrer is not a currently active weak reference.
 * Does not zero referrer.
 *
 * FIXME currently requires old referent value to be passed in (lame)
 * FIXME unregistration should be automatic if referrer is collected
 *
 * @param weak_table The global weak table.
 * @param referent The object.
 * @param referrer The weak reference.
 */
void
weak_unregister_no_lock(weak_table_t *weak_table, id referent, id *referrer)
{
    weak_entry_t *entry;

    if (!referent) return;

    if ((entry = weak_entry_for_referent(weak_table, referent))) {
        remove_referrer(entry, referrer);
        bool empty = true;
        if (entry->out_of_line  &&  entry->num_refs != 0) {
            empty = false;
        }
        else {
            for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
                if (entry->inline_referrers[i]) {
                    empty = false;
                    break;
                }
            }
        }

        if (empty) {
            weak_entry_remove(weak_table, entry);
        }
    }

    // Do not set *referrer = nil. objc_storeWeak() requires that the
    // value not change.
}

/**
 * Registers a new (object, weak pointer) pair. Creates a new weak
 * object entry if it does not exist.
 *
 * @param weak_table The global weak table.
 * @param referent The object pointed to by the weak reference.
 * @param referrer The weak pointer address.
 */
id
weak_register_no_lock(weak_table_t *weak_table, id referent, id *referrer)
{
    if (referent  &&  !referent->isTaggedPointer()) {
        // ensure that the referenced object is viable
        BOOL (*allowsWeakReference)(id, SEL) =
            (BOOL(*)(id, SEL))
            object_getMethodImplementation(referent,
                                           @selector(allowsWeakReference));
        if ((IMP)allowsWeakReference != _objc_msgForward) {
            if (! (*allowsWeakReference)(referent,
                                         @selector(allowsWeakReference)))
            {
                _objc_fatal("Cannot form weak reference to instance (%p) of "
                            "class %s. "
                            "It is possible that this object was "
                            "over-released, or is in the process of "
                            "deallocation.",
                            (void*)referent, object_getClassName(referent));
            }
        }
        else {
            return nil;
        }

        // now remember it and where it is being stored
        weak_entry_t *entry;
        if ((entry = weak_entry_for_referent(weak_table, referent))) {
            append_referrer(entry, referrer);
        }
        else {
            weak_entry_t new_entry;
            new_entry.referent = referent;
            new_entry.out_of_line = 0;
            new_entry.inline_referrers[0] = referrer;
            for (size_t i = 1; i < WEAK_INLINE_COUNT; i++) {
                new_entry.inline_referrers[i] = nil;
            }

            weak_grow_maybe(weak_table);
            weak_entry_insert(weak_table, &new_entry);
        }
    }

    // Do not set *referrer. objc_storeWeak() requires that the
    // value not change.

    return referent;
}

/**
 * Called by dealloc; nils out all weak pointers that point to the
 * provided object so that they can no longer be used.
 *
 * @param weak_table
 * @param referent The object being deallocated.
 */
void
weak_clear_no_lock(weak_table_t *weak_table, id referent)
{
    weak_entry_t *entry = weak_entry_for_referent(weak_table, referent);
    if (entry == nil) {
        /// XXX shouldn't happen, but does with mismatched CF/objc
        //printf("XXX no entry for clear deallocating %p\n", referent);
        return;
    }

    // zero out references
    weak_referrer_t *referrers;
    size_t count;

    if (entry->out_of_line) {
        referrers = entry->referrers;
        count = TABLE_SIZE(entry);
    }
    else {
        referrers = entry->inline_referrers;
        count = WEAK_INLINE_COUNT;
    }

    for (size_t i = 0; i < count; ++i) {
        id *referrer = referrers[i];
        if (referrer) {
            if (*referrer == referent) {
                *referrer = nil;
            }
            else if (*referrer) {
                _objc_inform("__weak variable @ %p holds %p instead of %p\n",
                             referrer, (void*)*referrer, (void*)referent);
            }
        }
    }

    weak_entry_remove(weak_table, entry);
}

/**
 * This function gets called when the value of a weak pointer is being
 * used in an expression. Called by objc_loadWeakRetained() which is
 * ultimately called by objc_loadWeak(). The objective is to assert that
 * there is in fact a weak pointer(s) entry for this particular object being
 * stored in the weak-table, and to retain that object so it is not deallocated
 * during the weak pointer's usage.
 *
 * @param weak_table
 * @param referrer The weak pointer address.
 */
id
weak_read_no_lock(weak_table_t *weak_table, id *referrer)
{
    id referent = *referrer;
    // Check for nil before calling isTaggedPointer() on the referent.
    if (referent == nil) return nil;
    if (referent->isTaggedPointer()) return referent;

    weak_entry_t *entry;
    if (!(entry = weak_entry_for_referent(weak_table, referent))) {
        *referrer = nil;
        return nil;
    }

    BOOL (*tryRetain)(id, SEL) = (BOOL(*)(id, SEL))
        object_getMethodImplementation(referent,
                                       @selector(retainWeakReference));
    if ((IMP)tryRetain == _objc_msgForward) {
        *referrer = nil;
        return nil;
    }
    if (! (*tryRetain)(referent, @selector(retainWeakReference))) {
        return nil;
    }

    return referent;
}
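
/**
 * Usage sketch (illustrative only, not part of this file): the *_no_lock
 * functions above assume the caller already holds the lock that guards
 * weak_table. A store in the style of objc_storeWeak() would, roughly,
 * unregister the old referent, register the new one, and then update the
 * weak variable itself. lock()/unlock() and store_weak_sketch are
 * hypothetical stand-ins for whatever synchronization the caller uses.
 *
 *   static id store_weak_sketch(weak_table_t *weak_table,
 *                               id *location, id newObj)
 *   {
 *       lock();
 *       id oldObj = *location;
 *       if (oldObj) {
 *           // break the old association before overwriting the pointer
 *           weak_unregister_no_lock(weak_table, oldObj, location);
 *       }
 *       if (newObj) {
 *           // may return nil if newObj does not allow weak references
 *           newObj = weak_register_no_lock(weak_table, newObj, location);
 *       }
 *       *location = newObj;
 *       unlock();
 *       return newObj;
 *   }
 */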