Weak keyword

weak is a very common keyword in iOS development. This article focuses on one of its main uses:

Using __weak to create a weak reference pointer.

weak is mainly used for memory management. After the object pointed to by a weak pointer wptr is released, the system automatically sets wptr to nil, preventing dangling-pointer crashes when other code later accesses wptr. In this article we follow the runtime source code to explore how the system implements this mechanism.

Code sample

The code is as follows:

int main(int argc, const char * argv[]) {
    @autoreleasepool {

        DemoObject *object1 = [DemoObject new];
        DemoObject *object2 = [DemoObject new];

        // Compiled into a call to objc_initWeak(&wObject, object1)
        __weak id wObject = object1;
        // Compiled into a call to objc_storeWeak(&wObject, object2)
        wObject = object2;
    }
    return 0;
}
Copy the code

The main call flow and corresponding relationship are as follows:

  1. objc_initWeak —> __weak id wObject = object1
  2. objc_storeWeak —> wObject = object2
  3. objc_destroyWeak —> called when the wObject pointer goes out of scope and must be cleaned up
  4. objc_storeStrong —> called (twice) when the object1 and object2 pointers go out of scope and must be released

The source code to track

Let’s follow the libobjc source code to explore the internal implementation of weak. Before we do that, we need to understand a few structures to make the source code easier to read.

Several structures

StripedMap

// RefcountMap disguises its pointers because we // don't want the table to act as a root for `leaks`. typedef objc::DenseMap<DisguisedPtr<objc_object>,size_t,RefcountMapValuePurgeable> RefcountMap; // StripedMap<T> is a map of void* -> T, sized appropriately // for cache-friendly lock striping. // For example, this may be used as StripedMap<spinlock_t> // or as StripedMap<SomeStruct> where SomeStruct stores a spin lock. template<typename T> class StripedMap { #if TARGET_OS_IPHONE && ! TARGET_OS_SIMULATOR enum { StripeCount = 8 }; #else enum { StripeCount = 64 }; #endif ... struct PaddedT { T value alignas(CacheLineSize); }; PaddedT array[StripeCount]; Uintptr_t addr = reinterpret_cast<uintptr_t>(p); return ((addr >> 4) ^ (addr >> 9)) % StripeCount; } T& operator[] (const void *p) {return array[indexForPointer(p)]. Value; }... }Copy the code

StripedMap is a hash table containing an array whose size is 8 on iOS devices (64 elsewhere). Each element of the array is a PaddedT struct, which holds a value of type T. In this example it is simply a fixed-capacity hash table whose values are SideTable instances.

SideTable

// RefcountMap disguises its pointers because we // don't want the table to act as a root for `leaks`. typedef objc::DenseMap<DisguisedPtr<objc_object>,size_t,RefcountMapValuePurgeable> RefcountMap; // Template parameters. enum HaveOld { DontHaveOld = false, DoHaveOld = true }; enum HaveNew { DontHaveNew = false, DoHaveNew = true }; struct SideTable { spinlock_t slock; / / lock RefcountMap refcnts; Weak_table_t weak_table; SideTable() {memset(&weak_table, 0, sizeof(weak_table)); SideTable() {memset(&weak_table, 0, sizeof(weak_table)); } ~SideTable() { _objc_fatal("Do not delete SideTable."); } void lock() { slock.lock(); } void unlock() { slock.unlock(); } void forceReset() { slock.forceReset(); }... };Copy the code

SideTable has three variables:

  • The mutexslock
  • Reference counter tablerefcnts
  • A weak reference tableweak_table

weak_table_t

/**
 * The global weak references table. Stores object ids as keys,
 * and weak_entry_t structs as their values.
 */
struct weak_table_t {
    weak_entry_t *weak_entries;        // hash array of entries
    size_t    num_entries;             // number of entries in use
    uintptr_t mask;                    // capacity - 1
    uintptr_t max_hash_displacement;   // maximum probe distance observed
};

weak_entry_t

typedef DisguisedPtr<objc_object *> weak_referrer_t; // is a pointer to an object. #if __LP64__ #define PTR_MINUS_2 62 // The bits of num_refs #else #define PTR_MINUS_2 30 #endif #define WEAK_INLINE_COUNT 4 // Inline_referrers array size #define REFERRERS_OUT_OF_LINE 2 // Whether to store weak reference pointer struct with array or referrers  weak_entry_t { DisguisedPtr<objc_object> referent; Struct {weak_referrer_t *referrers; // Uintptr_t out_of_line_ness: 2; uintptr_t num_refs : PTR_MINUS_2; uintptr_t mask; uintptr_t max_hash_displacement; }; struct { // out_of_line_ness field is low bits of inline_referrers[1] weak_referrer_t inline_referrers[WEAK_INLINE_COUNT]; / / array}; }; bool out_of_line() { return (out_of_line_ness == REFERRERS_OUT_OF_LINE); } weak_entry_t& operator=(const weak_entry_t& other) { memcpy(this, &other, sizeof(other)); return *this; Weak_entry_t (objC_object *newReferent, objC_object **newReferrer) : referent(newReferent) { inline_referrers[0] = newReferrer; for (int i = 1; i < WEAK_INLINE_COUNT; i++) { inline_referrers[i] = nil; }}};Copy the code

The relationships between these structures are shown in the figure below. With a general understanding of the above structures, we can proceed to the next step: setting a breakpoint on the first weak assignment shown in the image above and choosing Step Into, which takes us straight into the objc_initWeak function.

objc_initWeak

// Initialize a fresh weak pointer to some object location.
id
objc_initWeak(id *location, id newObj)
{
    if (!newObj) {
        *location = nil;
        return nil;
    }

    return storeWeak<DontHaveOld, DoHaveNew, DoCrashIfDeallocating>
        (location, (objc_object*)newObj);
}

This corresponds to: __weak id wObject = object1;

Notice that the storeWeak template parameter is:

  • DontHaveOldThere is no old value
  • DoHaveNewThere is a new value

We will come back to storeWeak shortly; first, let’s look at the objc_storeWeak function.

objc_storeWeak

// This function stores a new value into a __weak variable. It would
// be used anywhere a __weak variable is the target of an assignment.
//
// DoHaveOld: the weak variable already holds a value that must be
// unregistered first. DoHaveNew: a new value is being registered.
id
objc_storeWeak(id *location, id newObj)
{
    return storeWeak<DoHaveOld, DoHaveNew, DoCrashIfDeallocating>
        (location, (objc_object *)newObj);
}
Copy the code

This corresponds to: wObject = object2;

Notice that the storeWeak template parameter is:

  • DoHaveOldThe old value
  • DoHaveNewThere is a new value

As you can see from the above code, the init/store methods are called on initialization and on reassignment, and both end up calling storeWeak. Obviously, the storeWeak function is the focus of our research.

storeWeak

// Update a weak variable. // If HaveOld is true, the variable has an existing value // that needs to be cleaned up. This value might be nil. // If HaveNew is true, there is a new value that needs to be // assigned into the variable. This value might be nil. // If CrashIfDeallocating is true, the process is halted if newObj is // deallocating or newObj's class does not support weak references. // If CrashIfDeallocating is false, nil is stored instead. enum CrashIfDeallocating { DontCrashIfDeallocating = false, DoCrashIfDeallocating = true }; template <HaveOld haveOld, HaveNew haveNew, CrashIfDeallocating crashIfDeallocating> static id storeWeak(id *location, objc_object *newObj) { ASSERT(haveOld || haveNew); if (! haveNew) ASSERT(newObj == nil); / / declare variables need Class previouslyInitializedClass = nil; id oldObj; // weak pointer to the old value SideTable *oldTable; Sidetable sideTable *newTable; // Acquire locks for old and new values. // Order by lock address to prevent lock ordering problems. // Retry if the old value changes underneath us. retry: if (haveOld) { oldObj = *location; Sidetable oldTable = &sidetables ()[oldObj]; sidetable oldTable = &sidetables ()[oldObj]; } else { oldTable = nil; } if (haveNew) { newTable = &SideTables()[newObj]; Sidetable} else {newTable = nil; SideTable::lockTwo<haveOld, haveNew>(oldTable, newTable); If (haveOld && *location!) {if (haveOld && *location! = oldObj) { SideTable::unlockTwo<haveOld, haveNew>(oldTable, newTable); goto retry; } // Prevent a deadlock between the weak reference machinery // and the +initialize machinery by ensuring that no // Weakly referenced object has an UN -+ Initialized isa. // The logic here is to prevent logic related to weak references from the initialize method, as recursive calls can occur before initialize has finished executing, To break through previouslyInitializedClass recursion if (haveNew && newObj) {Class CLS = newObj - > getIsa (); if (cls ! = previouslyInitializedClass && ! 
((objc_class *)cls)->isInitialized()) { SideTable::unlockTwo<haveOld, haveNew>(oldTable, newTable); class_initialize(cls, (id)newObj); // If this class is finished with +initialize then we're good. // If this class is still running +initialize on this thread // (i.e. +initialize called storeWeak on an instance of itself) // then we may proceed but it will appear initializing and // not yet initialized to the check above. // Instead set previouslyInitializedClass to recognize it on  retry. previouslyInitializedClass = cls; goto retry; } } // Clean up old value, Weak_unregister_no_lock (&oldTable-> Weak_table, oldObj, location); weak_unregister_no_lock(&oldTable->weak_table, oldObj, location); weak_unregister_no_lock(&oldTable->weak_table, oldObj, location); } // Assign new value, If any. if (haveNew) {// Register the current weak reference pointer in weak_table where the new value is weak_table. weak_register_no_lock(&newTable->weak_table, (id)newObj, location, crashIfDeallocating); // weak_register_no_lock returns nil if weak store should be rejected // Set is-weakly-referenced bit in refcount table.  if (newObj && ! NewObj ->setWeaklyReferenced_nolock(); newObj->isTaggedPointer(); newObj->isTaggedPointer(); } // Do not set *location anywhere else. That would introduce a race. *location = (id)newObj; } else { // No new value. The storage is not changed. } SideTable::unlockTwo<haveOld, haveNew>(oldTable, newTable); return (id)newObj; }Copy the code

There are many storeWeak codes, which can be roughly divided into the following three steps:

  1. Retrieves the new and old values from the hash tablesidetable
  2. weak_unregister_no_lockClear old values insidetableThe corresponding information in
  3. weak_register_no_lockIn the new value corresponding tosidetableTo register the corresponding information and update the new valueisa.

weak_unregister_no_lock — unregisters the weak pointer

void weak_unregister_no_lock(weak_table_t *weak_table, id referent_id, Id *referrer_id) {objc_object *referent = (objc_object *)referent_id; Objc_object **referrer = (objc_object **)referrer_id; weak_entry_t *entry; if (! referent) return; If ((entry = weak_entry_for_referent(weak_table, referent))) {// Remove the pointer inside the corresponding entry, The weak pointer is removed from the current object. The weak pointer may point to other objects. remove_referrer(entry, referrer); bool empty = true; If (entry->out_of_line() && entry->num_refs! = 0) { empty = false; } else { for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) { if (entry->inline_referrers[i]) { empty = false; break; Weak_entry_remove (weak_table, entry); weak_entry_remove(weak_table, entry); weak_entry_remove(weak_table, entry); } } // Do not set *referrer = nil. objc_storeWeak() requires that the // value not change. }Copy the code

After combing through the logic, the process is clear:

  1. inweak_tableTo look forreferent_id(object)The correspondingentry
  2. fromentryRemove theReferrer (pointer to weak)
  3. entryHandles removal when nullentryThe logic of the

weak_register_no_lock — registers the weak pointer

id weak_register_no_lock(weak_table_t *weak_table, id referent_id, id *referrer_id, bool crashIfDeallocating) { // object objc_object *referent = (objc_object *)referent_id; Objc_object **referrer = (objc_object **)referrer_id; if (! referent || referent->isTaggedPointer()) return referent_id; // ensure that the referenced object is viable bool deallocating; if (! referent->ISA()->hasCustomRR()) { deallocating = referent->rootIsDeallocating(); } else { BOOL (*allowsWeakReference)(objc_object *, SEL) = (BOOL(*)(objc_object *, SEL)) object_getMethodImplementation((id)referent, @selector(allowsWeakReference)); if ((IMP)allowsWeakReference == _objc_msgForward) { return nil; } deallocating = ! (*allowsWeakReference)(referent, @selector(allowsWeakReference)); } if (deallocating) { if (crashIfDeallocating) { _objc_fatal("Cannot form weak reference to instance (%p) of " "class %s. It is possible that this object was " "over-released, or is in the process of deallocation.", (void*)referent, object_getClassName((id)referent)); } else { return nil; } } // now remember it and where it is being stored weak_entry_t *entry; Weak_entry_t */ if ((entry = weak_entry_for_referent(weak_table, referent))) {// Insert address into the entry array, This involves changing the internal structure of the entry append_referrer(entry, referrer); Weak_entry_t new_entry(referent, referrer);} else {// No entry was found. /** (object, pointer) // After adding weak_table may have capacity change weak_grow_maybe(weak_table); Weak_entry_insert (weak_table, &new_entry); // Entry inserted into weak_table. } // Do not set *referrer. objc_storeWeak() requires that the // value not change. return referent_id; }Copy the code

Similar to deregistration, after finding the corresponding entry through object, the address of weak reference pointer is stored in the corresponding entry. If no entry is found, a new entry needs to be created and inserted into weak_table.

objc_destroyWeak

In the previous code, when the scope ends, the weak pointer must be disassociated from the object it points to; this is done by calling the objc_destroyWeak function.

// Destroys the relationship between a weak pointer and the object it
// references, removing the pointer's address from the weak table.
// Called by the compiler when a __weak variable goes out of scope.
void
objc_destroyWeak(id *location)
{
    (void)storeWeak<DoHaveOld, DontHaveNew, DontCrashIfDeallocating>
        (location, nil);
}
Copy the code

The storeWeak function is essentially called, but the arguments passed in are DoHaveOld, DontHaveNew — so only the unregister path (clean-up of the old value) is executed; nothing new is registered.

Weak pointer is automatically set to nil

Since weak does not increase the reference count of an object, the object can be released while weak pointers still point to it. When the object pointed to by a weak pointer is released, the system automatically sets the weak pointer to nil to prevent dangling-pointer crashes. We start from the dealloc method to see how that works.

release

Set a breakpoint in dealloc and look at the call stack: when object1 goes out of scope, the system calls objc_storeStrong(id *location, id obj), where location records the address of the object1 pointer and obj is nil. In this situation, objc_storeStrong mainly triggers objc_release — i.e. the release of object1.

By the way, here’s what follows, just for the moment:

  1. Through object1’s isa_t, the extra_rc field (the inline reference count) is decremented (-- operation).
  2. When extra_rc underflows, the isa_t has_sidetable_rc flag is checked; here it shows that no sidetable is being used to store additional reference counts.
  3. At this point object1 enters its dealloc flow, which is executed through message sending.

dealloc

A disassembly of the dealloc function is shown below. It can be seen that dealloc ultimately calls through to the superclass implementation.

- (void)dealloc { _objc_rootDealloc(self); } void _objc_rootDealloc(id obj) { ASSERT(obj); obj->rootDealloc(); } inline void objc_object::rootDealloc() { if (isTaggedPointer()) return; // fixme necessary? if (fastpath(isa.nonpointer && ! isa.weakly_referenced && ! isa.has_assoc && ! isa.has_cxx_dtor && ! isa.has_sidetable_rc)) { assert(! sidetable_present()); free(this); } else { object_dispose((id)this); }}Copy the code

You can see that a judgment is made in the rootDealloc method:

* Not nonpointer * Weakly_referenced: weak reference * has_assoc: associated object * has_cxx_dtor: CXX destructor * has_sidetable_rc: The sideTable is used to store additional reference countsCopy the code

If any of the above conditions is matched, the object_dispose function is executed, otherwise the memory space of the current object is directly released. Among the five conditions, one of them is weakly_referenced.

id object_dispose(id obj) { if (! obj) return nil; objc_destructInstance(obj); free(obj); return nil; } void *objc_destructInstance(id obj) { if (obj) { // Read all of the flags at once for performance. bool cxx = obj->hasCxxDtor(); bool assoc = obj->hasAssociatedObjects(); // This order is important. if (cxx) object_cxxDestruct(obj); If (assoc) _object_remove_assocations(obj); // Remove the associated object obj->clearDeallocating(); } return obj; } inline void objc_object::clearDeallocating() { if (slowpath(! isa.nonpointer)) { // Slow path for raw pointer isa. sidetable_clearDeallocating(); } else if (slowpath(isa.weakly_referenced || isa.has_sidetable_rc)) { // Slow path for non-pointer isa with weak refs // Handle non-pointer isa clearDeallocating_slow() with weak references or with sideTable to store reference counts; } assert(! sidetable_present()); } NEVER_INLINE void objc_object::clearDeallocating_slow() { ASSERT(isa.nonpointer && (isa.weakly_referenced || isa.has_sidetable_rc)); SideTable& table = SideTables()[this]; table.lock(); If (ISa.weakly_referenced) {// Clear weak_no_lock (&table. Weak_table, (id)this); } if (isa.has_sidetable_rc) {table.refcnt.erase (this); } table.unlock(); }Copy the code

weak_clear_no_lock

It takes a long time to get to the part related to the weak pointer.

void weak_clear_no_lock(weak_table_t *weak_table, id referent_id) { objc_object *referent = (objc_object *)referent_id; Weak_entry_t *entry = Weak_entry_for_referent (weak_table, referent); if (entry == nil) { /// XXX shouldn't happen, but does with mismatched CF/objc //printf("XXX no entry for clear deallocating %p\n", referent); return; } // zero out references weak_referrer_t *referrers; size_t count; If (entry->out_of_line()) {// If in a non-inline way, count = mask+1 referrers = entry->referrers; count = TABLE_SIZE(entry); } else {// inline count = 4 referrers = entry->inline_referrers; count = WEAK_INLINE_COUNT; } for (size_t i = 0; i < count; ++i) { objc_object **referrer = referrers[i]; If (*referrer == referent) {*referrer = nil; } else if (*referrer) { _objc_inform("__weak variable at %p holds %p instead of %p. " "This is probably incorrect use of  " "objc_storeWeak() and objc_loadWeak(). " "Break on objc_weak_error to debug.\n", referrer, (void*)*referrer, (void*)referent); objc_weak_error(); Weak_entry_remove (weak_table, entry); weak_entry_remove(weak_table, entry); }Copy the code

So far, we’ve explained how the weak pointer automatically sets nil when the object it points to is released.

Some specific implementation of weak_entry_t and Weak_table operations

weak_entry_for_referent

Find the corresponding entry of object in weak_table

static weak_entry_t * weak_entry_for_referent(weak_table_t *weak_table, objc_object *referent) { ASSERT(referent); weak_entry_t *weak_entries = weak_table->weak_entries; if (! weak_entries) return nil; Size_t begin = hash_pointer(referent) & Weak_table ->mask; // Use hash function to get object position in Weak_entries. size_t index = begin; size_t hash_displacement = 0; Weak_table -> Weak_entries [index].referent! = referent) { index = (index+1) & weak_table->mask; if (index == begin) bad_weak_table(weak_table->weak_entries); hash_displacement++; if (hash_displacement > weak_table->max_hash_displacement) { return nil; } } return &weak_table->weak_entries[index]; }Copy the code

append_referrer

Puts the pointer address into the corresponding entry

static void append_referrer(weak_entry_t *entry, objc_object **new_referrer) { if (! Entry ->out_of_line()) {// Try to insert inline. // Insert for (size_t I = 0; i < WEAK_INLINE_COUNT; i++) { if (entry->inline_referrers[i] == nil) { entry->inline_referrers[i] = new_referrer; return; }} // Inline is already full, Weak_referrer_t *new_referrers = (weak_referrer_t *) weak_referrers = (weak_referrer_t *) calloc(WEAK_INLINE_COUNT, sizeof(weak_referrer_t)); // This constructed table is invalid, But grow_refs_and_insert // will fix it and rehash it. Because new_referrers will be expanded later... for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) { new_referrers[i] = entry->inline_referrers[i]; } entry->referrers = new_referrers; entry->num_refs = WEAK_INLINE_COUNT; entry->out_of_line_ness = REFERRERS_OUT_OF_LINE; entry->mask = WEAK_INLINE_COUNT-1; entry->max_hash_displacement = 0; } ASSERT(entry->out_of_line()); // Store more entries than 3/4 of the capacity, If (entry->num_refs >= TABLE_SIZE(entry) * 3/4) {return grow_refs_and_insert(entry, new_referrer); Size_t begin = w_hash_pointer(new_referrer) & (entry->mask); size_t index = begin; size_t hash_displacement = 0; while (entry->referrers[index] ! = nil) { hash_displacement++; index = (index+1) & entry->mask; if (index == begin) bad_weak_table(entry); } if (hash_displacement > entry->max_hash_displacement) {// Store the number of steps taken to find an empty space at the last inserted address hash_displacement; } weak_referrer_t &ref = entry->referrers[index]; ref = new_referrer; entry->num_refs++; }Copy the code

grow_refs_and_insert

Entry Expands and inserts the address of a weak reference pointer

__attribute__((noinline, used)) static void grow_refs_and_insert(weak_entry_t *entry, objc_object **new_referrer) { ASSERT(entry->out_of_line()); size_t old_size = TABLE_SIZE(entry); Size_t new_size = old_size? old_size * 2 : 8; size_t num_refs = entry->num_refs; weak_referrer_t *old_refs = entry->referrers; entry->mask = new_size - 1; entry->referrers = (weak_referrer_t *) calloc(TABLE_SIZE(entry), sizeof(weak_referrer_t)); entry->num_refs = 0; entry->max_hash_displacement = 0; For (size_t I = 0; i < old_size && num_refs > 0; i++) { if (old_refs[i] ! = nil) { append_referrer(entry, old_refs[i]); num_refs--; } // Insert the new address append_referrer(entry, new_referrer); // Free old entry if (old_refs) free(old_refs); }Copy the code

remove_referrer

static void remove_referrer(weak_entry_t *entry, objc_object **old_referrer) { if (! Entry ->out_of_line()) {// Inline only needs to remove the corresponding address for (size_t I = 0; i < WEAK_INLINE_COUNT; i++) { if (entry->inline_referrers[i] == old_referrer) { entry->inline_referrers[i] = nil; return; } } _objc_inform("Attempted to unregister unknown __weak variable " "at %p. This is probably incorrect use of " "objc_storeWeak() and objc_loadWeak(). " "Break on objc_weak_error to debug.\n", old_referrer); objc_weak_error(); return; } size_t begin = w_hash_pointer(old_referrer) & (entry->mask); size_t index = begin; size_t hash_displacement = 0; while (entry->referrers[index] ! = old_referrer) { index = (index+1) & entry->mask; if (index == begin) bad_weak_table(entry); hash_displacement++; // Exceeding the maximum number of steps indicates an exception, since max_hash_displacement currently records the maximum number of steps, which should be an optimization. if (hash_displacement > entry->max_hash_displacement) { _objc_inform("Attempted to unregister unknown __weak variable " "at %p. This is probably incorrect use of " "objc_storeWeak() and objc_loadWeak(). " "Break on objc_weak_error to debug.\n", old_referrer); objc_weak_error(); return; } // Referrers [index] = nil; referrers[index] = nil; entry->num_refs--; }Copy the code

weak_entry_remove

Remove the entry

/**
 * Remove an entry (one referenced object and all the weak pointers
 * registered against it) from the weak table.
 */
static void weak_entry_remove(weak_table_t *weak_table, weak_entry_t *entry)
{
    // remove entry
    // Out-of-line entries own a heap-allocated referrers array; free it.
    if (entry->out_of_line()) free(entry->referrers);
    // Zero the slot so it reads as empty during future hash probes.
    bzero(entry, sizeof(*entry));

    weak_table->num_entries--;

    // Shrink the table if it is now mostly empty.
    weak_compact_maybe(weak_table);
}

Copy the code

weak_compact_maybe

The size of weak_table is too large, but the number of internal entries is too small, so the capacity needs to be reduced

// Shrink the table if it is mostly empty.
// Called after entry removal; avoids keeping a huge, sparsely
// populated hash array alive after many weak references are gone.
static void weak_compact_maybe(weak_table_t *weak_table)
{
    size_t old_size = TABLE_SIZE(weak_table);

    // Shrink if larger than 1024 buckets and at most 1/16 full.
    // Dividing by 8 (not 16) keeps the new table at most 1/2 full,
    // so the very next insertion will not immediately trigger a grow.
    if (old_size >= 1024  && old_size / 16 >= weak_table->num_entries) {
        weak_resize(weak_table, old_size / 8);
        // leaves new table no more than 1/2 full
    }
}
Copy the code

weak_grow_maybe

Check whether the Weak_table needs to be expanded

// Grow the given table of weak references if it is full.
static void weak_grow_maybe(weak_table_t *weak_table)
{
    size_t old_size = TABLE_SIZE(weak_table);

    // Grow if at least 3/4 full.
    if (weak_table->num_entries >= old_size * 3 / 4) {
        weak_resize(weak_table, old_size ? old_size * 2 : 64);
    }
}

weak_resize

static void weak_resize(weak_table_t *weak_table, size_t new_size) { size_t old_size = TABLE_SIZE(weak_table); weak_entry_t *old_entries = weak_table->weak_entries; weak_entry_t *new_entries = (weak_entry_t *) calloc(new_size, sizeof(weak_entry_t)); weak_table->mask = new_size - 1; weak_table->weak_entries = new_entries; weak_table->max_hash_displacement = 0; weak_table->num_entries = 0; // restored by weak_entry_insert below if (old_entries) { weak_entry_t *entry; Weak_entry_t *end = old_entries + old_size; weak_entry_t *end = old_entries + old_size; For (entry = old_entries; entry < end; entry++) { if (entry->referent) { weak_entry_insert(weak_table, entry); } } free(old_entries); }}Copy the code