In development we often use the weak keyword, which declares weak-reference properties and weak-reference pointers:
@property (nonatomic, weak) id<Delegate> delegate;
__weak typeof(self) weakSelf = self;
- If strong reference pointers still point to an object when it should be released, the object is not freed; if two objects strongly hold each other, neither can ever be freed and memory leaks.
- If weak reference pointers point to an object being released, the runtime sets those weak pointers to nil before the object is freed, which avoids dangling pointers and the leaks caused by retain cycles.
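A minimal sketch of the second point, assuming ARC and the JPerson class used in the example below:

@autoreleasepool {
    JPerson *person = [JPerson new];
    __weak JPerson *weakPerson = person;   // weak reference: does not retain person
    person = nil;                          // the last strong reference is gone, person is deallocated
    NSLog(@"%@", weakPerson);              // prints (null): the runtime zeroed the weak pointer first
}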
Today we explore the underlying implementation of weak. Consider the following code:
int main(int argc, char * argv[]) {
    @autoreleasepool {
        JPerson *person1 = [JPerson new];
        JPerson *person2 = [JPerson new];
        __weak id wPerson = person1;
        wPerson = person2;
    }
    return 0;
}
objc_initWeak
Declares a weak pointer; corresponds to __weak id wPerson = person1
objc_storeWeak
Assigns to an existing weak pointer; corresponds to wPerson = person2
objc_destroyWeak
Destroys the weak pointer when wPerson goes out of scope
objc_storeStrong
Releases the strong pointers when they go out of scope; person1 and person2 are two strong pointers, so it is called twice
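Viewed through the runtime, the body of the main function above is roughly equivalent to the following sketch (pseudo-code, not the literal code the compiler generates):

JPerson *person1 = [JPerson new];
JPerson *person2 = [JPerson new];

id wPerson;
objc_initWeak(&wPerson, person1);      // __weak id wPerson = person1;
objc_storeWeak(&wPerson, person2);     // wPerson = person2;

objc_destroyWeak(&wPerson);            // wPerson leaves scope
objc_storeStrong(&person2, nil);       // the two strong pointers leave scope,
objc_storeStrong(&person1, nil);       // hence objc_storeStrong is called twice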
Open the objc4-781.2 source code and search for objc_initWeak. Internally, objc_initWeak calls storeWeak, and storeWeak works with the SideTables() structure:
static objc::ExplicitInit<StripedMap<SideTable>> SideTablesMap;
static StripedMap<SideTable>& SideTables() {
return SideTablesMap.get();
}
The data structures
StripedMap
enum { CacheLineSize = 64 };

// StripedMap<T> is a map of void* -> T, sized
// for cache-friendly lock striping.
// For example, this may be used as StripedMap<spinlock_t>
// or as StripedMap<SomeStruct> where SomeStruct stores a spin lock.
template<typename T>
class StripedMap {
#if TARGET_OS_IPHONE && !TARGET_OS_SIMULATOR
    enum { StripeCount = 8 };    // real iPhone devices
#else
    enum { StripeCount = 64 };   // simulator and other platforms
#endif

    // Each slot pads/aligns its value to a cache line
    struct PaddedT {
        T value alignas(CacheLineSize);
    };

    // The storage is a fixed-length array of PaddedT, StripeCount slots long
    PaddedT array[StripeCount];

    // StripedMap is a hash table; this is its hash algorithm,
    // mapping a pointer to an index into the array
    static unsigned int indexForPointer(const void *p) {
        uintptr_t addr = reinterpret_cast<uintptr_t>(p);
        return ((addr >> 4) ^ (addr >> 9)) % StripeCount;
    }

 public:
    // Subscripting by an object pointer returns the T stored in its slot
    T& operator[] (const void *p) {
        return array[indexForPointer(p)].value;
    }
    const T& operator[] (const void *p) const {
        return const_cast<StripedMap<T>>(this)[p];
    }
    ...
};
StripedMap is a fixed-length hash table whose slots hold the struct PaddedT, and PaddedT stores a value of the generic type T. Here the concrete type of T is SideTable.
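For example, this is roughly how an object's address is mapped to its SideTable slot (an illustrative sketch; objectPointer stands for any object address, and StripeCount is 64 on the simulator):

uintptr_t addr = (uintptr_t)objectPointer;
unsigned int index = ((addr >> 4) ^ (addr >> 9)) % 64;   // same formula as indexForPointer
// SideTables()[objectPointer] then returns array[index].value, which is a SideTable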
SideTable
// Template parameters.
enum HaveOld { DontHaveOld = false, DoHaveOld = true };
enum HaveNew { DontHaveNew = false, DoHaveNew = true };

struct SideTable {
    spinlock_t slock;          // a spin lock protecting this table
    RefcountMap refcnts;       // the reference-count map
    weak_table_t weak_table;   // the weak table

    // Constructor: zero out the weak table
    SideTable() {
        memset(&weak_table, 0, sizeof(weak_table));
    }

    // Destructor: a SideTable must never be deleted
    ~SideTable() {
        _objc_fatal("Do not delete SideTable.");
    }

    void lock() { slock.lock(); }
    void unlock() { slock.unlock(); }
    void forceReset() { slock.forceReset(); }

    // Address-ordered lock discipline for a pair of side tables.
    template<HaveOld, HaveNew>
    static void lockTwo(SideTable *lock1, SideTable *lock2);
    template<HaveOld, HaveNew>
    static void unlockTwo(SideTable *lock1, SideTable *lock2);
};
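So every object hashes to one SideTable, which bundles a spin lock, the reference-count map and the weak table. Throughout the runtime the access pattern looks roughly like this sketch (obj stands for any object pointer):

SideTable *table = &SideTables()[obj];   // hash the object address to its SideTable
table->lock();                           // take that table's spin lock
// ... read or modify table->refcnts / table->weak_table ...
table->unlock();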
weak_table_t
/**
 * The global weak references table.
 * Stores object ids as keys, and weak_entry_t structs as their values.
 */
struct weak_table_t {
    weak_entry_t *weak_entries;        // hash array that stores the weak_entry_t entries
    size_t    num_entries;             // number of weak_entry_t currently stored
    uintptr_t mask;                    // capacity of the hash array minus one
    uintptr_t max_hash_displacement;   // maximum hash displacement (longest probe distance seen)
};
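mask is always the capacity minus one, and capacities are powers of two, so "hash & mask" always yields a valid index; collisions are resolved by linear probing. A tiny sketch of the arithmetic, assuming a capacity of 64:

uintptr_t mask  = 64 - 1;                          // 0b111111
size_t    index = hash_pointer(referent) & mask;   // first probe, always in [0, 63]
size_t    next  = (index + 1) & mask;              // linear probing wraps around the table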
weak_entry_t
There are a lot of comments here; read them patiently, because the comments often contain very useful information.
/*
The weak table is a hash table governed by a single spin lock.
An allocated blob of memory, most often an object, but under GC any such
allocation, may have its address stored in a __weak marked storage location
through use of compiler generated write-barriers or hand coded uses of the
register weak primitive. Associated with the registration can be a callback
block for the case when one of the allocated chunks of memory is reclaimed.
The table is hashed on the address of the allocated memory. When __weak
marked memory changes its reference, we count on the fact that we can still
see its previous reference.

So, in the hash table, indexed by the weakly referenced item, is a list
of all locations where this address is currently being stored.

For ARC, we also keep track of whether an arbitrary object is being
deallocated by briefly placing it in the table just prior to invoking
dealloc, and removing it via objc_clear_deallocating just prior to memory
reclamation.
*/

// The address of a __weak variable.
// These pointers are stored disguised (DisguisedPtr) so memory analysis tools
// don't see lots of interior pointers from the weak table and report false leaks.
typedef DisguisedPtr<objc_object *> weak_referrer_t;

#if __LP64__
#define PTR_MINUS_2 62
#else
#define PTR_MINUS_2 30
#endif

/**
 * The internal structure stored in the weak references table.
 * It maintains and stores
 * a hash set of weak references pointing to an object.
 * If out_of_line_ness != REFERRERS_OUT_OF_LINE then the set
 * is instead a small inline array.
 */
#define WEAK_INLINE_COUNT 4

// out_of_line_ness field overlaps with the low two bits of inline_referrers[1].
// inline_referrers[1] is a DisguisedPtr of a pointer-aligned address.
// The low two bits of a pointer-aligned DisguisedPtr will always be 0b00
// (disguised nil or 0x80..00) or 0b11 (any other address).
// Therefore out_of_line_ness == 0b10 is used to mark the out-of-line state.
#define REFERRERS_OUT_OF_LINE 2

struct weak_entry_t {
    // The object's address
    DisguisedPtr<objc_object> referent;
    // referrers and inline_referrers are mutually exclusive (a union);
    // by default the inline_referrers[4] array is used for storage
    union {
        struct {
            weak_referrer_t *referrers;
            uintptr_t        out_of_line_ness : 2;
            uintptr_t        num_refs : PTR_MINUS_2;
            uintptr_t        mask;
            uintptr_t        max_hash_displacement;
        };
        struct {
            // out_of_line_ness field is low bits of inline_referrers[1]
            weak_referrer_t  inline_referrers[WEAK_INLINE_COUNT];
        };
    };

    bool out_of_line() {
        return (out_of_line_ness == REFERRERS_OUT_OF_LINE);
    }

    weak_entry_t& operator=(const weak_entry_t& other) {
        memcpy(this, &other, sizeof(other));
        return *this;
    }

    weak_entry_t(objc_object *newReferent, objc_object **newReferrer)
        : referent(newReferent)
    {
        inline_referrers[0] = newReferrer;
        for (int i = 1; i < WEAK_INLINE_COUNT; i++) {
            inline_referrers[i] = nil;
        }
    }
};
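referrers and inline_referrers share the same memory through the union: an entry starts out using the four-slot inline_referrers array, and only when a fifth weak reference to the same object appears does it switch to the out-of-line referrers hash set. A sketch, assuming ARC and the JPerson class from earlier:

JPerson *person = [JPerson new];
__weak id w1 = person;   // stored in inline_referrers[0]
__weak id w2 = person;   // inline_referrers[1]
__weak id w3 = person;   // inline_referrers[2]
__weak id w4 = person;   // inline_referrers[3]
__weak id w5 = person;   // the inline array is full: the entry switches to the out-of-line referrers table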
objc_initWeak
/**
 * Initialize a fresh weak pointer to some object location.
 * It would be used for code like:
 *
 * (The nil case)
 * __weak id weakPtr;                // an uninitialized weak pointer
 * (The non-nil case)
 * NSObject *o = ...;
 * __weak id weakPtr = o;
 *
 * This function IS NOT thread-safe with respect to concurrent
 * modifications to the weak variable. (Concurrent weak clear is safe.)
 *
 * @param location Address of __weak ptr; location is the address of the weak pointer
 * @param newObj Object ptr; newObj is the object being pointed to
 */
id
objc_initWeak(id *location, id newObj)
{
    // If the object is nil, just zero the weak pointer and return
    if (!newObj) {
        *location = nil;
        return nil;
    }

    return storeWeak<DontHaveOld, DoHaveNew, DoCrashIfDeallocating>
        (location, (objc_object*)newObj);
}
objc_storeWeak
/**
 * This function stores a new value into a __weak variable. It would
 * be used anywhere a __weak variable is the target of an assignment.
 * In other words, it reassigns an existing weak pointer.
 *
 * @param location The address of the weak pointer itself
 * @param newObj The new object this weak ptr should now point to
 *
 * @return \e newObj
 */
id
objc_storeWeak(id *location, id newObj)
{
    return storeWeak<DoHaveOld, DoHaveNew, DoCrashIfDeallocating>
        (location, (objc_object *)newObj);
}
objc_destroyWeak
/**
 * Destroys the relationship between a weak pointer
 * and the object it is referencing in the internal weak
 * table. If the weak pointer is not referencing anything,
 * there is no need to edit the weak table.
 *
 * This function IS NOT thread-safe with respect to concurrent
 * modifications to the weak variable. (Concurrent weak clear is safe.)
 *
 * @param location The weak pointer address.
 */
void
objc_destroyWeak(id *location)
{
    (void)storeWeak<DoHaveOld, DontHaveNew, DontCrashIfDeallocating>
        (location, nil);
}
We find that objc_initWeak, objc_storeWeak and objc_destroyWeak all eventually call storeWeak, so storeWeak is the focus of our study.
storeWeak
// Update a weak variable.
// If HaveOld is true, the variable has an existing value
// that needs to be cleaned up. This value might be nil.
// If HaveNew is true, there is a new value that needs to be
// assigned into the variable. This value might be nil.
// If CrashIfDeallocating is true, the process is halted if newObj is
// deallocating or newObj's class does not support weak references.
// If CrashIfDeallocating is false, nil is stored instead.
enum CrashIfDeallocating {
    DontCrashIfDeallocating = false, DoCrashIfDeallocating = true
};
template <HaveOld haveOld, HaveNew haveNew,
          CrashIfDeallocating crashIfDeallocating>
// location is the address of the weak pointer passed in from
// objc_initWeak / objc_storeWeak; newObj is the object pointer
static id
storeWeak(id *location, objc_object *newObj)
{
    ASSERT(haveOld  ||  haveNew);
    if (!haveNew) ASSERT(newObj == nil);

    Class previouslyInitializedClass = nil;
    id oldObj;
    SideTable *oldTable;
    SideTable *newTable;

    // Acquire locks for old and new values.
    // Order by lock address to prevent lock ordering problems.
    // Retry if the old value changes underneath us.
 retry:
    if (haveOld) {
        // location is the address of the weak pointer, *location is the object
        // it currently points to, so oldObj is the old object pointer
        oldObj = *location;
        // Hash the old object to its SideTable
        oldTable = &SideTables()[oldObj];
    } else {
        oldTable = nil;
    }
    if (haveNew) {
        // Hash the new object to its SideTable
        newTable = &SideTables()[newObj];
    } else {
        newTable = nil;
    }

    // Lock both side tables
    SideTable::lockTwo<haveOld, haveNew>(oldTable, newTable);

    // If the weak pointer no longer points at oldObj, another thread changed it
    // during this process, so unlock and start over
    if (haveOld  &&  *location != oldObj) {
        SideTable::unlockTwo<haveOld, haveNew>(oldTable, newTable);
        goto retry;
    }

    // Prevent a deadlock between the weak reference machinery
    // and the +initialize machinery by ensuring that no
    // weakly-referenced object has an un-+initialized isa.
    // If a weak reference is formed inside +initialize this would recurse;
    // previouslyInitializedClass is used to break the recursion.
    if (haveNew  &&  newObj) {
        // Get newObj's class
        Class cls = newObj->getIsa();
        // First pass: cls is not the previously initialized class and is not yet initialized
        if (cls != previouslyInitializedClass  &&
            !((objc_class *)cls)->isInitialized())
        {
            SideTable::unlockTwo<haveOld, haveNew>(oldTable, newTable);
            class_initialize(cls, (id)newObj);

            // If this class is finished with +initialize then we're good.
            // If this class is still running +initialize on this thread
            // (i.e. +initialize called storeWeak on an instance of itself)
            // then we may proceed but it will appear initializing and
            // not yet initialized to the check above.
            // Instead set previouslyInitializedClass to recognize it on retry.
            // After this, cls != previouslyInitializedClass no longer holds,
            // so we do not re-enter this branch and class_initialize runs only once.
            previouslyInitializedClass = cls;

            goto retry;
        }
    }

    // Clean up old value, if any.
    // Remove the old object / weak pointer pair from the old weak table
    if (haveOld) {
        weak_unregister_no_lock(&oldTable->weak_table, oldObj, location);
    }

    // Assign new value, if any.
    if (haveNew) {
        // Register the new object / weak pointer pair in the new weak table
        newObj = (objc_object *)
            weak_register_no_lock(&newTable->weak_table, (id)newObj, location,
                                  crashIfDeallocating);
        // weak_register_no_lock returns nil if weak store should be rejected

        // Set is-weakly-referenced bit in refcount table.
        if (newObj  &&  !newObj->isTaggedPointer()) {
            newObj->setWeaklyReferenced_nolock();
        }

        // Do not set *location anywhere else. That would introduce a race.
        *location = (id)newObj;
    }
    else {
        // No new value. The storage is not changed.
    }

    SideTable::unlockTwo<haveOld, haveNew>(oldTable, newTable);

    return (id)newObj;
}
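To summarize, storeWeak does roughly the following (a simplified outline, not the literal source):

// 1. Hash the old and the new object to their SideTables and lock both tables.
// 2. If another thread changed the weak pointer in the meantime, unlock and retry.
// 3. Make sure newObj's class has run +initialize (retry once after initializing it).
// 4. haveOld: weak_unregister_no_lock removes the (oldObj, location) pair from the old weak table.
// 5. haveNew: weak_register_no_lock adds the (newObj, location) pair to the new weak table,
//    and the object's "weakly referenced" bit is set.
// 6. Store newObj into *location, unlock both tables and return newObj.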
weak_register_no_lock
/**
 * Registers a new (object, weak pointer) pair. Creates a new weak
 * object entry if it does not exist.
 * In other words, registers an object together with a weak pointer that points to it.
 *
 * @param weak_table The global weak table.
 * @param referent The object pointed to by the weak reference.
 * @param referrer The weak pointer address.
 */
id
weak_register_no_lock(weak_table_t *weak_table, id referent_id,
                      id *referrer_id, bool crashIfDeallocating)
{
    // referent is the object pointer, referrer is the address of the weak pointer
    objc_object *referent = (objc_object *)referent_id;
    objc_object **referrer = (objc_object **)referrer_id;

    // Tagged pointers are not managed by reference counting,
    // so they do not need to be tracked in the weak table either
    if (!referent  ||  referent->isTaggedPointer()) return referent_id;

    // Ensure that the referenced object is viable (not already deallocating)
    bool deallocating;
    if (!referent->ISA()->hasCustomRR()) {
        deallocating = referent->rootIsDeallocating();
    }
    else {
        BOOL (*allowsWeakReference)(objc_object *, SEL) =
            (BOOL(*)(objc_object *, SEL))
            object_getMethodImplementation((id)referent,
                                           @selector(allowsWeakReference));
        if ((IMP)allowsWeakReference == _objc_msgForward) {
            return nil;
        }
        deallocating =
            ! (*allowsWeakReference)(referent, @selector(allowsWeakReference));
    }

    if (deallocating) {
        if (crashIfDeallocating) {
            _objc_fatal("Cannot form weak reference to instance (%p) of "
                        "class %s. It is possible that this object was "
                        "over-released, or is in the process of deallocation.",
                        (void*)referent, object_getClassName((id)referent));
        } else {
            return nil;
        }
    }

    // now remember it and where it is being stored
    // Up to this point we can guarantee that the object is valid
    weak_entry_t *entry;
    if ((entry = weak_entry_for_referent(weak_table, referent))) {
        // An entry for this object already exists, so just store the referrer in it
        append_referrer(entry, referrer);
    }
    else {
        // No entry yet: create one from the object and the weak pointer address
        weak_entry_t new_entry(referent, referrer);
        // Check whether weak_entries of the weak_table needs to grow, and grow it if so
        weak_grow_maybe(weak_table);
        // Store the newly created entry in weak_entries of the weak_table
        weak_entry_insert(weak_table, &new_entry);
    }

    // Do not set *referrer. objc_storeWeak() requires that the
    // value not change.

    return referent_id;
}
weak_entry_for_referent
/**
 * Return the weak reference table entry for the given referent.
 * If there is no entry for referent, return NULL.
 * In other words, look up the entry in the weak_table by the object pointer.
 * Performs a lookup.
 *
 * @param weak_table
 * @param referent The object. Must not be nil.
 *
 * @return The table of weak referrers to this object.
 */
static weak_entry_t *
weak_entry_for_referent(weak_table_t *weak_table, objc_object *referent)
{
    ASSERT(referent);

    weak_entry_t *weak_entries = weak_table->weak_entries;

    if (!weak_entries) return nil;

    // Hash the object pointer and mask it to get the start index
    size_t begin = hash_pointer(referent) & weak_table->mask;
    size_t index = begin;
    size_t hash_displacement = 0;
    // Linear probing: advance until the slot holding referent is found
    while (weak_table->weak_entries[index].referent != referent) {
        index = (index+1) & weak_table->mask;
        if (index == begin) bad_weak_table(weak_table->weak_entries);
        hash_displacement++;
        // Probed further than any stored entry ever was: the entry does not exist
        if (hash_displacement > weak_table->max_hash_displacement) {
            return nil;
        }
    }

    // Found it: return the address of the entry
    return &weak_table->weak_entries[index];
}
append_referrer
/**
 * Add the given referrer to set of weak pointers in this entry.
 * Does not perform duplicate checking (b/c weak pointers are never
 * added to a set twice).
 * In other words, stores the address of a weak pointer into the corresponding entry.
 *
 * @param entry The entry holding the set of weak pointers.
 * @param new_referrer The new weak pointer to be added.
 */
static void append_referrer(weak_entry_t *entry, objc_object **new_referrer)
{
    // If the entry is still using the inline array
    if (! entry->out_of_line()) {
        // Try to insert inline: look for an empty slot in inline_referrers
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            if (entry->inline_referrers[i] == nil) {
                entry->inline_referrers[i] = new_referrer;
                return;
            }
        }

        // Couldn't insert inline. Allocate a new out-of-line table.
        weak_referrer_t *new_referrers = (weak_referrer_t *)
            calloc(WEAK_INLINE_COUNT, sizeof(weak_referrer_t));
        // This constructed table is invalid, but grow_refs_and_insert
        // will fix it and rehash it.
        // Copy the four inline values into the new space
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            new_referrers[i] = entry->inline_referrers[i];
        }
        // referrers now points at the new space
        entry->referrers = new_referrers;
        // num_refs records how many values are stored
        entry->num_refs = WEAK_INLINE_COUNT;
        // Mark the entry as out-of-line
        entry->out_of_line_ness = REFERRERS_OUT_OF_LINE;
        // mask stores the capacity of referrers minus one
        entry->mask = WEAK_INLINE_COUNT-1;
        entry->max_hash_displacement = 0;
    }

    ASSERT(entry->out_of_line());

    // If the number of stored values has reached three quarters of the table,
    // grow the table and let grow_refs_and_insert store new_referrer
    // (it calls back into append_referrer after growing)
    if (entry->num_refs >= TABLE_SIZE(entry) * 3/4) {
        return grow_refs_and_insert(entry, new_referrer);
    }
    // Hash the weak pointer address and mask it to get the start index
    size_t begin = w_hash_pointer(new_referrer) & (entry->mask);
    size_t index = begin;
    size_t hash_displacement = 0;
    // Linear probing: find an empty slot
    while (entry->referrers[index] != nil) {
        hash_displacement++;
        index = (index+1) & entry->mask;
        if (index == begin) bad_weak_table(entry);
    }
    if (hash_displacement > entry->max_hash_displacement) {
        entry->max_hash_displacement = hash_displacement;
    }
    // Store new_referrer and update the count
    weak_referrer_t &ref = entry->referrers[index];
    ref = new_referrer;
    entry->num_refs++;
}
grow_refs_and_insert
/**
 * Grow the entry's hash table of referrers. Rehashes each
 * of the referrers.
 * This function requires that the entry already be out of line,
 * i.e. its referrers are stored in the heap table, not the inline array.
 *
 * @param entry Weak pointer hash set for a particular object.
 */
__attribute__((noinline, used))
static void grow_refs_and_insert(weak_entry_t *entry,
                                 objc_object **new_referrer)
{
    // The entry must already be out of line
    ASSERT(entry->out_of_line());

    // Old capacity of the referrers table
    size_t old_size = TABLE_SIZE(entry);
    // New capacity: double it, or 8 if it was empty
    size_t new_size = old_size ? old_size * 2 : 8;

    // How many values the old table holds
    size_t num_refs = entry->num_refs;
    weak_referrer_t *old_refs = entry->referrers;
    entry->mask = new_size - 1;

    entry->referrers = (weak_referrer_t *)
        calloc(TABLE_SIZE(entry), sizeof(weak_referrer_t));
    entry->num_refs = 0;
    entry->max_hash_displacement = 0;

    // Rehash: move every old value into the new table
    for (size_t i = 0; i < old_size && num_refs > 0; i++) {
        if (old_refs[i] != nil) {
            append_referrer(entry, old_refs[i]);
            num_refs--;
        }
    }
    // Insert the new value new_referrer
    append_referrer(entry, new_referrer);
    // Free the old table if there was one
    if (old_refs) free(old_refs);
}
weak_grow_maybe
This function checks whether the weak_entries array of a weak_table needs to grow, and grows it if so
// Grow the given zone's table of weak references if it is full.
static void weak_grow_maybe(weak_table_t *weak_table)
{
size_t old_size = TABLE_SIZE(weak_table);
// Grow if at least 3/4 full.
if (weak_table->num_entries >= old_size * 3 / 4) {
weak_resize(weak_table, old_size ? old_size*2 : 64);
}
}
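With the default sizes the growth works out roughly like this (a sketch, assuming TABLE_SIZE(weak_table) evaluates to mask + 1 once mask is non-zero):

// brand-new table : old_size == 0, so the very first check already calls weak_resize(weak_table, 64)
// capacity 64     : grows once num_entries reaches 48 (3/4 of 64) -> weak_resize(weak_table, 128)
// capacity 128    : grows at 96 entries -> weak_resize(weak_table, 256), and so on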
weak_resize
This function is straightforward: it allocates a new weak_entries array of the requested size for the weak_table and re-inserts the values from the old weak_entries into it.
static void weak_resize(weak_table_t *weak_table, size_t new_size)
{
    size_t old_size = TABLE_SIZE(weak_table);

    weak_entry_t *old_entries = weak_table->weak_entries;
    // Allocate the new array
    weak_entry_t *new_entries = (weak_entry_t *)
        calloc(new_size, sizeof(weak_entry_t));

    weak_table->mask = new_size - 1;
    weak_table->weak_entries = new_entries;
    weak_table->max_hash_displacement = 0;
    weak_table->num_entries = 0;  // restored by weak_entry_insert below

    if (old_entries) {
        weak_entry_t *entry;
        weak_entry_t *end = old_entries + old_size;
        // Rehash every old entry into the new array
        for (entry = old_entries; entry < end; entry++) {
            if (entry->referent) {
                weak_entry_insert(weak_table, entry);
            }
        }
        free(old_entries);
    }
}
weak_entry_insert
/**
 * Add new_entry to the object's table of weak references.
 * Does not check whether the referent is already in the table.
 * In other words, stores a newly created weak_entry_t into the weak_table.
 */
static void weak_entry_insert(weak_table_t *weak_table, weak_entry_t *new_entry)
{
    weak_entry_t *weak_entries = weak_table->weak_entries;
    ASSERT(weak_entries != nil);

    // Hash the object pointer and mask it to get the start index
    size_t begin = hash_pointer(new_entry->referent) & (weak_table->mask);
    size_t index = begin;
    size_t hash_displacement = 0;
    // Linear probing: find an empty slot
    while (weak_entries[index].referent != nil) {
        index = (index+1) & weak_table->mask;
        if (index == begin) bad_weak_table(weak_entries);
        hash_displacement++;
    }

    // Store the entry and bump the count
    weak_entries[index] = *new_entry;
    weak_table->num_entries++;

    if (hash_displacement > weak_table->max_hash_displacement) {
        weak_table->max_hash_displacement = hash_displacement;
    }
}
weak_unregister_no_lock
/**
 * Unregister an already-registered weak reference.
 * This is used when referrer's storage is about to go away, but referent
 * isn't dead yet. (Otherwise, zeroing referrer later would be a
 * bad memory access.)
 * Does nothing if referent/referrer is not a currently active weak reference.
 * Does not zero referrer.
 *
 * FIXME currently requires old referent value to be passed in (lame)
 * FIXME unregistration should be automatic if referrer is collected
 *
 * @param weak_table The global weak table.
 * @param referent The object.
 * @param referrer The weak reference.
 */
void
weak_unregister_no_lock(weak_table_t *weak_table, id referent_id,
                        id *referrer_id)
{
    // referent is the object pointer, referrer is the address of the weak pointer
    objc_object *referent = (objc_object *)referent_id;
    objc_object **referrer = (objc_object **)referrer_id;

    weak_entry_t *entry;

    if (!referent) return;

    // Look up the weak_entry_t for the object
    if ((entry = weak_entry_for_referent(weak_table, referent))) {
        // Remove the weak pointer address referrer from the weak_entry_t
        remove_referrer(entry, referrer);
        bool empty = true;
        // If the entry is out of line and still stores values, it is not empty
        if (entry->out_of_line()  &&  entry->num_refs != 0) {
            empty = false;
        }
        else {
            // Otherwise scan the inline array: any remaining value means not empty
            for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
                if (entry->inline_referrers[i]) {
                    empty = false;
                    break;
                }
            }
        }

        // If the entry no longer holds any weak references, remove it from the weak_table
        if (empty) {
            weak_entry_remove(weak_table, entry);
        }
    }

    // Do not set *referrer = nil. objc_storeWeak() requires that the
    // value not change.
}
remove_referrer
/**
 * Remove old_referrer from set of referrers, if it's present.
 * Does not remove duplicates, because duplicates should not exist.
 * In other words, removes one weak reference from the entry.
 *
 * @todo this is slow if old_referrer is not present. Is this ever the case?
 *
 * @param entry The entry holding the referrers.
 * @param old_referrer The referrer to remove.
 */
static void remove_referrer(weak_entry_t *entry, objc_object **old_referrer)
{
    // If the entry still uses the inline array
    if (! entry->out_of_line()) {
        // Find old_referrer in inline_referrers and clear its slot
        for (size_t i = 0; i < WEAK_INLINE_COUNT; i++) {
            if (entry->inline_referrers[i] == old_referrer) {
                entry->inline_referrers[i] = nil;
                return;
            }
        }
        // Not found: report the error
        _objc_inform("Attempted to unregister unknown __weak variable "
                     "at %p. This is probably incorrect use of "
                     "objc_storeWeak() and objc_loadWeak(). "
                     "Break on objc_weak_error to debug.\n",
                     old_referrer);
        objc_weak_error();
        return;
    }

    // Out-of-line case: hash the weak pointer address and probe for it
    size_t begin = w_hash_pointer(old_referrer) & (entry->mask);
    size_t index = begin;
    size_t hash_displacement = 0;
    while (entry->referrers[index] != old_referrer) {
        index = (index+1) & entry->mask;
        if (index == begin) bad_weak_table(entry);
        hash_displacement++;
        if (hash_displacement > entry->max_hash_displacement) {
            _objc_inform("Attempted to unregister unknown __weak variable "
                         "at %p. This is probably incorrect use of "
                         "objc_storeWeak() and objc_loadWeak(). "
                         "Break on objc_weak_error to debug.\n",
                         old_referrer);
            objc_weak_error();
            return;
        }
    }
    // Found it: clear the slot and decrement the count
    entry->referrers[index] = nil;
    entry->num_refs--;
}
weak_entry_remove
/**
 * Remove entry from the zone's table of weak references.
 * In other words, removes a weak_entry_t from the weak_table.
 */
static void weak_entry_remove(weak_table_t *weak_table, weak_entry_t *entry)
{
    // remove entry
    // If the entry stores its referrers out of line, free that table first
    if (entry->out_of_line()) free(entry->referrers);
    // Zero out the entry itself
    bzero(entry, sizeof(*entry));

    // The weak_table now stores one entry fewer
    weak_table->num_entries--;

    // Check whether the weak_table should shrink
    weak_compact_maybe(weak_table);
}
weak_compact_maybe
// Shrink the table if it is mostly empty.
static void weak_compact_maybe(weak_table_t *weak_table)
{
size_t old_size = TABLE_SIZE(weak_table);
// Shrink if larger than 1024 buckets and at most 1/16 full.
if (old_size >= 1024 && old_size / 16 >= weak_table->num_entries) {
weak_resize(weak_table, old_size / 8);
// leaves new table no more than 1/2 full
}
}
At this point we have a basic understanding of the weak registration process, but it is far from the whole story.