Debugging in an __x86_64__ environment

TaggedPointer

parsing

Determining whether a pointer is tagged

// Returns true when ptr carries the tagged-pointer marker.
// A pointer is "tagged" iff every bit of _OBJC_TAG_MASK is set in it.
static inline bool
_objc_isTaggedPointer(const void * _Nullable ptr)
{
    uintptr_t bits = (uintptr_t)ptr;
    return (bits & _OBJC_TAG_MASK) == _OBJC_TAG_MASK;
}
 
(ptr & 1UL) == 1UL
Copy the code

The values

value = (ptr^objc_debug_taggedpointer_obfuscator)>>8
Copy the code

NONPOINTER_ISA

parsing

Getting the class pointer out of isa — ISA()

// Read the Class pointer out of isa. The receiver must not be a
// tagged pointer (tagged pointers have no isa field at all).
inline Class
objc_object::ISA()
{
    ASSERT(!isTaggedPointer());
#if SUPPORT_INDEXED_ISA
    if (isa.nonpointer) {
        // Indexed isa: indexcls is a slot in the global class table.
        uintptr_t slot = isa.indexcls;
        return classForIndex((unsigned)slot);
    }
    // Raw isa: bits hold the Class pointer directly.
    return (Class)isa.bits;
#else
    // Packed isa: mask off the non-pointer bookkeeping bits to
    // recover the Class pointer.
    return (Class)(isa.bits & ISA_MASK);
#endif
}

The values

(lldb) p/x ob.class (Class) $22 = 0x00007fff8e578118 NSObject (lldb) p/x ob->isa (Class) $21 = 0x001dffff8e578119 NSObject 0000 0000 extra_rc 0 has_sidetable_rc 0 deallocating 0 weakly_referenced 1 1101 1 magic 111 1111 1111 1111 1000  1110 0101 0111 1000 0001 0001 1 shiftcls 11111111111111110001110010101111000000100011 0 has_cxx_dtor 0 has_assoc 1 nonpointerCopy the code

shiftcls stores the address of the class object, shifted right by 3 bits

Initialize the isa

// Install isa for a freshly allocated object.
// nonpointer == false stores cls directly as a raw isa; otherwise a
// packed (or, with SUPPORT_INDEXED_ISA, indexed) isa is built with the
// runtime's bookkeeping bits.
inline void
objc_object::initIsa(Class cls, bool nonpointer, bool hasCxxDtor)
{
    ASSERT(!isTaggedPointer());

    if (!nonpointer) {
        // Raw isa: the field is just the class pointer.
        isa = isa_t((uintptr_t)cls);
    } else {
        ASSERT(!DisableNonpointerIsa);
        ASSERT(!cls->instancesRequireRawIsa());

        isa_t newisa(0);

#if SUPPORT_INDEXED_ISA
        ASSERT(cls->classArrayIndex() > 0);
        newisa.bits = ISA_INDEX_MAGIC_VALUE;
        // isa.magic is part of ISA_MAGIC_VALUE
        // isa.nonpointer is part of ISA_MAGIC_VALUE
        newisa.has_cxx_dtor = hasCxxDtor;
        newisa.indexcls = (uintptr_t)cls->classArrayIndex();
#else
        newisa.bits = ISA_MAGIC_VALUE;
        // isa.magic is part of ISA_MAGIC_VALUE
        // isa.nonpointer is part of ISA_MAGIC_VALUE
        newisa.has_cxx_dtor = hasCxxDtor;
        // Classes are 8-byte aligned, so the low 3 bits are always 0
        // and can be dropped to make room for the flag bits.
        newisa.shiftcls = (uintptr_t)cls >> 3;
#endif

        // This write must be performed in a single store in some cases
        // (for example when realizing a class because other threads
        // may simultaneously try to use the class).
        // fixme use atomics here to guarantee single-store and to
        // guarantee memory order w.r.t. the class index table
        // ... but not too atomic because we don't want to hurt instantiation
        isa = newisa;
    }
}

Shift left by three bits to recover the class pointer

11111111111111110001110010101111000000100011<<3
 
 
>>> hex(int('11111111111111110001110010101111000000100011000', 2))
'0x7fff8e578118'
Copy the code

Hash table

SideTable

// Per-object side storage. Bundles the overflow reference-count map and
// the weak-reference table behind a single spinlock; objects are hashed
// to one of a fixed set of SideTables.
struct SideTable {
    spinlock_t slock;         // guards refcnts and weak_table
    RefcountMap refcnts;      // object -> reference-count data (DenseMap)
    weak_table_t weak_table;  // object -> list of __weak referrers
 
 
    SideTable() {
        // weak_table is a plain C struct with no constructor; zero it.
        memset(&weak_table, 0, sizeof(weak_table));
    }
 
 
    ~SideTable() {
        // SideTables live for the lifetime of the process.
        _objc_fatal("Do not delete SideTable.");
    }
 
 
    void lock() { slock.lock(); }
    void unlock() { slock.unlock(); }
    void forceReset() { slock.forceReset(); }
 
 
    // Address-ordered lock discipline for a pair of side tables.
 
 
    template<HaveOld, HaveNew>
    static void lockTwo(SideTable *lock1, SideTable *lock2);
    template<HaveOld, HaveNew>
    static void unlockTwo(SideTable *lock1, SideTable *lock2);
};
Copy the code

RefcountMap

template <typename KeyT, typename ValueT, typename ValueInfoT = DenseMapValueInfo<ValueT>, typename KeyInfoT = DenseMapInfo<KeyT>, typename BucketT = detail::DenseMapPair<KeyT, ValueT>> class DenseMap : public DenseMapBase<DenseMap<KeyT, ValueT, ValueInfoT, KeyInfoT, BucketT>, KeyT, ValueT, ValueInfoT, KeyInfoT, BucketT> { friend class DenseMapBase<DenseMap, KeyT, ValueT, ValueInfoT, KeyInfoT, BucketT>; // Lift some types from the dependent base class into this class for // simplicity of referring to them. using BaseT = DenseMapBase<DenseMap, KeyT, ValueT, ValueInfoT, KeyInfoT, BucketT>; BucketT *Buckets; unsigned NumEntries; unsigned NumTombstones; unsigned NumBuckets; .Copy the code

BucketT

namespace detail {

// We extend a pair to allow users to override the bucket type with their own
// implementation without requiring two members.
template <typename KeyT, typename ValueT>
struct DenseMapPair : public std::pair<KeyT, ValueT> {
  // FIXME: Switch to inheriting constructors when we drop support for older
  // clang versions.
  // NOTE: This default constructor is declared with '{}' rather than
  // '= default' to work around a separate bug in clang-3.8. This can
  // also go when we switch to inheriting constructors.
  DenseMapPair() {}

  DenseMapPair(const KeyT &Key, const ValueT &Value)
      : std::pair<KeyT, ValueT>(Key, Value) {}

  DenseMapPair(KeyT &&Key, ValueT &&Value)
      : std::pair<KeyT, ValueT>(std::move(Key), std::move(Value)) {}

  // Converting constructor: accepts any key/value types convertible to
  // KeyT/ValueT (SFINAE-gated via enable_if).
  template <typename AltKeyT, typename AltValueT>
  DenseMapPair(AltKeyT &&AltKey, AltValueT &&AltValue,
               typename std::enable_if<
                   std::is_convertible<AltKeyT, KeyT>::value &&
                   std::is_convertible<AltValueT, ValueT>::value>::type * = 0)
      : std::pair<KeyT, ValueT>(std::forward<AltKeyT>(AltKey),
                                std::forward<AltValueT>(AltValue)) {}

  // Converting constructor from anything convertible to std::pair<KeyT, ValueT>.
  template <typename AltPairT>
  DenseMapPair(AltPairT &&AltPair,
               typename std::enable_if<std::is_convertible<
                   AltPairT, std::pair<KeyT, ValueT>>::value>::type * = 0)
      : std::pair<KeyT, ValueT>(std::forward<AltPairT>(AltPair)) {}

  KeyT &getFirst() { return std::pair<KeyT, ValueT>::first; }
  const KeyT &getFirst() const { return std::pair<KeyT, ValueT>::first; }
  ValueT &getSecond() { return std::pair<KeyT, ValueT>::second; }
  const ValueT &getSecond() const { return std::pair<KeyT, ValueT>::second; }
};

} // end namespace detail

weak_table_t

// The global weak-reference table: maps an object to the weak_entry_t
// listing every __weak variable that refers to it.
struct weak_table_t {
    weak_entry_t *weak_entries;        // hashed array of entries
    size_t    num_entries;             // number of occupied slots
    uintptr_t mask;                    // hash mask — presumably capacity - 1; confirm against objc-weak.mm
    uintptr_t max_hash_displacement;   // NOTE(review): looks like the worst-case probe distance — confirm
};
Copy the code

weak_entry_t

struct weak_entry_t { DisguisedPtr<objc_object> referent; Weak_referrer_t *referrers; weak_referrer_t *referrers; weak_referrer_t *referrers; // Uintptr_out_of_line_ness: 2; Uintptr_t num_refs: PTR_MINUS_2; // Uintptr_t mask; // Uintptr_t max_hash_displacement; // Maximum hash offset}; struct { // out_of_line_ness field is low bits of inline_referrers[1] weak_referrer_t inline_referrers[WEAK_INLINE_COUNT]; }; }; bool out_of_line() { return (out_of_line_ness == REFERRERS_OUT_OF_LINE); } weak_entry_t& operator=(const weak_entry_t& other) { memcpy(this, &other, sizeof(other)); return *this; } weak_entry_t(objc_object *newReferent, objc_object **newReferrer) : referent(newReferent) { inline_referrers[0] = newReferrer; for (int i = 1; i < WEAK_INLINE_COUNT; i++) { inline_referrers[i] = nil; }}};Copy the code