/*
 * Copyright (c) 2011 Apple Inc. All rights reserved.
 *
 * @APPLE_APACHE_LICENSE_HEADER_START@
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @APPLE_APACHE_LICENSE_HEADER_END@
 */
/*
    auto_weak.cpp
    Weak reference accounting
    Copyright (c) 2004-2011 Apple Inc. All rights reserved.
 */

#include "auto_weak.h"
#include "Locks.h"
#include "Definitions.h"
#include "Range.h"
#include "Zone.h"

#include <algorithm>   // std::for_each
#include <utility>     // std::pair
#include <vector>      // std::vector
#include <alloca.h>    // alloca

using namespace Auto;

struct weak_referrer_array_t {
    weak_referrer_t     *refs;
    usword_t            num_refs;
    usword_t            num_allocated;
    usword_t            max_hash_displacement;
};
typedef struct weak_referrer_array_t weak_referrer_array_t;

struct Auto::weak_entry_t {
    const void *referent;
    weak_referrer_array_t referrers;
};
typedef struct Auto::weak_entry_t weak_entry_t;
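
// The weak table is two-level: Zone::weak_refs_table is an open-addressed
// hash mapping each referent to a weak_entry_t, and each entry carries its
// own small open-addressed hash of the referrer slots that point at that
// referent. Both levels use linear probing (see weak_entry_insert_no_lock
// and append_referrer_no_lock below).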

typedef std::pair<const void *, void **> WeakPair;
typedef std::vector<WeakPair, Auto::AuxAllocator<WeakPair> > WeakPairVector;
typedef std::vector<weak_referrer_t, Auto::AuxAllocator<weak_referrer_t> > WeakReferrerVector;

static void append_referrer_no_lock(weak_referrer_array_t *list, void **new_referrer, auto_weak_callback_block_t *new_block);

static inline uintptr_t hash(const void *key)
{
    uintptr_t k = (uintptr_t)key;

    // Code from CFSet.c
#if __LP64__
    uintptr_t a = 0x4368726973746F70ULL;
    uintptr_t b = 0x686572204B616E65ULL;
#else
    uintptr_t a = 0x4B616E65UL;
    uintptr_t b = 0x4B616E65UL;
#endif
    uintptr_t c = 1;
    a += k;
#if __LP64__
    a -= b; a -= c; a ^= (c >> 43);
    b -= c; b -= a; b ^= (a << 9);
    c -= a; c -= b; c ^= (b >> 8);
    a -= b; a -= c; a ^= (c >> 38);
    b -= c; b -= a; b ^= (a << 23);
    c -= a; c -= b; c ^= (b >> 5);
    a -= b; a -= c; a ^= (c >> 35);
    b -= c; b -= a; b ^= (a << 49);
    c -= a; c -= b; c ^= (b >> 11);
    a -= b; a -= c; a ^= (c >> 12);
    b -= c; b -= a; b ^= (a << 18);
    c -= a; c -= b; c ^= (b >> 22);
#else
    a -= b; a -= c; a ^= (c >> 13);
    b -= c; b -= a; b ^= (a << 8);
    c -= a; c -= b; c ^= (b >> 13);
    a -= b; a -= c; a ^= (c >> 12);
    b -= c; b -= a; b ^= (a << 16);
    c -= a; c -= b; c ^= (b >> 5);
    a -= b; a -= c; a ^= (c >> 3);
    b -= c; b -= a; b ^= (a << 10);
    c -= a; c -= b; c ^= (b >> 15);
#endif
    return c;
}
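// (The shuffle above appears to be Bob Jenkins' 96-bit mix; only the final
// word, c, is returned, and callers reduce it modulo their table size.)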

// Up until this size the weak referrer array grows one slot at a time. Above this size it grows by doubling.
#define WEAK_TABLE_DOUBLE_SIZE 8

// Grow the refs list. Rehashes the entries.
static void grow_refs(weak_referrer_array_t *list)
{
    usword_t old_num_allocated = list->num_allocated;
    usword_t num_refs = list->num_refs;
    weak_referrer_t *old_refs = list->refs;
    usword_t new_allocated = old_num_allocated < WEAK_TABLE_DOUBLE_SIZE ? old_num_allocated + 1 : old_num_allocated + old_num_allocated;
    list->refs = (weak_referrer_t *)aux_malloc(new_allocated * sizeof(weak_referrer_t));
    list->num_allocated = aux_malloc_size(list->refs)/sizeof(weak_referrer_t);
    bzero(list->refs, list->num_allocated * sizeof(weak_referrer_t));
    // for larger tables drop one entry from the end to give an odd number of hash buckets for better hashing
    if ((list->num_allocated > WEAK_TABLE_DOUBLE_SIZE) && !(list->num_allocated & 1)) list->num_allocated--;
    list->num_refs = 0;
    list->max_hash_displacement = 0;

    usword_t i;
    for (i=0; i < old_num_allocated && num_refs > 0; i++) {
        if (old_refs[i].referrer != NULL) {
            append_referrer_no_lock(list, old_refs[i].referrer, old_refs[i].block);
            num_refs--;
        }
    }
    if (old_refs)
        aux_free(old_refs);
}

// Add the given referrer to list
// Does not perform duplicate checking.
static void append_referrer_no_lock(weak_referrer_array_t *list, void **new_referrer, auto_weak_callback_block_t *new_block)
{
    if ((list->num_refs == list->num_allocated) || ((list->num_refs >= WEAK_TABLE_DOUBLE_SIZE) && (list->num_refs >= list->num_allocated * 2 / 3))) {
        grow_refs(list);
    }
    usword_t index = hash(new_referrer) % list->num_allocated, hash_displacement = 0;
    while (list->refs[index].referrer != NULL) {
        index++;
        hash_displacement++;
        if (index == list->num_allocated)
            index = 0;
    }
    if (list->max_hash_displacement < hash_displacement) {
        list->max_hash_displacement = hash_displacement;
        //malloc_printf("max_hash_displacement: %d allocated: %d\n", list->max_hash_displacement, list->num_allocated);
    }
    weak_referrer_t &ref = list->refs[index];
    ref.referrer = new_referrer;
    ref.block = new_block;
    list->num_refs++;
}


// Remove old_referrer from list, if it's present.
// Does not remove duplicates.
// fixme this is slow if old_referrer is not present.
static void remove_referrer_no_lock(weak_referrer_array_t *list, void **old_referrer)
{
    usword_t index = hash(old_referrer) % list->num_allocated;
    usword_t start_index = index, hash_displacement = 0;
    while (list->refs[index].referrer != old_referrer) {
        index++;
        hash_displacement++;
        if (index == list->num_allocated)
            index = 0;
        if (index == start_index || hash_displacement > list->max_hash_displacement) {
            malloc_printf("%s: attempted to remove unregistered weak referrer %p\n", auto_prelude(), old_referrer);
            return;
        }
    }
    list->refs[index].referrer = NULL;
    list->num_refs--;
}
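// (Removal simply NULLs the slot. That is safe for this per-entry table
// because its lookups are bounded by max_hash_displacement rather than
// stopping at the first empty slot, so a hole cannot break a probe chain.)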


// Add new_entry to the zone's table of weak references.
// Does not check whether the referent is already in the table.
// Does not update num_weak_refs.
static void weak_entry_insert_no_lock(Zone *azone, weak_entry_t *new_entry)
{
    weak_entry_t *table = azone->weak_refs_table;

    if (!table) { malloc_printf("no auto weak ref table!\n"); return; }

    usword_t table_size = azone->max_weak_refs;
    usword_t hash_index = hash(new_entry->referent) % table_size;
    usword_t index = hash_index;

    do {
        weak_entry_t *entry = table + index;
        if (entry->referent == NULL) {
            *entry = *new_entry;
            return;
        }
        index++; if (index == table_size) index = 0;
    } while (index != hash_index);
    malloc_printf("no room for new entry in auto weak ref table!\n");
}


// Remove entry from the zone's table of weak references, and rehash
// Does not update num_weak_refs.
static void weak_entry_remove_no_lock(Zone *azone, weak_entry_t *entry)
{
    // remove entry
    entry->referent = NULL;
    if (entry->referrers.refs) aux_free(entry->referrers.refs);
    entry->referrers.refs = NULL;
    entry->referrers.num_refs = 0;
    entry->referrers.num_allocated = 0;

    // rehash after entry
    weak_entry_t *table = azone->weak_refs_table;
    usword_t table_size = azone->max_weak_refs;
    usword_t hash_index = entry - table;
    usword_t index = hash_index;

    if (!table) return;

    do {
        index++; if (index == table_size) index = 0;
        if (!table[index].referent) return;
        weak_entry_t rehash_entry = table[index];
        table[index].referent = NULL;
        weak_entry_insert_no_lock(azone, &rehash_entry);
    } while (index != hash_index);
}
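// (This is the standard linear-probing deletion: every entry in the cluster
// that follows the hole is pulled out and reinserted, stopping at the first
// empty slot. The zone-level lookup in weak_entry_for_referent terminates at
// empty slots, so leaving a bare hole would break its probe chains.)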


// Grow the given zone's table of weak references if it is full.
static void weak_grow_maybe_no_lock(Zone *azone)
{
    if (azone->num_weak_refs >= azone->max_weak_refs * 3 / 4) {
        // grow table
        usword_t old_max = azone->max_weak_refs;
        usword_t new_max = old_max ? old_max * 2 + 1 : 15;
        weak_entry_t *old_entries = azone->weak_refs_table;
        weak_entry_t *new_entries = (weak_entry_t *)aux_calloc(new_max, sizeof(weak_entry_t));
        azone->max_weak_refs = new_max;
        azone->weak_refs_table = new_entries;

        if (old_entries) {
            weak_entry_t *entry;
            weak_entry_t *end = old_entries + old_max;
            for (entry = old_entries; entry < end; entry++) {
                weak_entry_insert_no_lock(azone, entry);
            }
            aux_free(old_entries);
        }
    }
}
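// (Starting at 15 and growing via old_max * 2 + 1 keeps the table size at
// 2^k - 1, so the bucket count stays odd, which tends to spread the
// modulo-reduced pointer hash more evenly.)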

// Verify that no locations in the range supplied are registered as referrers
// (Expensive, but good for debugging)
// Return first location in range found to still be registered

void **auto_weak_find_first_referrer(auto_zone_t *zone, void **location, unsigned long count) {
    Zone *azone = (Zone *)zone;
    Auto::SpinLock lock(&azone->weak_refs_table_lock);
    usword_t counter = 0;
    for (; counter < azone->max_weak_refs; ++counter) {
        if (!azone->weak_refs_table[counter].referent) continue;
        weak_referrer_t *refs = azone->weak_refs_table[counter].referrers.refs;
        usword_t index = 0;
        for (; index < azone->weak_refs_table[counter].referrers.num_allocated; ++index) {
            if ((refs[index].referrer >= location) && (refs[index].referrer < (location + count))) {
                void **result = refs[index].referrer;
                return result;
            }
        }
    }
    return NULL;
}

// Return the weak reference table entry for the given referent.
// If there is no entry for referent, return NULL.
static weak_entry_t *weak_entry_for_referent(Zone *azone, const void *referent)
{
    weak_entry_t *table = azone->weak_refs_table;

    if (!table) return NULL;

    usword_t table_size = azone->max_weak_refs;
    usword_t hash_index = hash(referent) % table_size;
    usword_t index = hash_index;

    do {
        weak_entry_t *entry = table + index;
        if (entry->referent == referent) return entry;
        if (entry->referent == NULL) return NULL;
        index++; if (index == table_size) index = 0;
    } while (index != hash_index);

    return NULL;
}

#ifdef __BLOCKS__

void weak_enumerate_weak_references(Auto::Zone *azone, const void *referent, weak_ref_visitor_t visitor) {
    weak_entry_t *entry = weak_entry_for_referent(azone, referent);
    if (entry) {
        weak_referrer_t *refs = entry->referrers.refs;
        usword_t count = entry->referrers.num_allocated;
        for (usword_t i = 0; i < count; ++i) {
            weak_referrer_t ref = refs[i];
            if (ref.referrer) {
                if ((uintptr_t(ref.block) & 1)) ref.block = (auto_weak_callback_block_t*)displace(ref.block, -1);
                ASSERTION(ref.referrer[0] == referent);
                visitor(ref);
            }
        }
    }
}
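// Usage sketch (hypothetical caller; assumes weak_ref_visitor_t is a Block
// taking a weak_referrer_t, which is how it is invoked above):
//
//   weak_enumerate_weak_references(zone, obj, ^(weak_referrer_t ref) {
//       malloc_printf("weak slot %p currently points at %p\n",
//                     ref.referrer, *ref.referrer);
//   });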

// Run through the table while all threads are quiescent and visit every
// registered weak reference.

void weak_enumerate_table(Zone *zone, weak_ref_visitor_t visitor) {
    for (usword_t i = 0, count = zone->max_weak_refs; i < count; ++i) {
        weak_entry_t &entry = zone->weak_refs_table[i];
        const void *referent = entry.referent;
        if (!referent) continue;
        weak_referrer_t *refs = entry.referrers.refs;
        usword_t ref_count = entry.referrers.num_allocated;
        for (usword_t j = 0; j < ref_count; ++j) {
            weak_referrer_t ref = refs[j];
            if (ref.referrer) {
                ASSERTION(referent == ref.referrer[0]);
                if ((uintptr_t(ref.block) & 1)) ref.block = (auto_weak_callback_block_t*)displace(ref.block, -1);
                visitor(ref);
            }
        }
    }
}

void weak_enumerate_table_fixup(Auto::Zone *zone, weak_ref_fixer_t fixer) {
    for (usword_t i = 0, count = zone->max_weak_refs; i < count; ++i) {
        weak_entry_t &entry = zone->weak_refs_table[i];
        const void *referent = entry.referent;
        if (!referent) continue;
        weak_referrer_t *refs = entry.referrers.refs;
        usword_t ref_count = entry.referrers.num_allocated;
        for (usword_t j = 0; j < ref_count; ++j) {
            weak_referrer_t &ref = refs[j];
            if (ref.referrer) {
                fixer(ref);
            }
        }
    }
}

#endif


// Given a pointer to a weak reference entry, clear all referrers, etc.

static void weak_clear_entry_no_lock(Zone *azone, weak_entry_t *entry, uintptr_t *weak_refs_count, auto_weak_callback_block_t **head)
{
    // clear referrers, update counters, update lists
    usword_t count = entry->referrers.num_allocated;
    usword_t index = 0;
    for (; index < count; ++index) {
        weak_referrer_t *ref = &entry->referrers.refs[index];
        if (ref->referrer) {
            if (azone->control.log & AUTO_LOG_WEAK) malloc_printf("%s: WEAK: clearing ref to %p at %p (value was %p)\n", auto_prelude(), entry->referent, ref->referrer, *ref->referrer);
            if (*ref->referrer != entry->referent) {
                malloc_printf("__weak value %p at location %p not equal to %p and so will not be cleared\n", *ref->referrer, ref->referrer, entry->referent);
                void **base = (void **)auto_zone_base_pointer((auto_zone_t*)azone, ref->referrer);
                if (base) {
                    auto_memory_type_t type = auto_zone_get_layout_type((auto_zone_t*)azone, base);
                    malloc_printf("...location is %s starting at %p with first slot value %p\n",
                                  (type & AUTO_OBJECT) ? "an object" : "a data block",
                                  base,
                                  *base);
                }
                continue;
            }
            *ref->referrer = NULL;
            ++*weak_refs_count;
            if (ref->block) {
                if (uintptr_t(ref->block) & 1) {
                    old_auto_weak_callback_block *old_block = (old_auto_weak_callback_block *)displace(ref->block, -1);
                    if (old_block->callback_function && old_block->next == NULL) {
                        old_block->next = *head;
                        *head = ref->block;
                    }
                } else {
                    auto_weak_callback_block_t *block = ref->block;
                    if (block->callback_function && block->next == NULL) {
                        // chain it if it isn't already chained & there is a callout to call
                        block->next = *head;
                        *head = block;
                    }
                }
            }
        }
    }

    weak_entry_remove_no_lock(azone, entry);
    azone->num_weak_refs--;
}


// Given a set of newly-discovered garbage, zero out any weak
// references to the garbage.
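// The returned chain of callback blocks is intended to be handed to
// weak_call_callbacks() by the caller, after the weak table lock is dropped.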
auto_weak_callback_block_t *weak_clear_references(Zone *azone, size_t garbage_count, vm_address_t *garbage,
                           uintptr_t *weak_referents_count, uintptr_t *weak_refs_count)
{
    usword_t i;

    *weak_referents_count = *weak_refs_count = 0;
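    // Note: -1, not NULL, terminates the callback chain, because a block whose
    // next field is NULL is treated as "not yet chained" (see
    // weak_clear_entry_no_lock); NULL cannot double as the end-of-list marker.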
    auto_weak_callback_block_t *head = reinterpret_cast<auto_weak_callback_block_t *>(-1);

    Auto::SpinLock lock(&azone->weak_refs_table_lock);

    // if no weak references, don't bother searching.
    if (azone->num_weak_refs == 0) {
        return NULL;
    }

    for (i = 0; i < garbage_count; i++) {
        weak_entry_t *entry = weak_entry_for_referent(azone, (void *)garbage[i]);
        if (entry) {
            weak_clear_entry_no_lock(azone, entry, weak_refs_count, &head);
            ++*weak_referents_count;
        }
    }
    return head;
}

// Unregister an already-registered weak reference.
// This is used when referrer's storage is about to go away, but referent
//   isn't dead yet. (Otherwise, zeroing referrer later would be a
//   bad memory access.)
// Does nothing if referent/referrer is not a currently active weak reference.
// fixme currently requires old referent value to be passed in (lame)
// fixme unregistration should be automatic if referrer is collected
static void weak_unregister_no_lock(Zone *azone, const void *referent, void **referrer)
{
    weak_entry_t *entry;

    if (azone->control.log & AUTO_LOG_WEAK) malloc_printf("%s: WEAK: unregistering weak reference to %p at %p\n", auto_prelude(), referent, referrer);

    if ((entry = weak_entry_for_referent(azone, referent))) {
        remove_referrer_no_lock(&entry->referrers, referrer);
        if (entry->referrers.num_refs == 0) {
            weak_entry_remove_no_lock(azone, entry);
            azone->num_weak_refs--;
        }
    }
}

static void weak_register_no_lock(Zone *azone, const void *referent, void **referrer, auto_weak_callback_block_t *block) {
    if (*referrer) weak_unregister_no_lock(azone, *referrer, referrer);
    if (referent) {
        weak_entry_t *entry;
        if ((entry = weak_entry_for_referent(azone, referent))) {
            append_referrer_no_lock(&entry->referrers, referrer, block);
        }
        else {
            weak_entry_t new_entry;
            new_entry.referent = referent;
            new_entry.referrers.refs = NULL;
            new_entry.referrers.num_refs = 0;
            new_entry.referrers.num_allocated = 0;
            append_referrer_no_lock(&new_entry.referrers, referrer, block);
            weak_grow_maybe_no_lock(azone);
            azone->num_weak_refs++;
            weak_entry_insert_no_lock(azone, &new_entry);
        }
    }
    // make sure that anyone accessing this via a read gets the value
    *referrer = (void *)referent;
}

// Register a new weak reference.
// referent is the object being weakly pointed to
// referrer is the memory location that will be zeroed when referent dies
// Does not check whether referent is currently live (fixme probably should)
// Does not check whether referrer is already a weak reference location for
//   the given referent or any other referent (fixme maybe should)
// Does not change the scannability of referrer; referrer should be non-scanned
void weak_register(Zone *azone, const void *referent, void **referrer, auto_weak_callback_block_t *block)
{
    if (azone->control.log & AUTO_LOG_WEAK) malloc_printf("%s: WEAK: registering weak reference to %p at %p\n", auto_prelude(), referent, referrer);
    Auto::SpinLock lock(&azone->weak_refs_table_lock);
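    // Old-style callback blocks are identified by a NULL isa field; tag them by
    // setting the low bit of the block address so that later consumers (see
    // weak_clear_entry_no_lock and weak_call_callbacks) can tell the two
    // layouts apart.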
    auto_weak_callback_block_t *new_block = (block == NULL || block->isa != NULL) ? block : (auto_weak_callback_block_t *)displace(block, 1);
    weak_register_no_lock(azone, referent, referrer, new_block);
}

void weak_transfer_weak_referents(Auto::Zone *azone, const void *oldReferent, const void *newReferent) {
    weak_entry_t *entry = weak_entry_for_referent(azone, oldReferent);
    if (entry == NULL) return;

    usword_t i, count = entry->referrers.num_allocated, found = 0;
    weak_referrer_t *refs = (weak_referrer_t *)alloca(count * sizeof(weak_referrer_t));
    for (i = 0; i < count; ++i) {
        weak_referrer_t &ref = entry->referrers.refs[i];
        if (ref.referrer) {
            ASSERTION(*ref.referrer == oldReferent);
            refs[found++] = ref;
        }
    }
    for (i = 0; i < found; ++i) {
        weak_referrer_t &ref = refs[i];
        weak_register_no_lock(azone, newReferent, ref.referrer, ref.block);
    }
    ASSERTION(weak_entry_for_referent(azone, oldReferent) == NULL);
}

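// The weak layout map used below is a NUL-terminated byte string: each byte's
// high nibble is a count of pointer-sized words to skip, and its low nibble is
// a count of consecutive __weak slots that follow. For example (illustrative
// values only), the map { 0x21, 0x13, 0x00 } describes a block in which word 2
// is weak and words 4, 5, and 6 are weak.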
void weak_transfer_weak_contents(Auto::Zone *azone, void *oldBlock[], void *newBlock[], const uint8_t *map) {
    usword_t index = 0;
    uint8_t byte;
    while ((byte = *map++)) {
        uint8_t skips = (byte >> 4);
        index += skips;
        uint8_t weaks = (byte & 0x0F);
        while (weaks--) {
            void **slot = &oldBlock[index];
            const void *referent = *slot;
            if (referent) {
                weak_entry_t *entry = weak_entry_for_referent(azone, referent);
                weak_referrer_array_t &referrers = entry->referrers;
                usword_t probe = hash(slot) % referrers.num_allocated, hash_displacement = 0;
                while (hash_displacement++ <= referrers.max_hash_displacement) {
                    weak_referrer_t &ref = referrers.refs[probe];
                    if (ref.referrer == slot) {
                        newBlock[index] = NULL;
                        weak_register_no_lock(azone, referent, &newBlock[index], ref.block);
                        weak_unregister_no_lock(azone, referent, slot);
                        break;
                    }
                    if (++probe == referrers.num_allocated)
                        probe = 0;
                }
            }
            ++index;
        }
    }
}

// IDEA:  can make this more efficient by walking all of the slots, and checking to see if any of them corresponds to a known
// stored weak reference.

void weak_transfer_weak_contents_unscanned(Auto::Zone *azone, void *oldBlock[], void *newBlock[], size_t size, bool forwarding) {
#if DEBUG
    // check word 0 (the forwarding pointer slot) to ensure it isn't being used as a weak reference. we shouldn't be here if that's the case.
    if (forwarding && newBlock[0]) {
        const void *referent = newBlock[0];
        weak_entry_t *entry = weak_entry_for_referent(azone, referent);
        if (entry) {
            weak_referrer_array_t &referrers = entry->referrers;
            for (usword_t i = 0; i < referrers.num_allocated; ++i) {
                weak_referrer_t &ref = referrers.refs[i];
                ASSERTION(ref.referrer != &oldBlock[0]);
            }
        }
    }
#endif
    // NOTE:  loop starts at word 1 to avoid the forwarding pointer slot.
    for (void **slot = (forwarding ? oldBlock + 1 : oldBlock), **limit = (void **)displace(oldBlock, size - sizeof(void*)); slot <= limit; ++slot) {
        const void *referent = *slot;
        if (referent) {
            weak_entry_t *entry = weak_entry_for_referent(azone, referent);
            if (entry) {
                weak_referrer_array_t &referrers = entry->referrers;
                usword_t probe = hash(slot) % referrers.num_allocated, hash_displacement = 0;
                while (hash_displacement++ <= referrers.max_hash_displacement) {
                    weak_referrer_t &ref = referrers.refs[probe];
                    if (ref.referrer == slot) {
                        // found one, transfer ownership to the same offset in newBlock.
                        ptrdiff_t index = slot - oldBlock;
                        newBlock[index] = NULL;
                        weak_register_no_lock(azone, referent, &newBlock[index], ref.block);
                        weak_unregister_no_lock(azone, referent, slot);
                        break;
                    }
                    if (++probe == referrers.num_allocated)
                        probe = 0;
                }
            }
        }
    }
}

#if 0

static void referrers_in_range_no_lock(Auto::Zone *azone, const Range &range, void (^block) (weak_referrer_t &ref)) {
    usword_t counter = 0;
    for (; counter < azone->max_weak_refs; ++counter) {
        if (!azone->weak_refs_table[counter].referent) continue;
        weak_referrer_t *refs = azone->weak_refs_table[counter].referrers.refs;
        usword_t index = 0;
        for (; index < azone->weak_refs_table[counter].referrers.num_allocated; ++index) {
            if (range.in_range(refs[index].referrer)) {
                block(refs[index]);
            }
        }
    }
}

void weak_transfer_weak_contents_unscanned(Auto::Zone *azone, void *oldBlock[], void *newBlock[], size_t size) {
    Auto::SpinLock lock(&azone->weak_refs_table_lock);
    __block WeakReferrerVector refs;
    referrers_in_range_no_lock(azone, Range(oldBlock, size), ^(weak_referrer_t &ref) {
        refs.push_back(ref);
    });
    for (WeakReferrerVector::iterator i = refs.begin(), end = refs.end(); i != end; ++i) {
        weak_referrer_t &ref = *i;
        ptrdiff_t index = ref.referrer - oldBlock;
        weak_register_no_lock(azone, &newBlock[index], ref.referrer, ref.block);
    }
}
#endif

void weak_unregister(Zone *azone, const void *referent, void **referrer)
{
    Auto::SpinLock lock(&azone->weak_refs_table_lock);
    weak_unregister_no_lock(azone, referent, referrer);
}

void weak_unregister_range_no_lock(Auto::Zone *azone, void **referrers, size_t count) {
    for (void **slot = referrers, **limit = referrers + (count - 1); slot <= limit; ++slot) {
        const void *referent = *slot;
        if (referent) {
            weak_entry_t *entry = weak_entry_for_referent(azone, referent);
            if (entry) {
                weak_referrer_array_t &referrers = entry->referrers;
                usword_t probe = hash(slot) % referrers.num_allocated, hash_displacement = 0;
                while (hash_displacement++ <= referrers.max_hash_displacement) {
                    weak_referrer_t &ref = referrers.refs[probe];
                    if (ref.referrer == slot) {
                        // found one, unregister it.
                        weak_unregister_no_lock(azone, referent, slot);
                        break;
                    }
                    if (++probe == referrers.num_allocated)
                        probe = 0;
                }
            }
        }
    }
}

void weak_unregister_range(Auto::Zone *azone, void **referrers, size_t count) {
    Auto::SpinLock lock(&azone->weak_refs_table_lock);
    weak_unregister_range_no_lock(azone, referrers, count);
}

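// Note: `layout` uses the same skip/weak nibble encoding documented above
// weak_transfer_weak_contents.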
void weak_unregister_with_layout(Zone *azone, void *block[], const unsigned char *layout) {
    size_t index = 0;
    unsigned char byte;
    Auto::SpinLock lock(&azone->weak_refs_table_lock);
    while ((byte = *layout++)) {
        uint8_t skips = (byte >> 4);
        index += skips;
        uint8_t weaks = (byte & 0x0F);
        while (weaks--) {
            void **slot = &block[index++];
            const void *referent = *slot;
            if (referent) {
                weak_entry_t *entry = weak_entry_for_referent(azone, referent);
                if (entry) {
                    remove_referrer_no_lock(&entry->referrers, slot);
                    if (entry->referrers.num_refs == 0) {
                        weak_entry_remove_no_lock(azone, entry);
                        azone->num_weak_refs--;
                    }
                }
            }
        }
    }
}

/*
    weak_unregister_data_segment

    Given an about-to-be-unmapped data segment address range, this walks the
    entire weak references table searching for weak references in that range.
    This is likely to be pretty slow, but will also be pretty infrequently
    called. Since the table may need to be rehashed as entries are removed,
    this first walks the table and gathers <referent, referrer> pairs into a
    vector, and then removes the pairs by walking the vector.
 */

struct WeakUnregister {
    Zone *_zone;
    WeakUnregister(Zone *zone) : _zone(zone) {}
    void operator() (const WeakPair &pair) {
        weak_unregister_no_lock(_zone, pair.first, pair.second);
    }
};

void weak_unregister_data_segment(Zone *azone, void *base, size_t size) {
    Auto::SpinLock lock(&azone->weak_refs_table_lock);
    if (azone->num_weak_refs == 0) return;
    Auto::Range datasegment(base, size);
    WeakPairVector weakrefsToUnregister;
    weak_entry_t *weak_refs_table = azone->weak_refs_table;
    for (usword_t counter = 0; counter < azone->max_weak_refs; ++counter) {
        void *referent = (void*) weak_refs_table[counter].referent;
        if (!referent) continue;
        weak_referrer_array_t &referrers = weak_refs_table[counter].referrers;
        weak_referrer_t *refs = referrers.refs;
        for (usword_t index = 0; index < referrers.num_allocated; ++index) {
            void **referrer = refs[index].referrer;
            if (datasegment.in_range((void*)referrer)) {
                weakrefsToUnregister.push_back(WeakPair(referent, referrer));
            }
        }
    }
    std::for_each(weakrefsToUnregister.begin(), weakrefsToUnregister.end(), WeakUnregister(azone));
}

void weak_call_callbacks(auto_weak_callback_block_t *block) {
    if (block == NULL) return;
    while (block != (void *)-1) {
        auto_weak_callback_block_t *next;
        if (uintptr_t(block) & 0x1) {
            old_auto_weak_callback_block *old_block = (old_auto_weak_callback_block *)displace(block, -1);
            next = old_block->next;
            // clear the link so it can be used during next cycle
            old_block->next = NULL;
            (*old_block->callback_function)(old_block->arg1, old_block->arg2);
        } else {
            next = block->next;
            // clear the link so it can be used during next cycle
            block->next = NULL;
            (*block->callback_function)(block->target);
        }
        block = next;
    }
}

__private_extern__ void weak_print_stats(void) DEPRECATED_ATTRIBUTE;
__private_extern__ void weak_print_stats(void)
{
    Zone *azone = (Zone *)auto_zone();
    if (!azone) {
        fprintf(stderr, "weak table empty (GC off)\n");
        return;
    }

    weak_entry_t *table = azone->weak_refs_table;
    if (!table) {
        fprintf(stderr, "weak table empty\n");
        return;
    }

    usword_t chainlen = 0;
    usword_t chaincount = 0;
    usword_t chain = 0;
    usword_t chainmax = 0;

    usword_t table_size = azone->max_weak_refs;
    usword_t start;
    usword_t i;
    // find the start of some chain: skip to the first empty slot, then advance to the next used one
    for (start = 0; start < azone->max_weak_refs; start++) {
        weak_entry_t *entry = table + start;
        if (! entry->referent) break;
    }
    for ( ; start < azone->max_weak_refs; start++) {
        weak_entry_t *entry = table + start;
        if (entry->referent) break;
    }
    if (start == azone->max_weak_refs) {
        fprintf(stderr, "weak table empty\n");
        return;
    }

    // add up all chains
    i = start;
    do {
        weak_entry_t *entry = table + i;
        if (entry->referent) chain++;
        else if (chain) {
            if (chain > chainmax) chainmax = chain;
            chainlen += chain;
            chaincount++;
            chain = 0;
        }
        i++; if (i == table_size) i = 0;
    } while (i != start);

    fprintf(stderr, "weak table %lu/%lu used, %.1g avg / %lu max chain\n",
            chainlen, azone->max_weak_refs,
            chaincount ? chainlen/(double)chaincount : 0.0, chainmax);
}
