#include "cache.h"
#include "object.h"

int track_object_refs = 0;

static unsigned int refs_hash_size, nr_object_refs;
static struct object_refs **refs_hash;

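/*
 * Map an object to a slot in an n-entry hash table.  The object name
 * (SHA-1) is already uniformly distributed, so its first word is a
 * good enough hash on its own.
 */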
static unsigned int hash_obj(struct object *obj, unsigned int n)
{
	unsigned int hash = *(unsigned int *)obj->sha1;
	return hash % n;
}

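/*
 * Insert into an open-addressed table with linear probing: start at
 * the hashed slot and walk forward (wrapping around) until a free
 * slot is found.  Callers keep the table well under full, so the
 * probe always terminates.
 */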
static void insert_ref_hash(struct object_refs *ref, struct object_refs **hash, unsigned int size)
{
	int j = hash_obj(ref->base, size);

	while (hash[j]) {
		j++;
		if (j >= size)
			j = 0;
	}
	hash[j] = ref;
}

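/*
 * Resize the table to (size + 1000) * 3/2 slots and re-insert every
 * existing entry into the new table.
 */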
static void grow_refs_hash(void)
{
	int i;
	int new_hash_size = (refs_hash_size + 1000) * 3 / 2;
	struct object_refs **new_hash;

	new_hash = xcalloc(new_hash_size, sizeof(struct object_refs *));
	for (i = 0; i < refs_hash_size; i++) {
		struct object_refs *ref = refs_hash[i];
		if (!ref)
			continue;
		insert_ref_hash(ref, new_hash, new_hash_size);
	}
	free(refs_hash);
	refs_hash = new_hash;
	refs_hash_size = new_hash_size;
}

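/*
 * Register the reference list for an object, growing the hash first
 * so that the load factor stays below 2/3.
 */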
static void add_object_refs(struct object *obj, struct object_refs *ref)
{
	int nr = nr_object_refs + 1;

	if (nr > refs_hash_size * 2 / 3)
		grow_refs_hash();
	ref->base = obj;
	insert_ref_hash(ref, refs_hash, refs_hash_size);
	nr_object_refs = nr;
}

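/*
 * Find the recorded reference list for an object, or NULL if none was
 * stored (or if the table was never populated).  Probes linearly from
 * the hashed slot until it hits a match or an empty slot.
 */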
struct object_refs *lookup_object_refs(struct object *obj)
{
	struct object_refs *ref;
	int j;

	/* nothing to lookup */
	if (!refs_hash_size)
		return NULL;
	j = hash_obj(obj, refs_hash_size);
	while ((ref = refs_hash[j]) != NULL) {
		if (ref->base == obj)
			break;
		j++;
		if (j >= refs_hash_size)
			j = 0;
	}
	return ref;
}

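/*
 * Allocate a zeroed object_refs with room for "count" trailing
 * object pointers.
 */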
struct object_refs *alloc_object_refs(unsigned count)
{
	struct object_refs *refs;
	size_t size = sizeof(*refs) + count*sizeof(struct object *);

	refs = xcalloc(1, size);
	refs->count = count;
	return refs;
}

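/* qsort() comparator: order struct object pointers by address. */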
static int compare_object_pointers(const void *a, const void *b)
{
	const struct object * const *pa = a;
	const struct object * const *pb = b;
	if (*pa == *pb)
		return 0;
	else if (*pa < *pb)
		return -1;
	else
		return 1;
}

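/*
 * Attach a reference list to an object: drop empty lists, sort and
 * de-duplicate the pointers (shrinking the allocation if duplicates
 * were removed), mark every referenced object as used, and register
 * the list in the hash.
 */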
void set_object_refs(struct object *obj, struct object_refs *refs)
{
	unsigned int i, j;

	/* Do not install empty list of references */
	if (refs->count < 1) {
		free(refs);
		return;
	}

	/* Sort the list and filter out duplicates */
	qsort(refs->ref, refs->count, sizeof(refs->ref[0]),
	      compare_object_pointers);
	for (i = j = 1; i < refs->count; i++) {
		if (refs->ref[i] != refs->ref[i - 1])
			refs->ref[j++] = refs->ref[i];
	}
	if (j < refs->count) {
		/* Duplicates were found - reallocate list */
		size_t size = sizeof(*refs) + j*sizeof(struct object *);
		refs->count = j;
		refs = xrealloc(refs, size);
	}

	for (i = 0; i < refs->count; i++)
		refs->ref[i]->used = 1;
	add_object_refs(obj, refs);
}

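/*
 * Recursively set "mask" in the flags of obj and of everything
 * reachable from it through the recorded references.  The mask bit
 * itself serves as the "already visited" marker, so each object is
 * expanded at most once.
 */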
void mark_reachable(struct object *obj, unsigned int mask)
{
	const struct object_refs *refs;

	if (!track_object_refs)
		die("cannot do reachability with object refs turned off");
	/* If we've been here already, don't bother */
	if (obj->flags & mask)
		return;
	obj->flags |= mask;
	refs = lookup_object_refs(obj);
	if (refs) {
		unsigned i;
		for (i = 0; i < refs->count; i++)
			mark_reachable(refs->ref[i], mask);
	}
}
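/*
 * A rough usage sketch (not part of this file; the object names below
 * are hypothetical).  A caller wanting reachability would enable
 * tracking, record each object's outgoing references, and then
 * flood-fill a flag bit of its choosing:
 *
 *	track_object_refs = 1;
 *
 *	refs = alloc_object_refs(2);
 *	refs->ref[0] = tree;		// objects obtained elsewhere
 *	refs->ref[1] = parent;
 *	set_object_refs(commit, refs);
 *	...
 *	mark_reachable(commit, REACHABLE);	// REACHABLE: caller-defined flag bit
 */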