#define NO_THE_INDEX_COMPATIBILITY_MACROS
#include "cache.h"
#include "cache-tree.h"
#include "tree.h"
#include "blob.h"
#include "commit.h"
#include "tag.h"
#include "tree-walk.h"

const char *tree_type = "tree";

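/*
 * Create an in-core cache entry for one tree entry and hand it to
 * add_index_entry() with the given option flags.  Directories are not
 * added directly; returning READ_TREE_RECURSIVE asks the tree walk to
 * descend into them instead.
 */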
static int read_one_entry_opt(struct index_state *istate,
			      const unsigned char *sha1,
			      const char *base, int baselen,
			      const char *pathname,
			      unsigned mode, int stage, int opt)
{
	int len;
	unsigned int size;
	struct cache_entry *ce;

	if (S_ISDIR(mode))
		return READ_TREE_RECURSIVE;

	len = strlen(pathname);
	size = cache_entry_size(baselen + len);
	ce = xcalloc(1, size);

	ce->ce_mode = create_ce_mode(mode);
	ce->ce_flags = create_ce_flags(stage);
	ce->ce_namelen = baselen + len;
	memcpy(ce->name, base, baselen);
	memcpy(ce->name + baselen, pathname, len+1);
	hashcpy(ce->oid.hash, sha1);
	return add_index_entry(istate, ce, opt);
}

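/*
 * read_tree_fn_t callback that adds each entry through the regular
 * add_index_entry() path, keeping the index sorted as entries arrive.
 */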
static int read_one_entry(const unsigned char *sha1, struct strbuf *base,
			  const char *pathname, unsigned mode, int stage,
			  void *context)
{
	struct index_state *istate = context;
	return read_one_entry_opt(istate, sha1, base->buf, base->len, pathname,
				  mode, stage,
				  ADD_CACHE_OK_TO_ADD|ADD_CACHE_SKIP_DFCHECK);
}

/*
 * This is used when the caller knows there are no existing entries at
 * the stage that will conflict with the entry being added.
 */
static int read_one_entry_quick(const unsigned char *sha1, struct strbuf *base,
				const char *pathname, unsigned mode, int stage,
				void *context)
{
	struct index_state *istate = context;
	return read_one_entry_opt(istate, sha1, base->buf, base->len, pathname,
				  mode, stage,
				  ADD_CACHE_JUST_APPEND);
}

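/*
 * Walk one tree, calling fn for every entry that the pathspec finds
 * interesting.  When fn returns READ_TREE_RECURSIVE we descend into
 * subtrees (and, for gitlink entries, into the tree of the recorded
 * submodule commit), extending and restoring the path prefix in *base
 * around each recursion.
 */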
static int read_tree_1(struct tree *tree, struct strbuf *base,
		       int stage, const struct pathspec *pathspec,
		       read_tree_fn_t fn, void *context)
{
	struct tree_desc desc;
	struct name_entry entry;
	struct object_id oid;
	int len, oldlen = base->len;
	enum interesting retval = entry_not_interesting;

	if (parse_tree(tree))
		return -1;

	init_tree_desc(&desc, tree->buffer, tree->size);

	while (tree_entry(&desc, &entry)) {
		if (retval != all_entries_interesting) {
			retval = tree_entry_interesting(&entry, base, 0, pathspec);
			if (retval == all_entries_not_interesting)
				break;
			if (retval == entry_not_interesting)
				continue;
		}

		switch (fn(entry.oid->hash, base,
			   entry.path, entry.mode, stage, context)) {
		case 0:
			continue;
		case READ_TREE_RECURSIVE:
			break;
		default:
			return -1;
		}

		if (S_ISDIR(entry.mode))
			oidcpy(&oid, entry.oid);
		else if (S_ISGITLINK(entry.mode)) {
			struct commit *commit;

			commit = lookup_commit(entry.oid);
			if (!commit)
				die("Commit %s in submodule path %s%s not found",
				    oid_to_hex(entry.oid),
				    base->buf, entry.path);

			if (parse_commit(commit))
				die("Invalid commit %s in submodule path %s%s",
				    oid_to_hex(entry.oid),
				    base->buf, entry.path);

			oidcpy(&oid, &commit->tree->object.oid);
		}
		else
			continue;

		len = tree_entry_len(&entry);
		strbuf_add(base, entry.path, len);
		strbuf_addch(base, '/');
		retval = read_tree_1(lookup_tree(&oid),
				     base, stage, pathspec,
				     fn, context);
		strbuf_setlen(base, oldlen);
		if (retval)
			return -1;
	}
	return 0;
}

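/*
 * Public entry point for the walk above: seed the path prefix from
 * base/baselen and release the scratch strbuf once the walk is done.
 */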
int read_tree_recursive(struct tree *tree,
			const char *base, int baselen,
			int stage, const struct pathspec *pathspec,
			read_tree_fn_t fn, void *context)
{
	struct strbuf sb = STRBUF_INIT;
	int ret;

	strbuf_add(&sb, base, baselen);
	ret = read_tree_1(tree, &sb, stage, pathspec, fn, context);
	strbuf_release(&sb);
	return ret;
}

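/*
 * QSORT() comparison function: order cache entries by name and stage,
 * i.e. the canonical index sort order.
 */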
static int cmp_cache_name_compare(const void *a_, const void *b_)
{
	const struct cache_entry *ce1, *ce2;

	ce1 = *((const struct cache_entry **)a_);
	ce2 = *((const struct cache_entry **)b_);
	return cache_name_stage_compare(ce1->name, ce1->ce_namelen, ce_stage(ce1),
					ce2->name, ce2->ce_namelen, ce_stage(ce2));
}

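/*
 * Read a whole tree into the index at the given stage.  If nothing is
 * already recorded at that stage, entries are blindly appended and the
 * index is sorted once at the end; otherwise each entry goes through
 * the slower add_index_entry() path.
 */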
int read_tree(struct tree *tree, int stage, struct pathspec *match,
	      struct index_state *istate)
{
	read_tree_fn_t fn = NULL;
	int i, err;

	/*
	 * Currently the only existing callers of this function all
	 * call it with stage=1 and after making sure there is nothing
	 * at that stage; we could always use read_one_entry_quick().
	 *
	 * But when we decide to straighten out git-read-tree not to
	 * use unpack_trees() in some cases, this will probably start
	 * to matter.
	 */

	/*
	 * See if we have a cache entry at the stage.  If so, do it the
	 * original slow way; otherwise, append and then sort at the end.
	 */
	for (i = 0; !fn && i < istate->cache_nr; i++) {
		const struct cache_entry *ce = istate->cache[i];
		if (ce_stage(ce) == stage)
			fn = read_one_entry;
	}

	if (!fn)
		fn = read_one_entry_quick;
	err = read_tree_recursive(tree, "", 0, stage, match, fn, istate);
	if (fn == read_one_entry || err)
		return err;

	/*
	 * Sort the cache entries -- we need to nuke the cache tree, though.
	 */
	cache_tree_free(&istate->cache_tree);
	QSORT(istate->cache, istate->cache_nr, cmp_cache_name_compare);
	return 0;
}

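/*
 * Return the struct tree for the given object id, allocating a new
 * OBJ_TREE object if it is not in the object hash yet.
 */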
struct tree *lookup_tree(const struct object_id *oid)
{
	struct object *obj = lookup_object(oid->hash);
	if (!obj)
		return create_object(oid->hash, alloc_tree_node());
	return object_as_type(obj, OBJ_TREE, 0);
}

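/*
 * Attach an already-read buffer of tree data to the object and mark it
 * parsed; the tree takes ownership of the buffer.
 */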
int parse_tree_buffer(struct tree *item, void *buffer, unsigned long size)
{
	if (item->object.parsed)
		return 0;
	item->object.parsed = 1;
	item->buffer = buffer;
	item->size = size;

	return 0;
}

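/*
 * Read the tree's contents from the object database unless it has been
 * parsed already.  With quiet_on_missing, a missing object returns -1
 * silently instead of printing an error.
 */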
int parse_tree_gently(struct tree *item, int quiet_on_missing)
{
	enum object_type type;
	void *buffer;
	unsigned long size;

	if (item->object.parsed)
		return 0;
	buffer = read_sha1_file(item->object.oid.hash, &type, &size);
	if (!buffer)
		return quiet_on_missing ? -1 :
			error("Could not read %s",
			      oid_to_hex(&item->object.oid));
	if (type != OBJ_TREE) {
		free(buffer);
		return error("Object %s not a tree",
			     oid_to_hex(&item->object.oid));
	}
	return parse_tree_buffer(item, buffer, size);
}

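/*
 * Release the tree's buffer and mark the object unparsed so it can be
 * re-read later.
 */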
void free_tree_buffer(struct tree *tree)
{
	FREE_AND_NULL(tree->buffer);
	tree->size = 0;
	tree->object.parsed = 0;
}

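/*
 * Peel the given object down to a tree: tags are followed to the
 * tagged object and commits to their tree.  Returns NULL if the object
 * is missing or the chain does not end in a tree.
 */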
struct tree *parse_tree_indirect(const struct object_id *oid)
{
	struct object *obj = parse_object(oid);
	do {
		if (!obj)
			return NULL;
		if (obj->type == OBJ_TREE)
			return (struct tree *) obj;
		else if (obj->type == OBJ_COMMIT)
			obj = &(((struct commit *) obj)->tree->object);
		else if (obj->type == OBJ_TAG)
			obj = ((struct tag *) obj)->tagged;
		else
			return NULL;
		if (!obj->parsed)
			parse_object(&obj->oid);
	} while (1);
}