#include "cache.h"
#include "cache-tree.h"
#include "tree.h"
#include "blob.h"
#include "commit.h"
#include "tag.h"
#include "tree-walk.h"

const char *tree_type = "tree";

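/*
 * Turn a tree entry into a cache entry and add it to the index with the
 * given add_cache_entry() options; directories are not added themselves
 * but reported back so the walk can recurse into them.
 */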
static int read_one_entry_opt(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, int opt)
{
	int len;
	unsigned int size;
	struct cache_entry *ce;

	if (S_ISDIR(mode))
		return READ_TREE_RECURSIVE;

	len = strlen(pathname);
	size = cache_entry_size(baselen + len);
	ce = xcalloc(1, size);

	ce->ce_mode = create_ce_mode(mode);
	ce->ce_flags = create_ce_flags(baselen + len, stage);
	memcpy(ce->name, base, baselen);
	memcpy(ce->name + baselen, pathname, len+1);
	hashcpy(ce->sha1, sha1);
	return add_cache_entry(ce, opt);
}

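/*
 * Callback for read_tree_recursive() that inserts each entry into the
 * active cache at its sorted position; used when entries may already
 * exist at the stage being populated.
 */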
static int read_one_entry(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, void *context)
{
	return read_one_entry_opt(sha1, base, baselen, pathname, mode, stage,
				  ADD_CACHE_OK_TO_ADD|ADD_CACHE_SKIP_DFCHECK);
}

/*
 * This is used when the caller knows there are no existing entries at
 * the stage that will conflict with the entry being added.
 */
static int read_one_entry_quick(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, void *context)
{
	return read_one_entry_opt(sha1, base, baselen, pathname, mode, stage,
				  ADD_CACHE_JUST_APPEND);
}

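/*
 * Return 1 if the entry "path" under "base" is covered by one of the
 * pathspecs in "paths" (or if no pathspecs were given at all), 0 otherwise.
 */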
static int match_tree_entry(const char *base, int baselen, const char *path, unsigned int mode, const char **paths)
{
	const char *match;
	int pathlen;

	if (!paths)
		return 1;
	pathlen = strlen(path);
	while ((match = *paths++) != NULL) {
		int matchlen = strlen(match);

		if (baselen >= matchlen) {
			/* If it doesn't match, move along... */
			if (strncmp(base, match, matchlen))
				continue;
			/* The base is a subdirectory of a path which was specified. */
			return 1;
		}

		/* Does the base match? */
		if (strncmp(base, match, baselen))
			continue;

		match += baselen;
		matchlen -= baselen;

		if (pathlen > matchlen)
			continue;

		if (matchlen > pathlen) {
			if (match[pathlen] != '/')
				continue;
			if (!S_ISDIR(mode))
				continue;
		}

		if (strncmp(path, match, pathlen))
			continue;

		return 1;
	}
	return 0;
}

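/*
 * Walk all entries of "tree" that match the given paths, calling "fn" on
 * each one and descending into subtrees (and submodule commits) whenever
 * the callback returns READ_TREE_RECURSIVE.
 *
 * A minimal callback could look like the following sketch (illustrative
 * only; "show_entry" is not part of this file):
 *
 *	static int show_entry(const unsigned char *sha1, const char *base,
 *			      int baselen, const char *pathname,
 *			      unsigned mode, int stage, void *context)
 *	{
 *		printf("%.*s%s\n", baselen, base, pathname);
 *		return S_ISDIR(mode) ? READ_TREE_RECURSIVE : 0;
 *	}
 */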
int read_tree_recursive(struct tree *tree,
			const char *base, int baselen,
			int stage, const char **match,
			read_tree_fn_t fn, void *context)
{
	struct tree_desc desc;
	struct name_entry entry;

	if (parse_tree(tree))
		return -1;

	init_tree_desc(&desc, tree->buffer, tree->size);

	while (tree_entry(&desc, &entry)) {
		if (!match_tree_entry(base, baselen, entry.path, entry.mode, match))
			continue;

		switch (fn(entry.sha1, base, baselen, entry.path, entry.mode, stage, context)) {
		case 0:
			continue;
		case READ_TREE_RECURSIVE:
			break;
		default:
			return -1;
		}
		if (S_ISDIR(entry.mode)) {
			int retval;
			char *newbase;
			unsigned int pathlen = tree_entry_len(entry.path, entry.sha1);

			newbase = xmalloc(baselen + 1 + pathlen);
			memcpy(newbase, base, baselen);
			memcpy(newbase + baselen, entry.path, pathlen);
			newbase[baselen + pathlen] = '/';
			retval = read_tree_recursive(lookup_tree(entry.sha1),
						     newbase,
						     baselen + pathlen + 1,
						     stage, match, fn, context);
			free(newbase);
			if (retval)
				return -1;
			continue;
		} else if (S_ISGITLINK(entry.mode)) {
			int retval;
			struct strbuf path;
			unsigned int entrylen;
			struct commit *commit;

			entrylen = tree_entry_len(entry.path, entry.sha1);
			strbuf_init(&path, baselen + entrylen + 1);
			strbuf_add(&path, base, baselen);
			strbuf_add(&path, entry.path, entrylen);
			strbuf_addch(&path, '/');

			commit = lookup_commit(entry.sha1);
			if (!commit)
				die("Commit %s in submodule path %s not found",
				    sha1_to_hex(entry.sha1), path.buf);

			if (parse_commit(commit))
				die("Invalid commit %s in submodule path %s",
				    sha1_to_hex(entry.sha1), path.buf);

			retval = read_tree_recursive(commit->tree,
						     path.buf, path.len,
						     stage, match, fn, context);
			strbuf_release(&path);
			if (retval)
				return -1;
			continue;
		}
	}
	return 0;
}

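/*
 * qsort() comparison function for pointers to cache entries; orders them
 * by name and stage via cache_name_compare().
 */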
static int cmp_cache_name_compare(const void *a_, const void *b_)
{
	const struct cache_entry *ce1, *ce2;

	ce1 = *((const struct cache_entry **)a_);
	ce2 = *((const struct cache_entry **)b_);
	return cache_name_compare(ce1->name, ce1->ce_flags,
				  ce2->name, ce2->ce_flags);
}

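/*
 * Read all matching entries of "tree" (recursively) into the active cache
 * at the given stage.
 */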
int read_tree(struct tree *tree, int stage, const char **match)
{
	read_tree_fn_t fn = NULL;
	int i, err;

	/*
	 * Currently the only existing callers of this function all
	 * call it with stage=1 and after making sure there is nothing
	 * at that stage; we could always use read_one_entry_quick().
	 *
	 * But when we decide to straighten out git-read-tree not to
	 * use unpack_trees() in some cases, this will probably start
	 * to matter.
	 */

	/*
	 * See if we already have a cache entry at that stage.  If so,
	 * do it the original slow way; otherwise, append and then
	 * sort at the end.
	 */
	for (i = 0; !fn && i < active_nr; i++) {
		struct cache_entry *ce = active_cache[i];
		if (ce_stage(ce) == stage)
			fn = read_one_entry;
	}

	if (!fn)
		fn = read_one_entry_quick;
	err = read_tree_recursive(tree, "", 0, stage, match, fn, NULL);
	if (fn == read_one_entry || err)
		return err;

	/*
	 * Sort the cache entries -- we need to nuke the cache tree, though.
	 */
	cache_tree_free(&active_cache_tree);
	qsort(active_cache, active_nr, sizeof(active_cache[0]),
	      cmp_cache_name_compare);
	return 0;
}

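/*
 * Find or create the in-core object for the tree with the given SHA-1;
 * returns NULL if the object is already known to be something other than
 * a tree.
 */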
struct tree *lookup_tree(const unsigned char *sha1)
{
	struct object *obj = lookup_object(sha1);
	if (!obj)
		return create_object(sha1, OBJ_TREE, alloc_tree_node());
	if (!obj->type)
		obj->type = OBJ_TREE;
	if (obj->type != OBJ_TREE) {
		error("Object %s is a %s, not a tree",
		      sha1_to_hex(sha1), typename(obj->type));
		return NULL;
	}
	return (struct tree *) obj;
}

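/*
 * Attach an already-read tree buffer to the in-core tree object and mark
 * it as parsed.
 */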
int parse_tree_buffer(struct tree *item, void *buffer, unsigned long size)
{
	if (item->object.parsed)
		return 0;
	item->object.parsed = 1;
	item->buffer = buffer;
	item->size = size;

	return 0;
}

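/*
 * Read the tree's contents from the object database, unless it has
 * already been parsed.
 */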
int parse_tree(struct tree *item)
{
	enum object_type type;
	void *buffer;
	unsigned long size;

	if (item->object.parsed)
		return 0;
	buffer = read_sha1_file(item->object.sha1, &type, &size);
	if (!buffer)
		return error("Could not read %s",
			     sha1_to_hex(item->object.sha1));
	if (type != OBJ_TREE) {
		free(buffer);
		return error("Object %s not a tree",
			     sha1_to_hex(item->object.sha1));
	}
	return parse_tree_buffer(item, buffer, size);
}

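/*
 * Resolve sha1 to a tree, peeling tags and commits as needed; returns
 * NULL if it does not lead to a tree.
 */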
struct tree *parse_tree_indirect(const unsigned char *sha1)
{
	struct object *obj = parse_object(sha1);
	do {
		if (!obj)
			return NULL;
		if (obj->type == OBJ_TREE)
			return (struct tree *) obj;
		else if (obj->type == OBJ_COMMIT)
			obj = &(((struct commit *) obj)->tree->object);
		else if (obj->type == OBJ_TAG)
			obj = ((struct tag *) obj)->tagged;
		else
			return NULL;
		if (!obj->parsed)
			parse_object(obj->sha1);
	} while (1);
}