#include "cache.h"
#include "cache-tree.h"
#include "tree.h"
#include "blob.h"
#include "commit.h"
#include "tag.h"
#include "tree-walk.h"

const char *tree_type = "tree";
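
/*
 * Create an index (cache) entry for a single tree entry at the given
 * stage and add it to the index with the given add_cache_entry()
 * option flags.  Directories are not added themselves; the tree
 * walker is asked to recurse into them instead.
 */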
static int read_one_entry_opt(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, int opt)
{
int len;
unsigned int size;
struct cache_entry *ce;
if (S_ISDIR(mode))
return READ_TREE_RECURSIVE;
len = strlen(pathname);
size = cache_entry_size(baselen + len);
ce = xcalloc(1, size);
ce->ce_mode = create_ce_mode(mode);
ce->ce_flags = create_ce_flags(baselen + len, stage);
memcpy(ce->name, base, baselen);
memcpy(ce->name + baselen, pathname, len+1);
hashcpy(ce->sha1, sha1);
return add_cache_entry(ce, opt);
}
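
/*
 * Callback for read_tree_recursive(): add the entry through the
 * normal insertion path, allowing new entries and skipping the
 * directory/file conflict check.
 */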
static int read_one_entry(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, void *context)
{
return read_one_entry_opt(sha1, base, baselen, pathname, mode, stage,
ADD_CACHE_OK_TO_ADD|ADD_CACHE_SKIP_DFCHECK);
}

/*
 * This is used when the caller knows there are no existing entries at
 * the stage that will conflict with the entry being added.
 */
static int read_one_entry_quick(const unsigned char *sha1, const char *base, int baselen, const char *pathname, unsigned mode, int stage, void *context)
{
return read_one_entry_opt(sha1, base, baselen, pathname, mode, stage,
ADD_CACHE_JUST_APPEND);
}
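
/*
 * Check whether the tree entry, identified by its leading directory
 * "base" and its name "path", matches at least one of the pathspecs
 * in "paths".  A NULL pathspec list matches everything.
 */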
static int match_tree_entry(const char *base, int baselen, const char *path, unsigned int mode, const char **paths)
{
const char *match;
int pathlen;
if (!paths)
return 1;
pathlen = strlen(path);
while ((match = *paths++) != NULL) {
int matchlen = strlen(match);
if (baselen >= matchlen) {
/* If it doesn't match, move along... */
if (strncmp(base, match, matchlen))
continue;
/* pathspecs match only at the directory boundaries */
if (!matchlen ||
base[matchlen] == '/' ||
match[matchlen - 1] == '/')
return 1;
continue;
}
/* Does the base match? */
if (strncmp(base, match, baselen))
continue;
match += baselen;
matchlen -= baselen;
if (pathlen > matchlen)
continue;
if (matchlen > pathlen) {
if (match[pathlen] != '/')
continue;
if (!S_ISDIR(mode))
continue;
}
if (strncmp(path, match, pathlen))
continue;
return 1;
}
return 0;
}
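
/*
 * Walk all entries of the given tree and call "fn" on each entry that
 * matches the pathspecs.  When the callback returns
 * READ_TREE_RECURSIVE for a directory entry, descend into that
 * subtree with the base path extended accordingly.
 */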
int read_tree_recursive(struct tree *tree,
const char *base, int baselen,
int stage, const char **match,
read_tree_fn_t fn, void *context)
{
struct tree_desc desc;
struct name_entry entry;
if (parse_tree(tree))
return -1;
init_tree_desc(&desc, tree->buffer, tree->size);
while (tree_entry(&desc, &entry)) {
if (!match_tree_entry(base, baselen, entry.path, entry.mode, match))
continue;
switch (fn(entry.sha1, base, baselen, entry.path, entry.mode, stage, context)) {
case 0:
continue;
case READ_TREE_RECURSIVE:
			break;
default:
return -1;
}
if (S_ISDIR(entry.mode)) {
int retval;
char *newbase;
unsigned int pathlen = tree_entry_len(entry.path, entry.sha1);
newbase = xmalloc(baselen + 1 + pathlen);
memcpy(newbase, base, baselen);
memcpy(newbase + baselen, entry.path, pathlen);
newbase[baselen + pathlen] = '/';
retval = read_tree_recursive(lookup_tree(entry.sha1),
newbase,
baselen + pathlen + 1,
stage, match, fn, context);
free(newbase);
if (retval)
return -1;
continue;
}
}
return 0;
}
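
/* qsort() comparison function: order cache entries with cache_name_compare() */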
static int cmp_cache_name_compare(const void *a_, const void *b_)
{
const struct cache_entry *ce1, *ce2;
ce1 = *((const struct cache_entry **)a_);
ce2 = *((const struct cache_entry **)b_);
return cache_name_compare(ce1->name, ce1->ce_flags,
ce2->name, ce2->ce_flags);
}
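
/*
 * Read a whole tree into the index at the given stage.  If the index
 * has no entries at that stage yet, the entries can simply be
 * appended and sorted once at the end; otherwise each entry goes
 * through the regular (slower) insertion path.
 */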
int read_tree(struct tree *tree, int stage, const char **match)
{
read_tree_fn_t fn = NULL;
int i, err;
	/*
	 * All existing callers of this function call it with stage=1,
	 * and only after making sure there is nothing at that stage;
	 * we could always use read_one_entry_quick().
	 *
	 * But when we decide to straighten out git-read-tree not to
	 * use unpack_trees() in some cases, this will probably start
	 * to matter.
	 */
	/*
	 * See if we have a cache entry at the stage.  If so,
	 * do it the original slow way; otherwise, append and then
	 * sort at the end.
	 */
for (i = 0; !fn && i < active_nr; i++) {
struct cache_entry *ce = active_cache[i];
if (ce_stage(ce) == stage)
fn = read_one_entry;
}
if (!fn)
fn = read_one_entry_quick;
err = read_tree_recursive(tree, "", 0, stage, match, fn, NULL);
if (fn == read_one_entry || err)
return err;
	/*
	 * Sort the cache entries -- we need to nuke the cache tree, though.
	 */
cache_tree_free(&active_cache_tree);
qsort(active_cache, active_nr, sizeof(active_cache[0]),
cmp_cache_name_compare);
return 0;
}
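
/*
 * Find the in-core object for the given SHA-1, creating a fresh tree
 * object if it is not known yet.  Returns NULL (after reporting an
 * error) if the object exists but is not a tree.
 */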
struct tree *lookup_tree(const unsigned char *sha1)
{
struct object *obj = lookup_object(sha1);
if (!obj)
return create_object(sha1, OBJ_TREE, alloc_tree_node());
if (!obj->type)
obj->type = OBJ_TREE;
if (obj->type != OBJ_TREE) {
error("Object %s is a %s, not a tree",
sha1_to_hex(sha1), typename(obj->type));
return NULL;
}
return (struct tree *) obj;
}
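
/*
 * Attach an already-read buffer (and its size) to the in-core tree
 * object and mark it parsed; does nothing if the object was parsed
 * already.
 */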
int parse_tree_buffer(struct tree *item, void *buffer, unsigned long size)
{
if (item->object.parsed)
return 0;
item->object.parsed = 1;
item->buffer = buffer;
item->size = size;
return 0;
}
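
/*
 * Read the tree's contents from the object database, verify that the
 * object really is a tree, and attach the buffer with
 * parse_tree_buffer().  No-op if the object has already been parsed.
 */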
int parse_tree(struct tree *item)
{
enum object_type type;
void *buffer;
unsigned long size;
if (item->object.parsed)
return 0;
buffer = read_sha1_file(item->object.sha1, &type, &size);
if (!buffer)
return error("Could not read %s",
sha1_to_hex(item->object.sha1));
if (type != OBJ_TREE) {
free(buffer);
return error("Object %s not a tree",
sha1_to_hex(item->object.sha1));
}
return parse_tree_buffer(item, buffer, size);
}
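
/*
 * Dereference the given object until a tree is reached: a commit is
 * replaced by its tree, a tag by the object it points at.  Returns
 * NULL if the chain does not end in a tree.
 */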
struct tree *parse_tree_indirect(const unsigned char *sha1)
{
struct object *obj = parse_object(sha1);
do {
if (!obj)
return NULL;
if (obj->type == OBJ_TREE)
return (struct tree *) obj;
else if (obj->type == OBJ_COMMIT)
obj = &(((struct commit *) obj)->tree->object);
else if (obj->type == OBJ_TAG)
obj = ((struct tag *) obj)->tagged;
else
return NULL;
if (!obj->parsed)
parse_object(obj->sha1);
} while (1);
}