[git.git] / builtin-read-tree.c

/*
 * GIT - The information manager from hell
 *
 * Copyright (C) Linus Torvalds, 2005
 */

#include "cache.h"
#include "object.h"
#include "tree.h"
#include "tree-walk.h"
#include "cache-tree.h"
#include "unpack-trees.h"
#include "dir.h"
#include "builtin.h"

static int nr_trees;
static struct tree *trees[MAX_UNPACK_TREES];
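
/*
 * Remember the tree named by sha1 so it can be unpacked later; no more
 * than MAX_UNPACK_TREES trees may be given on the command line.
 */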
static int list_tree(unsigned char *sha1)
{
        struct tree *tree;

        if (nr_trees >= MAX_UNPACK_TREES)
                die("I cannot read more than %d trees", MAX_UNPACK_TREES);
        tree = parse_tree_indirect(sha1);
        if (!tree)
                return -1;
        trees[nr_trees++] = tree;
        return 0;
}
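
/*
 * Recursively populate a cache_tree from the given tree object: record
 * the tree's SHA-1, descend into subtrees, and count the blobs covered
 * so the resulting cache-tree entries are valid.
 */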
static void prime_cache_tree_rec(struct cache_tree *it, struct tree *tree)
{
        struct tree_desc desc;
        struct name_entry entry;
        int cnt;

        hashcpy(it->sha1, tree->object.sha1);
        init_tree_desc(&desc, tree->buffer, tree->size);
        cnt = 0;
        while (tree_entry(&desc, &entry)) {
                if (!S_ISDIR(entry.mode))
                        cnt++;
                else {
                        struct cache_tree_sub *sub;
                        struct tree *subtree = lookup_tree(entry.sha1);
                        if (!subtree->object.parsed)
                                parse_tree(subtree);
                        sub = cache_tree_sub(it, entry.path);
                        sub->cache_tree = cache_tree();
                        prime_cache_tree_rec(sub->cache_tree, subtree);
                        cnt += sub->cache_tree->entry_count;
                }
        }
        it->entry_count = cnt;
}
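
/*
 * Prime the cache-tree of the index from the first tree that was read.
 */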
static void prime_cache_tree(void)
{
        if (!nr_trees)
                return;
        active_cache_tree = cache_tree();
        prime_cache_tree_rec(active_cache_tree, trees[0]);
}

static const char read_tree_usage[] = "git-read-tree (<sha> | [[-m [--trivial] [--aggressive] | --reset | --prefix=<prefix>] [-u | -i]] [--exclude-per-directory=<gitignore>] [--index-output=<file>] <sha1> [<sha2> [<sha3>]])";
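
/*
 * The lock on the index; kept at file scope because a lock_file
 * structure must remain valid until the lock is committed or the
 * process exits.
 */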
static struct lock_file lock_file;

int cmd_read_tree(int argc, const char **argv, const char *unused_prefix)
{
        int i, newfd, stage = 0;
        unsigned char sha1[20];
        struct tree_desc t[MAX_UNPACK_TREES];
        struct unpack_trees_options opts;

        memset(&opts, 0, sizeof(opts));
        opts.head_idx = -1;
        opts.src_index = &the_index;
        opts.dst_index = &the_index;

        git_config(git_default_config, NULL);
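
        /*
         * Take the lock on the index file now; the second argument asks
         * hold_locked_index() to die if the lock cannot be taken.
         */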
        newfd = hold_locked_index(&lock_file, 1);

        for (i = 1; i < argc; i++) {
                const char *arg = argv[i];

                /* "-u" means "update", meaning that a merge will update
                 * the working tree.
                 */
                if (!strcmp(arg, "-u")) {
                        opts.update = 1;
                        continue;
                }
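
                /* "-v" means "verbose", reporting progress as the
                 * working tree is updated.
                 */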
                if (!strcmp(arg, "-v")) {
                        opts.verbose_update = 1;
                        continue;
                }

                /* "-i" means "index only", meaning that a merge will
                 * not even look at the working tree.
                 */
                if (!strcmp(arg, "-i")) {
                        opts.index_only = 1;
                        continue;
                }
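
                /* "--index-output=<file>" writes the resulting index to
                 * <file> instead of the default index file.
                 */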
                if (!prefixcmp(arg, "--index-output=")) {
                        set_alternate_index_output(arg + 15);
                        continue;
                }

                /* "--prefix=<subdirectory>/" means keep the current index
                 * entries and put the entries from the tree under the
                 * given subdirectory.
                 */
                if (!prefixcmp(arg, "--prefix=")) {
                        if (stage || opts.merge || opts.prefix)
                                usage(read_tree_usage);
                        opts.prefix = arg + 9;
                        opts.merge = 1;
                        stage = 1;
                        if (read_cache_unmerged())
                                die("you need to resolve your current index first");
                        continue;
                }

                /* This differs from "-m" in that we'll silently ignore
                 * unmerged entries and overwrite working tree files that
                 * correspond to them.
                 */
                if (!strcmp(arg, "--reset")) {
                        if (stage || opts.merge || opts.prefix)
                                usage(read_tree_usage);
                        opts.reset = 1;
                        opts.merge = 1;
                        stage = 1;
                        read_cache_unmerged();
                        continue;
                }
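
                /* "--trivial" restricts a three-way merge to the cases
                 * that resolve trivially, erroring out otherwise.
                 */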
                if (!strcmp(arg, "--trivial")) {
                        opts.trivial_merges_only = 1;
                        continue;
                }
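
                /* "--aggressive" lets a three-way merge resolve a few
                 * more cases itself, e.g. a path removed from both
                 * sides or added identically on both sides.
                 */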
                if (!strcmp(arg, "--aggressive")) {
                        opts.aggressive = 1;
                        continue;
                }

                /* "-m" stands for "merge", meaning we start in stage 1 */
                if (!strcmp(arg, "-m")) {
                        if (stage || opts.merge || opts.prefix)
                                usage(read_tree_usage);
                        if (read_cache_unmerged())
                                die("you need to resolve your current index first");
                        stage = 1;
                        opts.merge = 1;
                        continue;
                }
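
                /* "--exclude-per-directory=<gitignore>" names the
                 * per-directory ignore file consulted when "-u" updates
                 * the working tree, so that ignored untracked files may
                 * be overwritten.
                 */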
                if (!prefixcmp(arg, "--exclude-per-directory=")) {
                        struct dir_struct *dir;

                        if (opts.dir)
                                die("more than one --exclude-per-directory given.");

                        dir = xcalloc(1, sizeof(*opts.dir));
                        dir->show_ignored = 1;
                        dir->exclude_per_dir = arg + 24;
                        opts.dir = dir;
                        /* We do not need to nor want to do read-directory
                         * here; we are merely interested in reusing the
                         * per directory ignore stack mechanism.
                         */
                        continue;
                }

                /* using -u and -i at the same time makes no sense */
                if (1 < opts.index_only + opts.update)
                        usage(read_tree_usage);

                if (get_sha1(arg, sha1))
                        die("Not a valid object name %s", arg);
                if (list_tree(sha1) < 0)
                        die("failed to unpack tree object %s", arg);
                stage++;
        }
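        /* "-u" and "-i" only make sense during a merge, and
         * "--exclude-per-directory" only together with "-u".
         */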
        if ((opts.update || opts.index_only) && !opts.merge)
                usage(read_tree_usage);
        if (opts.dir && !opts.update)
                die("--exclude-per-directory is meaningless unless -u");

        if (opts.merge) {
                if (stage < 2)
                        die("just how do you expect me to merge %d trees?", stage - 1);
                switch (stage - 1) {
                case 1:
                        opts.fn = opts.prefix ? bind_merge : oneway_merge;
                        break;
                case 2:
                        opts.fn = twoway_merge;
                        break;
                case 3:
                default:
                        opts.fn = threeway_merge;
                        cache_tree_free(&active_cache_tree);
                        break;
                }
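                /* With three or more trees, HEAD is the next-to-last
                 * tree given; with one or two trees it is the first.
                 */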
                if (stage - 1 >= 3)
                        opts.head_idx = stage - 2;
                else
                        opts.head_idx = 1;
        }
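
        /* Turn each collected tree into a tree_desc and let
         * unpack_trees() do the real work.
         */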
        for (i = 0; i < nr_trees; i++) {
                struct tree *tree = trees[i];
                parse_tree(tree);
                init_tree_desc(t + i, tree->buffer, tree->size);
        }
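        /* On failure unpack_trees() is expected to have reported the
         * problem already; exit with 128, the same code die() uses.
         */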
        if (unpack_trees(nr_trees, t, &opts))
                return 128;

        /*
         * When reading only one tree (either the most basic form,
         * "-m ent" or "--reset ent" form), we can obtain a fully
         * valid cache-tree because the index must match exactly
         * what came from the tree.
         */
        if (nr_trees && !opts.prefix && (!opts.merge || (stage == 2))) {
                cache_tree_free(&active_cache_tree);
                prime_cache_tree();
        }

        if (write_cache(newfd, active_cache, active_nr) ||
            commit_locked_index(&lock_file))
                die("unable to write new index file");
        return 0;
}