/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to workingtree.py

  • Committer: Jelmer Vernooij
  • Date: 2018-04-02 14:59:43 UTC
  • mto: (0.200.1913 work)
  • mto: This revision was merged to the branch mainline in revision 6960.
  • Revision ID: jelmer@jelmer.uk-20180402145943-s5jmpbvvf1x42pao
Just don't touch the URL if it's already a valid URL.

# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import

import itertools
from cStringIO import (
    StringIO,
    )
from collections import defaultdict
import errno
from dulwich.errors import NotGitRepository
from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.index import (
    Index,
    build_index_from_tree,
    changes_from_tree,
    cleanup_mode,
    commit_tree,
    index_entry_from_path,
    index_entry_from_stat,
    iter_fresh_entries,
    blob_from_path_and_stat,
    FLAG_STAGEMASK,
    validate_path,
    )
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    Blob,
    Tree,
    S_IFGITLINK,
    S_ISGITLINK,
    ZERO_SHA,
    )
from dulwich.repo import Repo
import os
import posixpath
import re
import stat
import sys

from ... import (
    conflicts as _mod_conflicts,
    errors,
    controldir as _mod_controldir,
    globbing,
    ignores,
    lock,
    merge,
    osutils,
    revision as _mod_revision,
    trace,
    transport as _mod_transport,
    tree,
    workingtree,
    )
from ...decorators import (
    only_raises,
    )
from ...bzr import (
    inventory,
    )
from ...mutabletree import (
    MutableTree,
    )


from .dir import (
    LocalGitDir,
    )
from .tree import (
    changes_from_git_changes,
    tree_delta_from_git_changes,
    InterGitTrees,
    MutableGitIndexTree,
    )
from .mapping import (
    GitFileIdMap,
    mode_kind,
    )

IGNORE_FILENAME = ".gitignore"


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch, index):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self.index = index
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()
        self._reset_data()

    def supports_tree_reference(self):
        return False

    def supports_rename_tracking(self):
        return False

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
            self.index.read()
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
            self.index.read()
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1

    def lock_tree_write(self):
        self.branch.lock_read()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except:
            self.branch.unlock()
            raise

    def lock_write(self, token=None):
        self.branch.lock_write()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except:
            self.branch.unlock()
            raise

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):
        return False

    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        try:
            self._cleanup()
            self._lock_count -= 1
            if self._lock_count > 0:
                return
            self._lock_mode = None
        finally:
            self.branch.unlock()

    def _cleanup(self):
        pass

    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def merge_modified(self):
        return {}

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes('MERGE_HEAD', '\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
 
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip('\n')
                parents.append(self.branch.lookup_foreign_revision_id(revision_id))
        return parents

    def check_state(self):
        """Check that the working state is/isn't valid."""
        pass

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
        force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
 
        if isinstance(files, basestring):
            files = [files]

        if to_file is None:
            to_file = sys.stdout

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory
        files_to_backup = []

        all_files = set()

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)

                if filepath:
                    all_files.add(filepath)
                    recurse_directory_to_add_files(filepath)

            files = list(all_files)

            if len(files) == 0:
                return # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                         include_unchanged=True, require_versioned=False,
                         want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))

            for f in files:
                if f == '':
                    continue

                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None

                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + '       ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None
            self.flush()

    def smart_add(self, file_list, recurse=True, action=None, save=True):
        if not file_list:
            file_list = [u'.']

        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []
        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    if filepath in self.index:
                        # Already present
                        continue
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    if filepath not in self.index:
                        call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                if user_dir != '':
                    try:
                        transport = _mod_transport.get_transport_from_path(abs_user_dir)
                        _mod_controldir.ControlDirFormat.find_format(transport)
                        subtree = True
                    except errors.NotBranchError:
                        subtree = False
                    except errors.UnsupportedFormatError:
                        subtree = False
                else:
                    subtree = False
                if subtree:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        if subp in self.index:
                            # Already present
                            continue
                        if subp in conflicts_related:
                            continue
                        call_action(filepath, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)
            if added and save:
                self.flush()
            return added, ignored

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        if from_dir is None:
            from_dir = u""
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip("/")
            if self.controldir.is_control_filename(dir_relpath):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(name):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                    try:
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
 
        with self.lock_read():
            for p in (set(self._iter_files_recursive(include_dirs=True)) - set([p.decode('utf-8') for p in self.index])):
                if not self._has_dir(p):
                    yield p

    def flush(self):
        # TODO: Maybe this should only write on dirty ?
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        self.index.write()

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_file_id(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        try:
            root_tree = self.store[head].tree
        except KeyError:
            return False
        try:
            tree_lookup_path(self.store.__getitem__, root_tree, path)
        except KeyError:
            return False
        else:
            return True

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError, (num, msg):
            if num == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None.  So this can simply be used as a
        boolean if desired."""
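        # Both Bazaar-level ignores (runtime and user ignores) and the
        # repository's gitignore rules are consulted; whichever pattern
        # matches is returned.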
 
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += b'/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps:
            return None
        if not ps[-1].is_exclude:
            return None
        return bytes(ps[-1])

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return False
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        try:
            head = self.repository._git.head()
        except KeyError:
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        else:
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            try:
                return ("GIT", self.index[path.encode('utf-8')].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError, (num, msg):
                if num in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def filter_unversioned_files(self, files):
        return set([p for p in files if not self.is_versioned(p)])

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            try:
                return mode_kind(self.index[path.encode("utf-8")].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(path):
                    return "directory"
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
            else:
                try:
                    mode = self.index[path.encode('utf-8')].mode
                except KeyError:
                    mode = 0
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        if from_dir is None:
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
            else:
                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                    os.listdir(self.abspath(from_dir).encode(osutils._fs_enc)) if not self.controldir.is_control_filename(name)
                    and not self.mapping.is_special_file(name)])
            for path in path_iterator:
                try:
                    index_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                try:
                    value = self.index[index_path]
                except KeyError:
                    value = None
                kind = osutils.file_kind(self.abspath(path))
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                    pass
                if kind == 'directory':
                    if path != from_dir:
                        if self._has_dir(path):
                            ie = self._get_dir_ie(path, self.path2id(path))
                            status = "V"
                            file_id = ie.file_id
                        elif self.is_ignored(path):
                            status = "I"
                            ie = fk_entries[kind]()
                            file_id = None
                        else:
                            status = "?"
                            ie = fk_entries[kind]()
                            file_id = None
                        yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                else:
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                    pass
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            paths = {u""}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")
                    if path in paths:
                        break
                    paths.add(path)
            return paths

    def _directory_is_tree_reference(self, path):
        # FIXME: Check .gitsubmodules for path
        return False

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            seen_children = set()
            for item_path, value in self.index.iteritems():
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(encoded_path, item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(item_path, encoded_path)
                if b'/' in subpath:
                    dirname = subpath.split(b'/', 1)[0]
                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
                else:
                    (parent, name) = posixpath.split(item_path)
                    file_ie = self._get_file_ie(
                            name.decode('utf-8'),
                            item_path.decode('utf-8'), value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)
            self.flush()

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                    try:
                        self._set_conflicted(conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)
            self.flush()

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
                ((current_directory_path, fileid),
                 [(file1_path, file1_name, file1_kind, (lstat), file1_id,
                   file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
 
        from bisect import bisect_left
        import operator
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT or
                (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                    len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                        ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content)
                        and self.controldir.is_control_filename(
                            cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                    relpath, basename, kind, stat, top_path in
                    cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                    for relpath, basename, dkind, stat, fileid, kind in
                    current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                    current_inv[1] + cur_disk_dir_content,
                    key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                            inv_row[1], disk_row[2],
                            disk_row[3], inv_row[4],
                            inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unknown disk file
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], path_elements[0][2],
                            path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], 'unknown', None,
                            path_elements[0][4], path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=""):
        if prefix != "":
            prefix += "/"
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        if prefix == "":
            per_dir[('', self.get_root_id())] = set()
        def add_entry(path, kind):
            if path == '' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                kind, None,
                self.path2id(path.decode("utf-8")),
                kind))
        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.iteritems()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                try:
                    del self.index[old_path.encode('utf-8')]
                except KeyError:
                    pass
                else:
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                self._index_add_entry(new_path, ie.kind)
        self.flush()

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
 
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                            parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            graph = self.branch.repository.get_file_graph()
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotator = Annotator(AnnotateProvider(
                self.branch.repository._file_change_scanner))
            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None if self.branch.head is None else self.store[self.branch.head].tree)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
1086
        with self.lock_tree_write():
 
1087
            if revision_ids is not None:
 
1088
                self.set_parent_ids(revision_ids)
 
1089
            self.index.clear()
 
1090
            if self.branch.head is not None:
 
1091
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
 
1092
                    if not validate_path(entry.path):
 
1093
                        continue
 
1094
 
 
1095
                    if S_ISGITLINK(entry.mode):
 
1096
                        pass # TODO(jelmer): record and return submodule paths
 
1097
                    else:
 
1098
                        # Let's at least try to use the working tree file:
 
1099
                        try:
 
1100
                            st = self._lstat(self.abspath(entry.path))
 
1101
                        except OSError, (num, msg):
 
1102
                            # But if it doesn't exist, we'll make something up.
 
1103
                            obj = self.store[entry.sha]
 
1104
                            st = os.stat_result((entry.mode, 0, 0, 0,
 
1105
                                  0, 0, len(obj.as_raw_string()), 0,
 
1106
                                  0, 0))
 
1107
                    self.index[entry.path] = index_entry_from_stat(st, entry.sha, 0)
 
1108
            self.flush()
 
1109
 
 
1110
    def pull(self, source, overwrite=False, stop_revision=None,
 
1111
             change_reporter=None, possible_transports=None, local=False,
 
1112
             show_base=False):
 
1113
        with self.lock_write(), source.lock_read():
 
1114
            old_revision = self.branch.last_revision()
 
1115
            basis_tree = self.basis_tree()
 
1116
            count = self.branch.pull(source, overwrite, stop_revision,
 
1117
                                     possible_transports=possible_transports,
 
1118
                                     local=local)
 
1119
            new_revision = self.branch.last_revision()
 
1120
            if new_revision != old_revision:
 
1121
                with basis_tree.lock_read():
 
1122
                    new_basis_tree = self.branch.basis_tree()
 
1123
                    merge.merge_inner(
 
1124
                                self.branch,
 
1125
                                new_basis_tree,
 
1126
                                basis_tree,
 
1127
                                this_tree=self,
 
1128
                                change_reporter=change_reporter,
 
1129
                                show_base=show_base)
 
1130
            return count
 
1131
 
 
1132
 
 
1133
class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):
 
1134
 
 
1135
    _tree_class = GitWorkingTree
 
1136
 
 
1137
    supports_versioned_directories = False
 
1138
 
 
1139
    supports_setting_file_ids = False
 
1140
 
 
1141
    supports_store_uncommitted = False
 
1142
 
 
1143
    supports_leftmost_parent_id_as_ghost = False
 
1144
 
 
1145
    supports_righthand_parent_id_as_ghost = False
 
1146
 
 
1147
    requires_normalized_unicode_filenames = True
 
1148
 
 
1149
    supports_merge_modified = False
 
1150
 
 
1151
    @property
 
1152
    def _matchingcontroldir(self):
 
1153
        from .dir import LocalGitControlDirFormat
 
1154
        return LocalGitControlDirFormat()
 
1155
 
 
1156
    def get_format_description(self):
 
1157
        return "Git Working Tree"
 
1158
 
 
1159
    def initialize(self, a_controldir, revision_id=None, from_branch=None,
 
1160
                   accelerator_tree=None, hardlink=False):
 
1161
        """See WorkingTreeFormat.initialize()."""
 
1162
        if not isinstance(a_controldir, LocalGitDir):
 
1163
            raise errors.IncompatibleFormat(self, a_controldir)
 
1164
        index = Index(a_controldir.root_transport.local_abspath(".git/index"))
 
1165
        index.write()
 
1166
        branch = a_controldir.open_branch(nascent_ok=True)
 
1167
        if revision_id is not None:
 
1168
            branch.set_last_revision(revision_id)
 
1169
        wt = GitWorkingTree(
 
1170
                a_controldir, a_controldir.open_repository(), branch, index)
 
1171
        for hook in MutableTree.hooks['post_build_tree']:
 
1172
            hook(wt)
 
1173
        return wt
 
1174
 
 
1175
 
 
1176
class InterIndexGitTree(InterGitTrees):
 
1177
    """InterTree that works between a Git revision tree and an index."""
 
1178
 
 
1179
    def __init__(self, source, target):
 
1180
        super(InterIndexGitTree, self).__init__(source, target)
 
1181
        self._index = target.index
 
1182
 
 
1183
    @classmethod
 
1184
    def is_compatible(cls, source, target):
 
1185
        from .repository import GitRevisionTree
 
1186
        return (isinstance(source, GitRevisionTree) and
 
1187
                isinstance(target, GitWorkingTree))
 
1188
 
 
1189
    def _iter_git_changes(self, want_unchanged=False, specific_files=None,
 
1190
            require_versioned=False, extra_trees=None,
 
1191
            want_unversioned=False):
 
1192
        trees = [self.source]
 
1193
        if extra_trees is not None:
 
1194
            trees.extend(extra_trees)
 
1195
        if specific_files is not None:
 
1196
            specific_files = self.target.find_related_paths_across_trees(
 
1197
                    specific_files, trees,
 
1198
                    require_versioned=require_versioned)
 
1199
        # TODO(jelmer): Restrict to specific_files, for performance reasons.
 
1200
        with self.lock_read():
 
1201
            return changes_between_git_tree_and_working_copy(
 
1202
                self.source.store, self.source.tree,
 
1203
                self.target, want_unchanged=want_unchanged,
 
1204
                want_unversioned=want_unversioned)
 
1205
 
 
1206
 
 
1207
tree.InterTree.register_optimiser(InterIndexGitTree)
 
1208
 
 
1209
 
 
1210
def changes_between_git_tree_and_working_copy(store, from_tree_sha, target,
 
1211
        want_unchanged=False, want_unversioned=False):
 
1212
    """Determine the changes between a git tree and a working tree with index.
 
1213
 
 
1214
    """
 
    extras = set()
    blobs = {}
    # Report dirified directories to commit_tree first, so that they can be
    # replaced with non-empty directories if they have contents.
    dirified = []
    target_root_path = target.abspath('.').encode(sys.getfilesystemencoding())
    for path, index_entry in target.index.iteritems():
        try:
            live_entry = index_entry_from_path(
                    target.abspath(path.decode('utf-8')).encode(osutils._fs_enc))
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                # Entry was removed; keep it listed, but mark it as gone.
                blobs[path] = (ZERO_SHA, 0)
            elif e.errno == errno.EISDIR:
                # Entry was turned into a directory
                dirified.append((path, Tree().id, stat.S_IFDIR))
                store.add_object(Tree())
            else:
                raise
        else:
            blobs[path] = (live_entry.sha, cleanup_mode(live_entry.mode))
    if want_unversioned:
        for e in target.extras():
            ap = target.abspath(e)
            st = os.lstat(ap)
            try:
                np, accessible = osutils.normalized_filename(e)
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    e, osutils._fs_enc)
            if stat.S_ISDIR(st.st_mode):
                blob = Tree()
            else:
                blob = blob_from_path_and_stat(ap.encode('utf-8'), st)
            store.add_object(blob)
            np = np.encode('utf-8')
            blobs[np] = (blob.id, cleanup_mode(st.st_mode))
            extras.add(np)
    to_tree_sha = commit_tree(store, dirified + [(p, s, m) for (p, (s, m)) in blobs.iteritems()])
    return store.tree_changes(
        from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True), extras