/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/git/workingtree.py

Fix DeprecationWarning in RemoteRepository.get_rev_id_for_revno.

1
# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
 
2
#
 
3
# This program is free software; you can redistribute it and/or modify
 
4
# it under the terms of the GNU General Public License as published by
 
5
# the Free Software Foundation; either version 2 of the License, or
 
6
# (at your option) any later version.
 
7
#
 
8
# This program is distributed in the hope that it will be useful,
 
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
11
# GNU General Public License for more details.
 
12
#
 
13
# You should have received a copy of the GNU General Public License
 
14
# along with this program; if not, write to the Free Software
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
16
 
 
17
 
 
18
"""An adapter between a Git index and a Bazaar Working Tree"""
 
19
 
 
20
from __future__ import absolute_import
 
21
 
 
22
import itertools
 
23
from collections import defaultdict
 
24
import errno
 
25
from dulwich.ignore import (
 
26
    IgnoreFilterManager,
 
27
    )
 
28
from dulwich.file import GitFile, FileLocked
 
29
from dulwich.index import (
 
30
    Index,
 
31
    SHA1Writer,
 
32
    build_index_from_tree,
 
33
    index_entry_from_path,
 
34
    index_entry_from_stat,
 
35
    FLAG_STAGEMASK,
 
36
    read_submodule_head,
 
37
    validate_path,
 
38
    write_index_dict,
 
39
    )
 
40
from dulwich.object_store import (
 
41
    tree_lookup_path,
 
42
    )
 
43
from dulwich.objects import (
 
44
    S_ISGITLINK,
 
45
    )
 
46
import os
 
47
import posixpath
 
48
import stat
 
49
import sys
 
50
 
 
51
from .. import (
 
52
    conflicts as _mod_conflicts,
 
53
    errors,
 
54
    controldir as _mod_controldir,
 
55
    globbing,
 
56
    ignores,
 
57
    lock,
 
58
    merge,
 
59
    osutils,
 
60
    revision as _mod_revision,
 
61
    trace,
 
62
    transport as _mod_transport,
 
63
    tree,
 
64
    workingtree,
 
65
    )
 
66
from ..decorators import (
 
67
    only_raises,
 
68
    )
 
69
from ..mutabletree import (
 
70
    BadReferenceTarget,
 
71
    MutableTree,
 
72
    )
 
73
 
 
74
 
 
75
from .dir import (
 
76
    LocalGitDir,
 
77
    )
 
78
from .tree import (
 
79
    MutableGitIndexTree,
 
80
    )
 
81
from .mapping import (
 
82
    GitFileIdMap,
 
83
    mode_kind,
 
84
    )
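
# WindowsError error code for a missing path component, equivalent to
# ENOENT; referenced by walkdirs() below and not importable from the errno
# module, so it is defined here.
ERROR_PATH_NOT_FOUND = 3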
 
85
 
 
86
 
 
87
class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
 
88
    """A Git working tree."""
 
89
 
 
90
    def __init__(self, controldir, repo, branch):
 
91
        MutableGitIndexTree.__init__(self)
 
92
        basedir = controldir.root_transport.local_abspath('.')
 
93
        self.basedir = osutils.realpath(basedir)
 
94
        self.controldir = controldir
 
95
        self.repository = repo
 
96
        self.store = self.repository._git.object_store
 
97
        self.mapping = self.repository.get_mapping()
 
98
        self._branch = branch
 
99
        self._transport = self.repository._git._controltransport
 
100
        self._format = GitWorkingTreeFormat()
 
101
        self.index = None
 
102
        self._index_file = None
 
103
        self.views = self._make_views()
 
104
        self._rules_searcher = None
 
105
        self._detect_case_handling()
 
106
        self._reset_data()
 
107
 
 
108
    def supports_tree_reference(self):
 
109
        return True
 
110
 
 
111
    def supports_rename_tracking(self):
 
112
        return False
 
113
 
 
114
    def _read_index(self):
 
115
        self.index = Index(self.control_transport.local_abspath('index'))
 
116
        self._index_dirty = False
 
117
 
 
118
    def lock_read(self):
 
119
        """Lock the repository for read operations.
 
120
 
 
121
        :return: A breezy.lock.LogicalLockResult.
 
122
        """
 
123
        if not self._lock_mode:
 
124
            self._lock_mode = 'r'
 
125
            self._lock_count = 1
 
126
            self._read_index()
 
127
        else:
 
128
            self._lock_count += 1
 
129
        self.branch.lock_read()
 
130
        return lock.LogicalLockResult(self.unlock)
 
131
 
 
132
    def _lock_write_tree(self):
 
133
        if not self._lock_mode:
 
134
            self._lock_mode = 'w'
 
135
            self._lock_count = 1
 
136
            try:
 
137
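                # GitFile writes to .git/index.lock and only replaces the
                # real index when the lock is committed on unlock; abort()
                # discards it.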
                self._index_file = GitFile(
 
138
                    self.control_transport.local_abspath('index'), 'wb')
 
139
            except FileLocked:
 
140
                raise errors.LockContention('index')
 
141
            self._read_index()
 
142
        elif self._lock_mode == 'r':
 
143
            raise errors.ReadOnlyError(self)
 
144
        else:
 
145
            self._lock_count += 1
 
146
 
 
147
    def lock_tree_write(self):
 
148
        self.branch.lock_read()
 
149
        try:
 
150
            self._lock_write_tree()
 
151
            return lock.LogicalLockResult(self.unlock)
 
152
        except BaseException:
 
153
            self.branch.unlock()
 
154
            raise
 
155
 
 
156
    def lock_write(self, token=None):
 
157
        self.branch.lock_write()
 
158
        try:
 
159
            self._lock_write_tree()
 
160
            return lock.LogicalLockResult(self.unlock)
 
161
        except BaseException:
 
162
            self.branch.unlock()
 
163
            raise
 
164
 
 
165
    def is_locked(self):
 
166
        return self._lock_count >= 1
 
167
 
 
168
    def get_physical_lock_status(self):
 
169
        return False
 
170
 
 
171
    def break_lock(self):
 
172
        try:
 
173
            self.control_transport.delete('index.lock')
 
174
        except errors.NoSuchFile:
 
175
            pass
 
176
        self.branch.break_lock()
 
177
 
 
178
    @only_raises(errors.LockNotHeld, errors.LockBroken)
 
179
    def unlock(self):
 
180
        if not self._lock_count:
 
181
            return lock.cant_unlock_not_held(self)
 
182
        try:
 
183
            self._cleanup()
 
184
            self._lock_count -= 1
 
185
            if self._lock_count > 0:
 
186
                return
 
187
            if self._index_file is not None:
 
188
                if self._index_dirty:
 
189
                    self._flush(self._index_file)
 
190
                    self._index_file.close()
 
191
                else:
 
192
                    # Something else already triggered a write of the index
 
193
                    # file by calling .flush()
 
194
                    self._index_file.abort()
 
195
                self._index_file = None
 
196
            self._lock_mode = None
 
197
            self.index = None
 
198
        finally:
 
199
            self.branch.unlock()
 
200
 
 
201
    def _cleanup(self):
 
202
        pass
 
203
 
 
204
    def _detect_case_handling(self):
 
205
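        # Probe for the repository's config file using a deliberately
        # mis-cased name; if the stat succeeds, the filesystem treats
        # names case-insensitively.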
        try:
 
206
            self._transport.stat(".git/cOnFiG")
 
207
        except errors.NoSuchFile:
 
208
            self.case_sensitive = True
 
209
        else:
 
210
            self.case_sensitive = False
 
211
 
 
212
    def merge_modified(self):
 
213
        return {}
 
214
 
 
215
    def set_merge_modified(self, modified_hashes):
 
216
        raise errors.UnsupportedOperation(self.set_merge_modified, self)
 
217
 
 
218
    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
 
219
        self.set_parent_ids([p for p, t in parents_list])
 
220
 
 
221
    def _set_merges_from_parent_ids(self, rhs_parent_ids):
 
222
        try:
 
223
            merges = [self.branch.lookup_bzr_revision_id(
 
224
                revid)[0] for revid in rhs_parent_ids]
 
225
        except errors.NoSuchRevision as e:
 
226
            raise errors.GhostRevisionUnusableHere(e.revision)
 
227
        if merges:
 
228
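            # Git records pending merge parents as newline-separated
            # commit SHAs in .git/MERGE_HEAD.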
            self.control_transport.put_bytes(
 
229
                'MERGE_HEAD', b'\n'.join(merges),
 
230
                mode=self.controldir._get_file_mode())
 
231
        else:
 
232
            try:
 
233
                self.control_transport.delete('MERGE_HEAD')
 
234
            except errors.NoSuchFile:
 
235
                pass
 
236
 
 
237
    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
 
238
        """Set the parent ids to revision_ids.
 
239
 
 
240
        See also set_parent_trees. This API will try to retrieve the tree data
 
241
        for each element of revision_ids from the tree's repository. If you have
 
242
        tree data already available, it is more efficient to use
 
243
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
 
244
        an easier API to use.
 
245
 
 
246
        :param revision_ids: The revision_ids to set as the parent ids of this
 
247
            working tree. Any of these may be ghosts.
 
248
        """
 
249
        with self.lock_tree_write():
 
250
            self._check_parents_for_ghosts(
 
251
                revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost)
 
252
            for revision_id in revision_ids:
 
253
                _mod_revision.check_not_reserved_id(revision_id)
 
254
 
 
255
            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)
 
256
 
 
257
            if len(revision_ids) > 0:
 
258
                self.set_last_revision(revision_ids[0])
 
259
            else:
 
260
                self.set_last_revision(_mod_revision.NULL_REVISION)
 
261
 
 
262
            self._set_merges_from_parent_ids(revision_ids[1:])
 
263
 
 
264
    def get_parent_ids(self):
 
265
        """See Tree.get_parent_ids.
 
266
 
 
267
        This implementation reads the pending merges list and last_revision
 
268
        value and uses that to decide what the parents list should be.
 
269
        """
 
270
        last_rev = _mod_revision.ensure_null(self._last_revision())
 
271
        if _mod_revision.NULL_REVISION == last_rev:
 
272
            parents = []
 
273
        else:
 
274
            parents = [last_rev]
 
275
        try:
 
276
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
 
277
        except errors.NoSuchFile:
 
278
            pass
 
279
        else:
 
280
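            # Each MERGE_HEAD line is a Git commit SHA; map it back to a
            # Bazaar revision id through the branch mapping.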
            for l in osutils.split_lines(merges_bytes):
 
281
                revision_id = l.rstrip(b'\n')
 
282
                parents.append(
 
283
                    self.branch.lookup_foreign_revision_id(revision_id))
 
284
        return parents
 
285
 
 
286
    def check_state(self):
 
287
        """Check that the working state is/isn't valid."""
 
288
        pass
 
289
 
 
290
    def remove(self, files, verbose=False, to_file=None, keep_files=True,
 
291
               force=False):
 
292
        """Remove nominated files from the working tree metadata.
 
293
 
 
294
        :param files: File paths relative to the basedir.
 
295
        :param keep_files: If true, the files are only removed from version
            control; they are kept on disk.
 
296
        :param force: Delete files and directories, even if they are changed
 
297
            and even if the directories are not empty.
 
298
        """
 
299
        if not isinstance(files, list):
 
300
            files = [files]
 
301
 
 
302
        if to_file is None:
 
303
            to_file = sys.stdout
 
304
 
 
305
        def backup(file_to_backup):
 
306
            abs_path = self.abspath(file_to_backup)
 
307
            backup_name = self.controldir._available_backup_name(
 
308
                file_to_backup)
 
309
            osutils.rename(abs_path, self.abspath(backup_name))
 
310
            return "removed %s (but kept a copy: %s)" % (
 
311
                file_to_backup, backup_name)
 
312
 
 
313
        # Sort needed to first handle directory content before the directory
 
314
        files_to_backup = []
 
315
 
 
316
        all_files = set()
 
317
 
 
318
        def recurse_directory_to_add_files(directory):
 
319
            # Recurse directory and add all files
 
320
            # so we can check if they have changed.
 
321
            for parent_info, file_infos in self.walkdirs(directory):
 
322
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
 
323
                    # Is it versioned or ignored?
 
324
                    if self.is_versioned(relpath):
 
325
                        # Add nested content for deletion.
 
326
                        all_files.add(relpath)
 
327
                    else:
 
328
                        # Files which are not versioned
 
329
                        # should be treated as unknown.
 
330
                        files_to_backup.append(relpath)
 
331
 
 
332
        with self.lock_tree_write():
 
333
            for filepath in files:
 
334
                # Get file name into canonical form.
 
335
                abspath = self.abspath(filepath)
 
336
                filepath = self.relpath(abspath)
 
337
 
 
338
                if filepath:
 
339
                    all_files.add(filepath)
 
340
                    recurse_directory_to_add_files(filepath)
 
341
 
 
342
            files = list(all_files)
 
343
 
 
344
            if len(files) == 0:
 
345
                return  # nothing to do
 
346
 
 
347
            # Sort needed to first handle directory content before the
 
348
            # directory
 
349
            files.sort(reverse=True)
 
350
 
 
351
            # Bail out if we are going to delete files we shouldn't
 
352
            if not keep_files and not force:
 
353
                for (file_id, path, content_change, versioned, parent_id, name,
 
354
                     kind, executable) in self.iter_changes(
 
355
                         self.basis_tree(), include_unchanged=True,
 
356
                         require_versioned=False, want_unversioned=True,
 
357
                         specific_files=files):
 
358
                    if versioned[0] is False:
 
359
                        # The record is unknown or newly added
 
360
                        files_to_backup.append(path[1])
 
361
                        files_to_backup.extend(
 
362
                            osutils.parent_directories(path[1]))
 
363
                    elif (content_change and (kind[1] is not None)
 
364
                            and osutils.is_inside_any(files, path[1])):
 
365
                        # Versioned and changed, but not deleted, and still
 
366
                        # in one of the dirs to be deleted.
 
367
                        files_to_backup.append(path[1])
 
368
                        files_to_backup.extend(
 
369
                            osutils.parent_directories(path[1]))
 
370
 
 
371
            for f in files:
 
372
                if f == '':
 
373
                    continue
 
374
 
 
375
                try:
 
376
                    kind = self.kind(f)
 
377
                except errors.NoSuchFile:
 
378
                    kind = None
 
379
 
 
380
                abs_path = self.abspath(f)
 
381
                if verbose:
 
382
                    # having removed it, it must be either ignored or unknown
 
383
                    if self.is_ignored(f):
 
384
                        new_status = 'I'
 
385
                    else:
 
386
                        new_status = '?'
 
387
                    kind_ch = osutils.kind_marker(kind)
 
388
                    to_file.write(new_status + '       ' + f + kind_ch + '\n')
 
389
                if kind is None:
 
390
                    message = "%s does not exist" % (f, )
 
391
                else:
 
392
                    if not keep_files:
 
393
                        if f in files_to_backup and not force:
 
394
                            message = backup(f)
 
395
                        else:
 
396
                            if kind == 'directory':
 
397
                                osutils.rmtree(abs_path)
 
398
                            else:
 
399
                                osutils.delete_any(abs_path)
 
400
                            message = "deleted %s" % (f,)
 
401
                    else:
 
402
                        message = "removed %s" % (f,)
 
403
                self._unversion_path(f)
 
404
 
 
405
                # print only one message (if any) per file.
 
406
                if message is not None:
 
407
                    trace.note(message)
 
408
            self._versioned_dirs = None
 
409
 
 
410
    def smart_add(self, file_list, recurse=True, action=None, save=True):
 
411
        if not file_list:
 
412
            file_list = [u'.']
 
413
 
 
414
        # expand any symlinks in the directory part, while leaving the
 
415
        # filename alone
 
416
        # only expand when symlinks are supported; avoids Windows path bugs
 
417
        if self.supports_symlinks():
 
418
            file_list = list(map(osutils.normalizepath, file_list))
 
419
 
 
420
        conflicts_related = set()
 
421
        for c in self.conflicts():
 
422
            conflicts_related.update(c.associated_filenames())
 
423
 
 
424
        added = []
 
425
        ignored = {}
 
426
        user_dirs = []
 
427
 
 
428
        def call_action(filepath, kind):
 
429
            if filepath == '':
 
430
                return
 
431
            if action is not None:
 
432
                parent_path = posixpath.dirname(filepath)
 
433
                parent_id = self.path2id(parent_path)
 
434
                parent_ie = self._get_dir_ie(parent_path, parent_id)
 
435
                file_id = action(self, parent_ie, filepath, kind)
 
436
                if file_id is not None:
 
437
                    raise workingtree.SettingFileIdUnsupported()
 
438
 
 
439
        with self.lock_tree_write():
 
440
            for filepath in osutils.canonical_relpaths(
 
441
                    self.basedir, file_list):
 
442
                filepath, can_access = osutils.normalized_filename(filepath)
 
443
                if not can_access:
 
444
                    raise errors.InvalidNormalization(filepath)
 
445
 
 
446
                abspath = self.abspath(filepath)
 
447
                kind = osutils.file_kind(abspath)
 
448
                if kind in ("file", "symlink"):
 
449
                    (index, subpath) = self._lookup_index(
 
450
                        filepath.encode('utf-8'))
 
451
                    if subpath in index:
 
452
                        # Already present
 
453
                        continue
 
454
                    call_action(filepath, kind)
 
455
                    if save:
 
456
                        self._index_add_entry(filepath, kind)
 
457
                    added.append(filepath)
 
458
                elif kind == "directory":
 
459
                    (index, subpath) = self._lookup_index(
 
460
                        filepath.encode('utf-8'))
 
461
                    if subpath not in index:
 
462
                        call_action(filepath, kind)
 
463
                    if recurse:
 
464
                        user_dirs.append(filepath)
 
465
                else:
 
466
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
 
467
            for user_dir in user_dirs:
 
468
                abs_user_dir = self.abspath(user_dir)
 
469
                if user_dir != '':
 
470
                    try:
 
471
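                        # A directory that is itself a control
                        # directory root (e.g. a nested git repository)
                        # is a separate tree and is skipped below.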
                        transport = _mod_transport.get_transport_from_path(
 
472
                            abs_user_dir)
 
473
                        _mod_controldir.ControlDirFormat.find_format(transport)
 
474
                        subtree = True
 
475
                    except errors.NotBranchError:
 
476
                        subtree = False
 
477
                    except errors.UnsupportedFormatError:
 
478
                        subtree = False
 
479
                else:
 
480
                    subtree = False
 
481
                if subtree:
 
482
                    trace.warning('skipping nested tree %r', abs_user_dir)
 
483
                    continue
 
484
 
 
485
                for name in os.listdir(abs_user_dir):
 
486
                    subp = os.path.join(user_dir, name)
 
487
                    if (self.is_control_filename(subp) or
 
488
                            self.mapping.is_special_file(subp)):
 
489
                        continue
 
490
                    ignore_glob = self.is_ignored(subp)
 
491
                    if ignore_glob is not None:
 
492
                        ignored.setdefault(ignore_glob, []).append(subp)
 
493
                        continue
 
494
                    abspath = self.abspath(subp)
 
495
                    kind = osutils.file_kind(abspath)
 
496
                    if kind == "directory":
 
497
                        user_dirs.append(subp)
 
498
                    else:
 
499
                        (index, subpath) = self._lookup_index(
 
500
                            subp.encode('utf-8'))
 
501
                        if subpath in index:
 
502
                            # Already present
 
503
                            continue
 
504
                        if subp in conflicts_related:
 
505
                            continue
 
506
                        call_action(subp, kind)
 
507
                        if save:
 
508
                            self._index_add_entry(subp, kind)
 
509
                        added.append(subp)
 
510
            return added, ignored
 
511
 
 
512
    def has_filename(self, filename):
 
513
        return osutils.lexists(self.abspath(filename))
 
514
 
 
515
    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
 
516
        if from_dir is None:
 
517
            from_dir = u""
 
518
        encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc)
 
519
        for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir):
 
520
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
 
521
            if self.controldir.is_control_filename(
 
522
                    dir_relpath.decode(osutils._fs_enc)):
 
523
                continue
 
524
            for name in list(dirnames):
 
525
                if self.controldir.is_control_filename(
 
526
                        name.decode(osutils._fs_enc)):
 
527
                    dirnames.remove(name)
 
528
                    continue
 
529
                relpath = os.path.join(dir_relpath, name)
 
530
                if include_dirs:
 
531
                    try:
 
532
                        yield relpath.decode(osutils._fs_enc)
 
533
                    except UnicodeDecodeError:
 
534
                        raise errors.BadFilenameEncoding(
 
535
                            relpath, osutils._fs_enc)
 
536
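                    # The directory itself was yielded above; if nothing
                    # under it is versioned, prune it so the walk does
                    # not descend into purely unknown subtrees.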
                    if not self._has_dir(relpath):
 
537
                        dirnames.remove(name)
 
538
            for name in filenames:
 
539
                if self.mapping.is_special_file(name):
 
540
                    continue
 
541
                if self.controldir.is_control_filename(
 
542
                        name.decode(osutils._fs_enc, 'replace')):
 
543
                    continue
 
544
                yp = os.path.join(dir_relpath, name)
 
545
                try:
 
546
                    yield yp.decode(osutils._fs_enc)
 
547
                except UnicodeDecodeError:
 
548
                    raise errors.BadFilenameEncoding(
 
549
                        yp, osutils._fs_enc)
 
550
 
 
551
    def extras(self):
 
552
        """Yield all unversioned files in this WorkingTree.
 
553
        """
 
554
        with self.lock_read():
 
555
            index_paths = set(
 
556
                [p.decode('utf-8') for p, i in self._recurse_index_entries()])
 
557
            all_paths = set(self._iter_files_recursive(include_dirs=False))
 
558
            return iter(all_paths - index_paths)
 
559
 
 
560
    def _gather_kinds(self, files, kinds):
 
561
        """See MutableTree._gather_kinds."""
 
562
        with self.lock_tree_write():
 
563
            for pos, f in enumerate(files):
 
564
                if kinds[pos] is None:
 
565
                    fullpath = osutils.normpath(self.abspath(f))
 
566
                    try:
 
567
                        kind = osutils.file_kind(fullpath)
 
568
                    except OSError as e:
 
569
                        if e.errno == errno.ENOENT:
 
570
                            raise errors.NoSuchFile(fullpath)
 
571
                    if f != '' and self._directory_is_tree_reference(f):
 
572
                        kind = 'tree-reference'
 
573
                    kinds[pos] = kind
 
574
 
 
575
    def flush(self):
 
576
        if self._lock_mode != 'w':
 
577
            raise errors.NotWriteLocked(self)
 
578
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
 
579
        # already in use and GitFile doesn't allow overriding the lock file
 
580
        # name :(
 
581
        f = open(self.control_transport.local_abspath('index'), 'wb')
 
582
        # Note that _flush will close the file
 
583
        self._flush(f)
 
584
 
 
585
    def _flush(self, f):
 
586
        try:
 
587
            shaf = SHA1Writer(f)
 
588
            write_index_dict(shaf, self.index)
 
589
            shaf.close()
 
590
        except BaseException:
 
591
            f.abort()
 
592
            raise
 
593
        self._index_dirty = False
 
594
 
 
595
    def has_or_had_id(self, file_id):
 
596
        if self.has_id(file_id):
 
597
            return True
 
598
        if self.had_id(file_id):
 
599
            return True
 
600
        return False
 
601
 
 
602
    def had_id(self, file_id):
 
603
        path = self._basis_fileid_map.lookup_path(file_id)
 
604
        try:
 
605
            head = self.repository._git.head()
 
606
        except KeyError:
 
607
            # Assume no if basis is not accessible
 
608
            return False
 
609
        try:
 
610
            root_tree = self.store[head].tree
 
611
        except KeyError:
 
612
            return False
 
613
        try:
 
614
            tree_lookup_path(self.store.__getitem__,
 
615
                             root_tree, path.encode('utf-8'))
 
616
        except KeyError:
 
617
            return False
 
618
        else:
 
619
            return True
 
620
 
 
621
    def get_file_mtime(self, path):
 
622
        """See Tree.get_file_mtime."""
 
623
        try:
 
624
            return self._lstat(path).st_mtime
 
625
        except OSError as e:
 
626
            if e.errno == errno.ENOENT:
 
627
                raise errors.NoSuchFile(path)
 
628
            raise
 
629
 
 
630
    def is_ignored(self, filename):
 
631
        r"""Check whether the filename matches an ignore pattern.
 
632
 
 
633
        If the file is ignored, returns the pattern which caused it to
 
634
        be ignored, otherwise None.  So this can simply be used as a
 
635
        boolean if desired."""
 
636
        if getattr(self, '_global_ignoreglobster', None) is None:
 
637
            ignore_globs = set()
 
638
            ignore_globs.update(ignores.get_runtime_ignores())
 
639
            ignore_globs.update(ignores.get_user_ignores())
 
640
            self._global_ignoreglobster = globbing.ExceptionGlobster(
 
641
                ignore_globs)
 
642
        match = self._global_ignoreglobster.match(filename)
 
643
        if match is not None:
 
644
            return match
 
645
        try:
 
646
            if self.kind(filename) == 'directory':
 
647
                filename += '/'
 
648
        except errors.NoSuchFile:
 
649
            pass
 
650
        filename = filename.lstrip('/')
 
651
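        # Fall back to the .gitignore rules; directories are matched with
        # a trailing slash and the last matching pattern wins (it may be
        # a negation, in which case the path is not ignored).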
        ignore_manager = self._get_ignore_manager()
 
652
        ps = list(ignore_manager.find_matching(filename))
 
653
        if not ps:
 
654
            return None
 
655
        if not ps[-1].is_exclude:
 
656
            return None
 
657
        return bytes(ps[-1])
 
658
 
 
659
    def _get_ignore_manager(self):
 
660
        ignoremanager = getattr(self, '_ignoremanager', None)
 
661
        if ignoremanager is not None:
 
662
            return ignoremanager
 
663
 
 
664
        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
 
665
        self._ignoremanager = ignore_manager
 
666
        return ignore_manager
 
667
 
 
668
    def _flush_ignore_list_cache(self):
 
669
        self._ignoremanager = None
 
670
 
 
671
    def set_last_revision(self, revid):
 
672
        if _mod_revision.is_null(revid):
 
673
            self.branch.set_last_revision_info(0, revid)
 
674
            return False
 
675
        _mod_revision.check_not_reserved_id(revid)
 
676
        try:
 
677
            self.branch.generate_revision_history(revid)
 
678
        except errors.NoSuchRevision:
 
679
            raise errors.GhostRevisionUnusableHere(revid)
 
680
 
 
681
    def _reset_data(self):
 
682
        try:
 
683
            head = self.repository._git.head()
 
684
        except KeyError:
 
685
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
 
686
        else:
 
687
            self._basis_fileid_map = self.mapping.get_fileid_map(
 
688
                self.store.__getitem__, self.store[head].tree)
 
689
        self._fileid_map = self._basis_fileid_map.copy()
 
690
 
 
691
    def get_file_verifier(self, path, stat_value=None):
 
692
        with self.lock_read():
 
693
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
 
694
            try:
 
695
                return ("GIT", index[subpath].sha)
 
696
            except KeyError:
 
697
                if self._has_dir(path):
 
698
                    return ("GIT", None)
 
699
                raise errors.NoSuchFile(path)
 
700
 
 
701
    def get_file_sha1(self, path, stat_value=None):
 
702
        with self.lock_read():
 
703
            if not self.is_versioned(path):
 
704
                raise errors.NoSuchFile(path)
 
705
            abspath = self.abspath(path)
 
706
            try:
 
707
                return osutils.sha_file_by_name(abspath)
 
708
            except OSError as e:
 
709
                if e.errno in (errno.EISDIR, errno.ENOENT):
 
710
                    return None
 
711
                raise
 
712
 
 
713
    def revision_tree(self, revid):
 
714
        return self.repository.revision_tree(revid)
 
715
 
 
716
    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
 
717
        mode = stat_result.st_mode
 
718
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
 
719
 
 
720
    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
 
721
        return self.basis_tree().is_executable(path)
 
722
 
 
723
    def stored_kind(self, path):
 
724
        with self.lock_read():
 
725
            encoded_path = path.encode('utf-8')
 
726
            (index, subpath) = self._lookup_index(encoded_path)
 
727
            try:
 
728
                return mode_kind(index[subpath].mode)
 
729
            except KeyError:
 
730
                # Maybe it's a directory?
 
731
                if self._has_dir(encoded_path):
 
732
                    return "directory"
 
733
                raise errors.NoSuchFile(path)
 
734
 
 
735
    def _lstat(self, path):
 
736
        return os.lstat(self.abspath(path))
 
737
 
 
738
    def _live_entry(self, path):
 
739
        encoded_path = self.abspath(path.decode('utf-8')).encode(
 
740
            osutils._fs_enc)
 
741
        return index_entry_from_path(encoded_path)
 
742
 
 
743
    def is_executable(self, path):
 
744
        with self.lock_read():
 
745
            if self._supports_executable():
 
746
                mode = self._lstat(path).st_mode
 
747
            else:
 
748
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
 
749
                try:
 
750
                    mode = index[subpath].mode
 
751
                except KeyError:
 
752
                    mode = 0
 
753
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
 
754
 
 
755
    def _is_executable_from_path_and_stat(self, path, stat_result):
 
756
        if self._supports_executable():
 
757
            return self._is_executable_from_path_and_stat_from_stat(
                path, stat_result)
 
758
        else:
 
759
            return self._is_executable_from_path_and_stat_from_basis(
 
760
                path, stat_result)
 
761
 
 
762
    def list_files(self, include_root=False, from_dir=None, recursive=True):
 
763
        if from_dir is None or from_dir == '.':
 
764
            from_dir = u""
 
765
        dir_ids = {}
 
766
        fk_entries = {'directory': tree.TreeDirectory,
 
767
                      'file': tree.TreeFile,
 
768
                      'symlink': tree.TreeLink,
 
769
                      'tree-reference': tree.TreeReference}
 
770
        with self.lock_read():
 
771
            root_ie = self._get_dir_ie(u"", None)
 
772
            if include_root and not from_dir:
 
773
                yield "", "V", root_ie.kind, root_ie
 
774
            dir_ids[u""] = root_ie.file_id
 
775
            if recursive:
 
776
                path_iterator = sorted(
 
777
                    self._iter_files_recursive(from_dir, include_dirs=True))
 
778
            else:
 
779
                encoded_from_dir = self.abspath(from_dir).encode(
 
780
                    osutils._fs_enc)
 
781
                path_iterator = sorted(
 
782
                    [os.path.join(from_dir, name.decode(osutils._fs_enc))
 
783
                     for name in os.listdir(encoded_from_dir)
 
784
                     if not self.controldir.is_control_filename(
 
785
                         name.decode(osutils._fs_enc)) and
 
786
                     not self.mapping.is_special_file(
 
787
                         name.decode(osutils._fs_enc))])
 
788
            for path in path_iterator:
 
789
                try:
 
790
                    encoded_path = path.encode("utf-8")
 
791
                except UnicodeEncodeError:
 
792
                    raise errors.BadFilenameEncoding(
 
793
                        path, osutils._fs_enc)
 
794
                (index, index_path) = self._lookup_index(encoded_path)
 
795
                try:
 
796
                    value = index[index_path]
 
797
                except KeyError:
 
798
                    value = None
 
799
                kind = self.kind(path)
 
800
                parent, name = posixpath.split(path)
 
801
                for dir_path, dir_ie in self._add_missing_parent_ids(
 
802
                        parent, dir_ids):
 
803
                    pass
 
804
                if kind in ('directory', 'tree-reference'):
 
805
                    if path != from_dir:
 
806
                        if self._has_dir(encoded_path):
 
807
                            ie = self._get_dir_ie(path, self.path2id(path))
 
808
                            status = "V"
 
809
                        elif self.is_ignored(path):
 
810
                            status = "I"
 
811
                            ie = fk_entries[kind]()
 
812
                        else:
 
813
                            status = "?"
 
814
                            ie = fk_entries[kind]()
 
815
                        yield (posixpath.relpath(path, from_dir), status, kind,
 
816
                               ie)
 
817
                    continue
 
818
                if value is not None:
 
819
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
 
820
                    yield (posixpath.relpath(path, from_dir), "V", ie.kind, ie)
 
821
                else:
 
822
                    ie = fk_entries[kind]()
 
823
                    yield (posixpath.relpath(path, from_dir),
 
824
                           ("I" if self.is_ignored(path) else "?"), kind, ie)
 
825
 
 
826
    def all_file_ids(self):
 
827
        raise errors.UnsupportedOperation(self.all_file_ids, self)
 
828
 
 
829
    def all_versioned_paths(self):
 
830
        with self.lock_read():
 
831
            paths = {u""}
 
832
            for path in self.index:
 
833
                if self.mapping.is_special_file(path):
 
834
                    continue
 
835
                path = path.decode("utf-8")
 
836
                paths.add(path)
 
837
                while path != "":
 
838
                    path = posixpath.dirname(path).strip("/")
 
839
                    if path in paths:
 
840
                        break
 
841
                    paths.add(path)
 
842
            return paths
 
843
 
 
844
    def iter_child_entries(self, path):
 
845
        encoded_path = path.encode('utf-8')
 
846
        with self.lock_read():
 
847
            parent_id = self.path2id(path)
 
848
            found_any = False
 
849
            for item_path, value in self.index.iteritems():
 
850
                decoded_item_path = item_path.decode('utf-8')
 
851
                if self.mapping.is_special_file(item_path):
 
852
                    continue
 
853
                if not osutils.is_inside(path, decoded_item_path):
 
854
                    continue
 
855
                found_any = True
 
856
                subpath = posixpath.relpath(decoded_item_path, path)
 
857
                if '/' in subpath:
 
858
                    dirname = subpath.split('/', 1)[0]
 
859
                    file_ie = self._get_dir_ie(
 
860
                        posixpath.join(path, dirname), parent_id)
 
861
                else:
 
862
                    (unused_parent, name) = posixpath.split(decoded_item_path)
 
863
                    file_ie = self._get_file_ie(
 
864
                        name, decoded_item_path, value, parent_id)
 
865
                yield file_ie
 
866
            if not found_any and path != u'':
 
867
                raise errors.NoSuchFile(path)
 
868
 
 
869
    def conflicts(self):
 
870
        with self.lock_read():
 
871
            conflicts = _mod_conflicts.ConflictList()
 
872
            for item_path, value in self.index.iteritems():
 
873
                if value.flags & FLAG_STAGEMASK:
 
874
                    conflicts.append(_mod_conflicts.TextConflict(
 
875
                        item_path.decode('utf-8')))
 
876
            return conflicts
 
877
 
 
878
    def set_conflicts(self, conflicts):
 
879
        by_path = set()
 
880
        for conflict in conflicts:
 
881
            if conflict.typestring in ('text conflict', 'contents conflict'):
 
882
                by_path.add(conflict.path.encode('utf-8'))
 
883
            else:
 
884
                raise errors.UnsupportedOperation(self.set_conflicts, self)
 
885
        with self.lock_tree_write():
 
886
            for path in self.index:
 
887
                self._set_conflicted(path, path in by_path)
 
888
 
 
889
    def _set_conflicted(self, path, conflicted):
 
890
        trace.mutter('change conflict: %r -> %r', path, conflicted)
 
891
        value = self.index[path]
 
892
        self._index_dirty = True
 
893
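        # The flags word is the last field of the index entry tuple; a
        # non-zero merge stage (the FLAG_STAGEMASK bits) marks the path
        # as conflicted.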
        if conflicted:
 
894
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
 
895
        else:
 
896
            self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))
 
897
 
 
898
    def add_conflicts(self, new_conflicts):
 
899
        with self.lock_tree_write():
 
900
            for conflict in new_conflicts:
 
901
                if conflict.typestring in ('text conflict',
 
902
                                           'contents conflict'):
 
903
                    try:
 
904
                        self._set_conflicted(
 
905
                            conflict.path.encode('utf-8'), True)
 
906
                    except KeyError:
 
907
                        raise errors.UnsupportedOperation(
 
908
                            self.add_conflicts, self)
 
909
                else:
 
910
                    raise errors.UnsupportedOperation(self.add_conflicts, self)
 
911
 
 
912
    def walkdirs(self, prefix=""):
 
913
        """Walk the directories of this tree.
 
914
 
 
915
        returns a generator which yields items in the form:
 
916
                ((current_directory_path, fileid),
 
917
                 [(file1_path, file1_name, file1_kind, (lstat), file1_id,
 
918
                   file1_kind), ... ])
 
919
 
 
920
        This API returns a generator, which is only valid during the current
 
921
        tree transaction - within a single lock_read or lock_write duration.
 
922
 
 
923
        If the tree is not locked, it may cause an error to be raised,
 
924
        depending on the tree implementation.
 
925
        """
 
926
        from bisect import bisect_left
 
927
        import operator
 
928
        disk_top = self.abspath(prefix)
 
929
        if disk_top.endswith('/'):
 
930
            disk_top = disk_top[:-1]
 
931
        top_strip_len = len(disk_top) + 1
 
932
        inventory_iterator = self._walkdirs(prefix)
 
933
        disk_iterator = osutils.walkdirs(disk_top, prefix)
 
934
        try:
 
935
            current_disk = next(disk_iterator)
 
936
            disk_finished = False
 
937
        except OSError as e:
 
938
            if not (e.errno == errno.ENOENT
 
939
                    or (sys.platform == 'win32'
                        and e.errno == ERROR_PATH_NOT_FOUND)):
 
940
                raise
 
941
            current_disk = None
 
942
            disk_finished = True
 
943
        try:
 
944
            current_inv = next(inventory_iterator)
 
945
            inv_finished = False
 
946
        except StopIteration:
 
947
            current_inv = None
 
948
            inv_finished = True
 
949
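        # Walk the index ('inventory') and the filesystem in parallel, in
        # sorted order, merging unknown, missing and versioned entries
        # into one dirblock per directory.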
        while not inv_finished or not disk_finished:
 
950
            if current_disk:
 
951
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
 
952
                    cur_disk_dir_content) = current_disk
 
953
            else:
 
954
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
 
955
                    cur_disk_dir_content) = ((None, None), None)
 
956
            if not disk_finished:
 
957
                # strip out the .git control dir
 
958
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
 
959
                        and len(cur_disk_dir_content) > 0):
 
960
                    # osutils.walkdirs can be made nicer -
 
961
                    # yield the path-from-prefix rather than the pathjoined
 
962
                    # value.
 
963
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
 
964
                                             ('.git', '.git'))
 
965
                    if (bzrdir_loc < len(cur_disk_dir_content) and
 
966
                        self.controldir.is_control_filename(
 
967
                            cur_disk_dir_content[bzrdir_loc][0])):
 
968
                        # don't yield the .git control dir or its contents.
 
969
                        del cur_disk_dir_content[bzrdir_loc]
 
970
            if inv_finished:
 
971
                # everything is unknown
 
972
                direction = 1
 
973
            elif disk_finished:
 
974
                # everything is missing
 
975
                direction = -1
 
976
            else:
 
977
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
 
978
                             - (current_inv[0][0] < cur_disk_dir_relpath))
 
979
            if direction > 0:
 
980
                # disk is before inventory - unknown
 
981
                dirblock = [(relpath, basename, kind, stat, None, None) for
 
982
                            relpath, basename, kind, stat, top_path in
 
983
                            cur_disk_dir_content]
 
984
                yield (cur_disk_dir_relpath, None), dirblock
 
985
                try:
 
986
                    current_disk = next(disk_iterator)
 
987
                except StopIteration:
 
988
                    disk_finished = True
 
989
            elif direction < 0:
 
990
                # inventory is before disk - missing.
 
991
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
 
992
                            for relpath, basename, dkind, stat, fileid, kind in
 
993
                            current_inv[1]]
 
994
                yield (current_inv[0][0], current_inv[0][1]), dirblock
 
995
                try:
 
996
                    current_inv = next(inventory_iterator)
 
997
                except StopIteration:
 
998
                    inv_finished = True
 
999
            else:
 
1000
                # versioned present directory
 
1001
                # merge the inventory and disk data together
 
1002
                dirblock = []
 
1003
                for relpath, subiterator in itertools.groupby(sorted(
 
1004
                        current_inv[1] + cur_disk_dir_content,
 
1005
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
 
1006
                    path_elements = list(subiterator)
 
1007
                    if len(path_elements) == 2:
 
1008
                        inv_row, disk_row = path_elements
 
1009
                        # versioned, present file
 
1010
                        dirblock.append((inv_row[0],
 
1011
                                         inv_row[1], disk_row[2],
 
1012
                                         disk_row[3], inv_row[4],
 
1013
                                         inv_row[5]))
 
1014
                    elif len(path_elements[0]) == 5:
 
1015
                        # unknown disk file
 
1016
                        dirblock.append(
 
1017
                            (path_elements[0][0], path_elements[0][1],
 
1018
                                path_elements[0][2], path_elements[0][3],
 
1019
                                None, None))
 
1020
                    elif len(path_elements[0]) == 6:
 
1021
                        # versioned, absent file.
 
1022
                        dirblock.append(
 
1023
                            (path_elements[0][0], path_elements[0][1],
 
1024
                                'unknown', None, path_elements[0][4],
 
1025
                                path_elements[0][5]))
 
1026
                    else:
 
1027
                        raise NotImplementedError('unreachable code')
 
1028
                yield current_inv[0], dirblock
 
1029
                try:
 
1030
                    current_inv = next(inventory_iterator)
 
1031
                except StopIteration:
 
1032
                    inv_finished = True
 
1033
                try:
 
1034
                    current_disk = next(disk_iterator)
 
1035
                except StopIteration:
 
1036
                    disk_finished = True
 
1037
 
 
1038
    def _walkdirs(self, prefix=u""):
 
1039
        if prefix != u"":
 
1040
            prefix += u"/"
 
1041
        prefix = prefix.encode('utf-8')
 
1042
        per_dir = defaultdict(set)
 
1043
        if prefix == b"":
 
1044
            per_dir[(u'', self.get_root_id())] = set()
 
1045
 
 
1046
        def add_entry(path, kind):
 
1047
            if path == b'' or not path.startswith(prefix):
 
1048
                return
 
1049
            (dirname, child_name) = posixpath.split(path)
 
1050
            add_entry(dirname, 'directory')
 
1051
            dirname = dirname.decode("utf-8")
 
1052
            dir_file_id = self.path2id(dirname)
 
1053
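            # 'value' is the index entry bound by the caller's loop (a
            # 10-field tuple); reject anything else early.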
            if not isinstance(value, tuple) or len(value) != 10:
 
1054
                raise ValueError(value)
 
1055
            per_dir[(dirname, dir_file_id)].add(
 
1056
                (path.decode("utf-8"), child_name.decode("utf-8"),
 
1057
                 kind, None,
 
1058
                 self.path2id(path.decode("utf-8")),
 
1059
                 kind))
 
1060
        with self.lock_read():
 
1061
            for path, value in self.index.iteritems():
 
1062
                if self.mapping.is_special_file(path):
 
1063
                    continue
 
1064
                if not path.startswith(prefix):
 
1065
                    continue
 
1066
                add_entry(path, mode_kind(value.mode))
 
1067
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))
 
1068
 
 
1069
    def get_shelf_manager(self):
 
1070
        raise workingtree.ShelvingUnsupported()
 
1071
 
 
1072
    def store_uncommitted(self):
 
1073
        raise errors.StoringUncommittedNotSupported(self)
 
1074
 
 
1075
    def apply_inventory_delta(self, changes):
 
1076
        for (old_path, new_path, file_id, ie) in changes:
 
1077
            if old_path is not None:
 
1078
                (index, old_subpath) = self._lookup_index(
 
1079
                    old_path.encode('utf-8'))
 
1080
                try:
 
1081
                    self._index_del_entry(index, old_subpath)
 
1082
                except KeyError:
 
1083
                    pass
 
1084
                else:
 
1085
                    self._versioned_dirs = None
 
1086
            if new_path is not None and ie.kind != 'directory':
 
1087
                if ie.kind == 'tree-reference':
 
1088
                    self._index_add_entry(
 
1089
                        new_path, ie.kind,
 
1090
                        reference_revision=ie.reference_revision)
 
1091
                else:
 
1092
                    self._index_add_entry(new_path, ie.kind)
 
1093
        self.flush()
 
1094
 
 
1095
    def annotate_iter(self, path,
 
1096
                      default_revision=_mod_revision.CURRENT_REVISION):
 
1097
        """See Tree.annotate_iter
 
1098
 
 
1099
        This implementation will use the basis tree implementation if possible.
 
1100
        Lines not in the basis are attributed to CURRENT_REVISION
 
1101
 
 
1102
        If there are pending merges, lines added by those merges will be
 
1103
        incorrectly attributed to CURRENT_REVISION (but after committing, the
 
1104
        attribution will be correct).
 
1105
        """
 
1106
        with self.lock_read():
 
1107
            maybe_file_parent_keys = []
 
1108
            for parent_id in self.get_parent_ids():
 
1109
                try:
 
1110
                    parent_tree = self.revision_tree(parent_id)
 
1111
                except errors.NoSuchRevisionInTree:
 
1112
                    parent_tree = self.branch.repository.revision_tree(
 
1113
                        parent_id)
 
1114
                with parent_tree.lock_read():
 
1115
                    # TODO(jelmer): Use rename/copy tracker to find path name
 
1116
                    # in parent
 
1117
                    parent_path = path
 
1118
                    try:
 
1119
                        kind = parent_tree.kind(parent_path)
 
1120
                    except errors.NoSuchFile:
 
1121
                        continue
 
1122
                    if kind != 'file':
 
1123
                        # Note: this is slightly unnecessary, because symlinks
 
1124
                        # and directories have a "text" which is the empty
 
1125
                        # text, and we know that won't mess up annotations. But
 
1126
                        # it seems cleaner
 
1127
                        continue
 
1128
                    parent_text_key = (
 
1129
                        parent_path,
 
1130
                        parent_tree.get_file_revision(parent_path))
 
1131
                    if parent_text_key not in maybe_file_parent_keys:
 
1132
                        maybe_file_parent_keys.append(parent_text_key)
 
1133
            # Now we have the parents of this content
 
1134
            from breezy.annotate import Annotator
 
1135
            from .annotate import AnnotateProvider
 
1136
            annotate_provider = AnnotateProvider(
 
1137
                self.branch.repository._file_change_scanner)
 
1138
            annotator = Annotator(annotate_provider)
 
1139
 
 
1140
            from breezy.graph import Graph
 
1141
            graph = Graph(annotate_provider)
 
1142
            heads = graph.heads(maybe_file_parent_keys)
 
1143
            file_parent_keys = []
 
1144
            for key in maybe_file_parent_keys:
 
1145
                if key in heads:
 
1146
                    file_parent_keys.append(key)
 
1147
 
 
1148
            text = self.get_file_text(path)
 
1149
            this_key = (path, default_revision)
 
1150
            annotator.add_special_text(this_key, file_parent_keys, text)
 
1151
            annotations = [(key[-1], line)
 
1152
                           for key, line in annotator.annotate_flat(this_key)]
 
1153
            return annotations
 
1154
 
 
1155
    def _rename_one(self, from_rel, to_rel):
 
1156
        os.rename(self.abspath(from_rel), self.abspath(to_rel))
 
1157
 
 
1158
    def _build_checkout_with_index(self):
 
1159
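        # Check out the tree of the current branch head (if any) into the
        # working directory and rebuild .git/index to match it.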
        build_index_from_tree(
 
1160
            self.user_transport.local_abspath('.'),
 
1161
            self.control_transport.local_abspath("index"),
 
1162
            self.store,
 
1163
            None
 
1164
            if self.branch.head is None
 
1165
            else self.store[self.branch.head].tree,
 
1166
            honor_filemode=self._supports_executable())
 
1167
 
 
1168
    def reset_state(self, revision_ids=None):
 
1169
        """Reset the state of the working tree.
 
1170
 
 
1171
        This does a hard-reset to a last-known-good state. This is a way to
 
1172
        recover if something has become corrupted (like the .git/index file).
 
1173
        """
 
1174
        with self.lock_tree_write():
 
1175
            if revision_ids is not None:
 
1176
                self.set_parent_ids(revision_ids)
 
1177
            self.index.clear()
 
1178
            self._index_dirty = True
 
1179
            if self.branch.head is not None:
 
1180
                for entry in self.store.iter_tree_contents(
 
1181
                        self.store[self.branch.head].tree):
 
1182
                    if not validate_path(entry.path):
 
1183
                        continue
 
1184
 
 
1185
                    if S_ISGITLINK(entry.mode):
 
1186
                        pass  # TODO(jelmer): record and return submodule paths
 
1187
                    else:
 
1188
                        # Let's at least try to use the working tree file:
 
1189
                        try:
 
1190
                            st = self._lstat(self.abspath(
 
1191
                                entry.path.decode('utf-8')))
 
1192
                        except OSError:
 
1193
                            # But if it doesn't exist, we'll make something up.
 
1194
                            obj = self.store[entry.sha]
 
1195
                            st = os.stat_result((entry.mode, 0, 0, 0,
 
1196
                                                 0, 0, len(
 
1197
                                                     obj.as_raw_string()), 0,
 
1198
                                                 0, 0))
 
1199
                    (index, subpath) = self._lookup_index(entry.path)
 
1200
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)
 
1201
 
 
1202
    def pull(self, source, overwrite=False, stop_revision=None,
 
1203
             change_reporter=None, possible_transports=None, local=False,
 
1204
             show_base=False):
 
1205
        with self.lock_write(), source.lock_read():
 
1206
            old_revision = self.branch.last_revision()
 
1207
            basis_tree = self.basis_tree()
 
1208
            count = self.branch.pull(source, overwrite, stop_revision,
 
1209
                                     possible_transports=possible_transports,
 
1210
                                     local=local)
 
1211
            new_revision = self.branch.last_revision()
 
1212
            if new_revision != old_revision:
 
1213
                with basis_tree.lock_read():
 
1214
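                    # Merge the changes between the old and new basis
                    # trees into the working tree so local modifications
                    # survive the pull.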
                    new_basis_tree = self.branch.basis_tree()
 
1215
                    merge.merge_inner(
 
1216
                        self.branch,
 
1217
                        new_basis_tree,
 
1218
                        basis_tree,
 
1219
                        this_tree=self,
 
1220
                        change_reporter=change_reporter,
 
1221
                        show_base=show_base)
 
1222
            return count
 
1223
 
 
1224
    def add_reference(self, sub_tree):
 
1225
        """Add a TreeReference to the tree, pointing at sub_tree.
 
1226
 
 
1227
        :param sub_tree: subtree to add.
 
1228
        """
 
1229
        with self.lock_tree_write():
 
1230
            try:
 
1231
                sub_tree_path = self.relpath(sub_tree.basedir)
 
1232
            except errors.PathNotChild:
 
1233
                raise BadReferenceTarget(
 
1234
                    self, sub_tree, 'Target not inside tree.')
 
1235
 
 
1236
            self._add([sub_tree_path], [None], ['tree-reference'])
 
1237
 
 
1238
    def _read_submodule_head(self, path):
 
1239
        return read_submodule_head(self.abspath(path))
 
1240
 
 
1241
    def get_reference_revision(self, path):
 
1242
        hexsha = self._read_submodule_head(path)
 
1243
        if hexsha is None:
 
1244
            return _mod_revision.NULL_REVISION
 
1245
        return self.branch.lookup_foreign_revision_id(hexsha)
 
1246
 
 
1247
    def get_nested_tree(self, path):
 
1248
        return workingtree.WorkingTree.open(self.abspath(path))
 
1249
 
 
1250
    def _directory_is_tree_reference(self, relpath):
 
1251
        # as a special case, if a directory contains control files then
 
1252
        # it's a tree reference, except that the root of the tree is not
 
1253
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")
 
1254
 
 
1255
    def extract(self, sub_path, format=None):
 
1256
        """Extract a subtree from this tree.
 
1257
 
 
1258
        A new branch will be created, relative to the path for this tree.
 
1259
        """
 
1260
        def mkdirs(path):
 
1261
            segments = osutils.splitpath(path)
 
1262
            transport = self.branch.controldir.root_transport
 
1263
            for name in segments:
 
1264
                transport = transport.clone(name)
 
1265
                transport.ensure_base()
 
1266
            return transport
 
1267
 
 
1268
        with self.lock_tree_write():
 
1269
            self.flush()
 
1270
            branch_transport = mkdirs(sub_path)
 
1271
            if format is None:
 
1272
                format = self.controldir.cloning_metadir()
 
1273
            branch_transport.ensure_base()
 
1274
            branch_bzrdir = format.initialize_on_transport(branch_transport)
 
1275
            try:
 
1276
                repo = branch_bzrdir.find_repository()
 
1277
            except errors.NoRepositoryPresent:
 
1278
                repo = branch_bzrdir.create_repository()
 
1279
            if not repo.supports_rich_root():
 
1280
                raise errors.RootNotRich()
 
1281
            new_branch = branch_bzrdir.create_branch()
 
1282
            new_branch.pull(self.branch)
 
1283
            for parent_id in self.get_parent_ids():
 
1284
                new_branch.fetch(self.branch, parent_id)
 
1285
            tree_transport = self.controldir.root_transport.clone(sub_path)
 
1286
            if tree_transport.base != branch_transport.base:
 
1287
                tree_bzrdir = format.initialize_on_transport(tree_transport)
 
1288
                tree_bzrdir.set_branch_reference(new_branch)
 
1289
            else:
 
1290
                tree_bzrdir = branch_bzrdir
 
1291
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
 
1292
            wt.set_parent_ids(self.get_parent_ids())
 
1293
            return wt
 
1294
 
 
1295
    def _get_check_refs(self):
 
1296
        """Return the references needed to perform a check of this tree.
 
1297
 
 
1298
        The default implementation returns no refs, and is only suitable for
 
1299
        trees that have no local caching and can commit on ghosts at any time.
 
1300
 
 
1301
        :seealso: breezy.check for details about check_refs.
 
1302
        """
 
1303
        return []
 
1304
 
 
1305
    def copy_content_into(self, tree, revision_id=None):
 
1306
        """Copy the current content and user files of this tree into tree."""
 
1307
        with self.lock_read():
 
1308
            if revision_id is None:
 
1309
                merge.transform_tree(tree, self)
 
1310
            else:
 
1311
                # TODO now merge from tree.last_revision to revision (to
 
1312
                # preserve user local changes)
 
1313
                try:
 
1314
                    other_tree = self.revision_tree(revision_id)
 
1315
                except errors.NoSuchRevision:
 
1316
                    other_tree = self.branch.repository.revision_tree(
 
1317
                        revision_id)
 
1318
 
 
1319
                merge.transform_tree(tree, other_tree)
 
1320
                if revision_id == _mod_revision.NULL_REVISION:
 
1321
                    new_parents = []
 
1322
                else:
 
1323
                    new_parents = [revision_id]
 
1324
                tree.set_parent_ids(new_parents)
 
1325
 
 
1326
 
 
1327
class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):
 
1328
 
 
1329
    _tree_class = GitWorkingTree
 
1330
 
 
1331
    supports_versioned_directories = False
 
1332
 
 
1333
    supports_setting_file_ids = False
 
1334
 
 
1335
    supports_store_uncommitted = False
 
1336
 
 
1337
    supports_leftmost_parent_id_as_ghost = False
 
1338
 
 
1339
    supports_righthand_parent_id_as_ghost = False
 
1340
 
 
1341
    requires_normalized_unicode_filenames = True
 
1342
 
 
1343
    supports_merge_modified = False
 
1344
 
 
1345
    ignore_filename = ".gitignore"
 
1346
 
 
1347
    @property
 
1348
    def _matchingcontroldir(self):
 
1349
        from .dir import LocalGitControlDirFormat
 
1350
        return LocalGitControlDirFormat()
 
1351
 
 
1352
    def get_format_description(self):
 
1353
        return "Git Working Tree"
 
1354
 
 
1355
    def initialize(self, a_controldir, revision_id=None, from_branch=None,
 
1356
                   accelerator_tree=None, hardlink=False):
 
1357
        """See WorkingTreeFormat.initialize()."""
 
1358
        if not isinstance(a_controldir, LocalGitDir):
 
1359
            raise errors.IncompatibleFormat(self, a_controldir)
 
1360
        branch = a_controldir.open_branch(nascent_ok=True)
 
1361
        if revision_id is not None:
 
1362
            branch.set_last_revision(revision_id)
 
1363
        wt = GitWorkingTree(
 
1364
            a_controldir, a_controldir.open_repository(), branch)
 
1365
        for hook in MutableTree.hooks['post_build_tree']:
 
1366
            hook(wt)
 
1367
        return wt