/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/plugins/git/workingtree.py

  • Committer: Jelmer Vernooij
  • Date: 2018-05-22 02:05:12 UTC
  • mto: (6973.12.2 python3-k)
  • mto: This revision was merged to the branch mainline in revision 6992.
  • Revision ID: jelmer@jelmer.uk-20180522020512-btpj2jchdlehi3en
Add more bees.

# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import

import itertools
from collections import defaultdict
import errno
from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    Index,
    SHA1Writer,
    build_index_from_tree,
    changes_from_tree,
    cleanup_mode,
    commit_tree,
    index_entry_from_path,
    index_entry_from_stat,
    iter_fresh_entries,
    blob_from_path_and_stat,
    FLAG_STAGEMASK,
    read_submodule_head,
    validate_path,
    write_index_dict,
    )
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    Blob,
    Tree,
    S_IFGITLINK,
    S_ISGITLINK,
    ZERO_SHA,
    )
from dulwich.repo import (
    NotGitRepository,
    Repo as GitRepo,
    )
import os
import posixpath
import re
import stat
import sys
 
from ... import (
    conflicts as _mod_conflicts,
    errors,
    controldir as _mod_controldir,
    globbing,
    ignores,
    lock,
    merge,
    osutils,
    revision as _mod_revision,
    trace,
    transport as _mod_transport,
    tree,
    workingtree,
    )
from ...decorators import (
    only_raises,
    )
from ...bzr import (
    inventory,
    )
from ...mutabletree import (
    BadReferenceTarget,
    MutableTree,
    )


from .dir import (
    LocalGitDir,
    )
from .tree import (
    changes_from_git_changes,
    tree_delta_from_git_changes,
    InterGitTrees,
    MutableGitIndexTree,
    )
from .mapping import (
    GitFileIdMap,
    mode_kind,
    )

IGNORE_FILENAME = ".gitignore"


class GitWorkingTree(MutableGitIndexTree,workingtree.WorkingTree):
    """A Git working tree."""
 
    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self.index = None
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()
        self._reset_data()

    def supports_tree_reference(self):
        return True

    def supports_rename_tracking(self):
        return False

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
            self._read_index()
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
            try:
                self._index_file = GitFile(self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
            self._read_index()
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count +=1
 
    def lock_tree_write(self):
        self.branch.lock_read()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except:
            self.branch.unlock()
            raise

    def lock_write(self, token=None):
        self.branch.lock_write()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except:
            self.branch.unlock()
            raise

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):
        return False

    def break_lock(self):
        try:
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
            pass
        self.branch.break_lock()

    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        try:
            self._cleanup()
            self._lock_count -= 1
            if self._lock_count > 0:
                return
            if self._index_file is not None:
                if self._index_dirty:
                    self._flush(self._index_file)
                    self._index_file.close()
                else:
                    # Somebody else already wrote the index file
                    # by calling .flush()
                    self._index_file.abort()
                self._index_file = None
            self._lock_mode = None
            self.index = None
        finally:
            self.branch.unlock()

    def _cleanup(self):
        pass
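    # A note on the locking scheme implemented above: lock_read(),
    # lock_tree_write() and lock_write() only maintain a lock count and mode
    # on this object; the first write lock additionally opens 'index.lock'
    # through dulwich's GitFile, translating FileLocked into
    # errors.LockContention.  The final unlock() writes the in-memory index
    # back via _flush() when it is dirty, and otherwise aborts the GitFile
    # (for example when flush() already wrote the index), so the lock file
    # is always cleaned up.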
 
    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def merge_modified(self):
        return {}

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes('MERGE_HEAD', '\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip('\n')
                parents.append(self.branch.lookup_foreign_revision_id(revision_id))
        return parents
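    # Pending merges are recorded the same way git records them: one Git
    # commit sha per line in the control file MERGE_HEAD, written by
    # _set_merges_from_parent_ids() above.  A tree with a single pending
    # merge would have a MERGE_HEAD containing one line such as (made-up
    # sha for illustration):
    #
    #   9d2c3a1e6f0b4a7c8d5e2f1a0b9c8d7e6f5a4b3c
    #
    # get_parent_ids() turns each such line back into a Bazaar revision id
    # with branch.lookup_foreign_revision_id().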
 
    def check_state(self):
        """Check that the working state is/isn't valid."""
        pass

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
        force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if isinstance(files, basestring):
            files = [files]

        if to_file is None:
            to_file = sys.stdout

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory
        files_to_backup = []

        all_files = set()

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)

                if filepath:
                    all_files.add(filepath)
                    recurse_directory_to_add_files(filepath)

            files = list(all_files)

            if len(files) == 0:
                return # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                         include_unchanged=True, require_versioned=False,
                         want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))

            for f in files:
                if f == '':
                    continue

                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None

                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + '       ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None
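    # Note that remove() defaults to keep_files=True: the nominated paths
    # are only dropped from the index (via _unversion_path) and remain on
    # disk.  With keep_files=False, unchanged files are deleted outright,
    # while changed or not-yet-versioned files are renamed to a backup name
    # unless force=True.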
 
    def smart_add(self, file_list, recurse=True, action=None, save=True):
        if not file_list:
            file_list = [u'.']

        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []
        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                    if subpath in index:
                        # Already present
                        continue
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                if user_dir != '':
                    try:
                        transport = _mod_transport.get_transport_from_path(abs_user_dir)
                        _mod_controldir.ControlDirFormat.find_format(transport)
                        subtree = True
                    except errors.NotBranchError:
                        subtree = False
                    except errors.UnsupportedFormatError:
                        subtree = False
                else:
                    subtree = False
                if subtree:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        if subp in self.index:
                            # Already present
                            continue
                        if subp in conflicts_related:
                            continue
                        call_action(filepath, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)
            return added, ignored
 
    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        if from_dir is None:
            from_dir = u""
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip("/")
            if self.controldir.is_control_filename(dir_relpath):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(name):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                    try:
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set([p.decode('utf-8') for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=True))
            for p in (all_paths - index_paths):
                if not self._has_dir(p):
                    yield p

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                    try:
                         kind = osutils.file_kind(fullpath)
                    except OSError as e:
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                    if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
                        kind = 'tree-reference'
                    kinds[pos] = kind

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file name :(
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        try:
            shaf = SHA1Writer(f)
            write_index_dict(shaf, self.index)
            shaf.close()
        except:
            f.abort()
            raise
        self._index_dirty = False

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_file_id(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        try:
            root_tree = self.store[head].tree
        except KeyError:
            return False
        try:
            tree_lookup_path(self.store.__getitem__, root_tree, path)
        except KeyError:
            return False
        else:
            return True

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            (num, msg) = e
            if num == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None.  So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += b'/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps:
            return None
        if not ps[-1].is_exclude:
            return None
        return bytes(ps[-1])
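    # Ignore handling is layered: Bazaar's runtime and per-user ignore globs
    # are consulted first through globbing.ExceptionGlobster, and only then
    # are .gitignore rules evaluated via dulwich's IgnoreFilterManager (see
    # _get_ignore_manager below).  Directory names get a trailing '/'
    # appended so that directory-only gitignore patterns match as they do
    # in git.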
 
    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return False
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        try:
            head = self.repository._git.head()
        except KeyError:
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        else:
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return ("GIT", index[subpath].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                (num, msg) = e
                if num in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def filter_unversioned_files(self, files):
        return set([p for p in files if not self.is_versioned(p)])

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return mode_kind(index[subpath].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(path):
                    return "directory"
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    mode = 0
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
 
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        if from_dir is None:
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
            else:
                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                    os.listdir(self.abspath(from_dir).encode(osutils._fs_enc)) if not self.controldir.is_control_filename(name)
                    and not self.mapping.is_special_file(name)])
            for path in path_iterator:
                try:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                try:
                    value = index[index_path]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                    pass
                if kind in ('directory', 'tree-reference'):
                    if path != from_dir:
                        if self._has_dir(path):
                            ie = self._get_dir_ie(path, self.path2id(path))
                            status = "V"
                            file_id = ie.file_id
                        elif self.is_ignored(path):
                            status = "I"
                            ie = fk_entries[kind]()
                            file_id = None
                        else:
                            status = "?"
                            ie = fk_entries[kind]()
                            file_id = None
                        yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                else:
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                    pass
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            paths = {u""}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")
                    if path in paths:
                        break
                    paths.add(path)
            return paths

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            seen_children = set()
            for item_path, value in self.index.iteritems():
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(encoded_path, item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(item_path, encoded_path)
                if b'/' in subpath:
                    dirname = subpath.split(b'/', 1)[0]
                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
                else:
                    (parent, name) = posixpath.split(item_path)
                    file_ie = self._get_file_ie(
                            name.decode('utf-8'),
                            item_path.decode('utf-8'), value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] &~ FLAG_STAGEMASK, ))
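    # Conflicts are not tracked in a separate control file: an index entry
    # counts as conflicted when any of its stage bits (FLAG_STAGEMASK) are
    # set, which is how git marks higher-stage entries during a merge.
    # _set_conflicted() rewrites element 9 (the flags field) of the index
    # tuple to set or clear those bits, and conflicts() above reports every
    # flagged entry as a TextConflict.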
 
    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                    try:
                        self._set_conflicted(conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
                ((curren_directory_path, fileid),
                 [(file1_path, file1_name, file1_kind, (lstat), file1_id,
                   file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        import operator
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT or
                (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                    len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                        ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content)
                        and self.controldir.is_control_filename(
                            cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                    relpath, basename, kind, stat, top_path in
                    cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                    for relpath, basename, dkind, stat, fileid, kind in
                    current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                    current_inv[1] + cur_disk_dir_content,
                    key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                            inv_row[1], disk_row[2],
                            disk_row[3], inv_row[4],
                            inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unknown disk file
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], path_elements[0][2],
                            path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], 'unknown', None,
                            path_elements[0][4], path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
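    # Each item yielded by walkdirs() pairs a directory key with a block of
    # entries, merging what the index knows with what is actually on disk.
    # A made-up entry for a versioned, present file could look like:
    #
    #   (('doc', 'doc-dir-file-id'),
    #    [('doc/index.txt', 'index.txt', 'file', <stat result>,
    #      'index.txt-file-id', 'file')])
    #
    # Unversioned files carry None in the trailing id/kind slots, and
    # versioned-but-missing files get 'unknown' as their on-disk kind, as
    # built in the three branches of the loop above.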
 
    def _walkdirs(self, prefix=""):
        if prefix != "":
            prefix += "/"
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        if prefix == "":
            per_dir[('', self.get_root_id())] = set()
        def add_entry(path, kind):
            if path == '' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                kind, None,
                self.path2id(path.decode("utf-8")),
                kind))
        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(old_path.encode('utf-8'))
                try:
                    self._index_del_entry(index, old_subpath)
                except KeyError:
                    pass
                else:
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                            new_path, ie.kind,
                            reference_revision=ie.reference_revision)
                else:
                    self._index_add_entry(new_path, ie.kind)
        self.flush()

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                            parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            graph = self.branch.repository.get_file_graph()
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotator = Annotator(AnnotateProvider(
                self.branch.repository._file_change_scanner))
            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations
 
1136
 
 
1137
    def _rename_one(self, from_rel, to_rel):
 
1138
        os.rename(self.abspath(from_rel), self.abspath(to_rel))
 
1139
 
 
1140
    def _build_checkout_with_index(self):
 
1141
        build_index_from_tree(
 
1142
            self.user_transport.local_abspath('.'),
 
1143
            self.control_transport.local_abspath("index"),
 
1144
            self.store,
 
1145
            None if self.branch.head is None else self.store[self.branch.head].tree)
 
1146
 
 
1147
    def reset_state(self, revision_ids=None):
 
1148
        """Reset the state of the working tree.
 
1149
 
 
1150
        This does a hard-reset to a last-known-good state. This is a way to
 
1151
        fix if something got corrupted (like the .git/index file)
 
1152
        """
 
1153
        with self.lock_tree_write():
 
1154
            if revision_ids is not None:
 
1155
                self.set_parent_ids(revision_ids)
 
1156
            self.index.clear()
 
1157
            self._index_dirty = True
 
1158
            if self.branch.head is not None:
 
1159
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
 
1160
                    if not validate_path(entry.path):
 
1161
                        continue
 
1162
 
 
1163
                    if S_ISGITLINK(entry.mode):
 
1164
                        pass # TODO(jelmer): record and return submodule paths
 
1165
                    else:
 
1166
                        # Let's at least try to use the working tree file:
 
1167
                        try:
 
1168
                            st = self._lstat(self.abspath(entry.path))
 
1169
                        except OSError:
 
1170
                            # But if it doesn't exist, we'll make something up.
 
1171
                            obj = self.store[entry.sha]
 
1172
                            st = os.stat_result((entry.mode, 0, 0, 0,
 
1173
                                  0, 0, len(obj.as_raw_string()), 0,
 
1174
                                  0, 0))
 
1175
                    (index, subpath) = self._lookup_index(entry.path)
 
1176
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)
 
1177
 
 
1178
    def pull(self, source, overwrite=False, stop_revision=None,
 
1179
             change_reporter=None, possible_transports=None, local=False,
 
1180
             show_base=False):
 
1181
        with self.lock_write(), source.lock_read():
 
1182
            old_revision = self.branch.last_revision()
 
1183
            basis_tree = self.basis_tree()
 
1184
            count = self.branch.pull(source, overwrite, stop_revision,
 
1185
                                     possible_transports=possible_transports,
 
1186
                                     local=local)
 
1187
            new_revision = self.branch.last_revision()
 
1188
            if new_revision != old_revision:
 
1189
                with basis_tree.lock_read():
 
1190
                    new_basis_tree = self.branch.basis_tree()
 
1191
                    merge.merge_inner(
 
1192
                                self.branch,
 
1193
                                new_basis_tree,
 
1194
                                basis_tree,
 
1195
                                this_tree=self,
 
1196
                                change_reporter=change_reporter,
 
1197
                                show_base=show_base)
 
1198
            return count
 
1199
 
 
1200
    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
            try:
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, file_id=None):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path, file_id=None):
        return workingtree.WorkingTree.open(self.abspath(path))

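The submodule helpers above chain together: _read_submodule_head() asks dulwich for the HEAD sha recorded in the nested repository, and get_reference_revision() maps that sha to a Bazaar revision id. A hedged sketch of that flow, using a hypothetical submodule path inside an already-opened GitWorkingTree wt:

# Illustrative sketch, not part of workingtree.py; 'vendor/lib' is a
# hypothetical submodule path and 'wt' an already-opened GitWorkingTree.
from breezy.revision import NULL_REVISION

revid = wt.get_reference_revision('vendor/lib')  # NULL_REVISION if no HEAD is recorded
if revid != NULL_REVISION:
    subtree = wt.get_nested_tree('vendor/lib')   # opens the nested working tree
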
    def _directory_is_tree_reference(self, relpath):
        # As a special case, if a directory contains control files then it is
        # a tree reference; the root of the tree itself is never one.
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, file_id=None, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
        def mkdirs(path):
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            self.flush()
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
            return wt

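extract() above splits a subdirectory out into its own branch and working tree, inheriting the parent ids of the source tree. A minimal sketch of calling it, assuming an already-opened working tree wt; the subdirectory name is hypothetical:

# Illustrative sketch, not part of workingtree.py; 'plugins/foo' is a
# hypothetical subdirectory of the already-opened working tree 'wt'.
subtree = wt.extract('plugins/foo')   # new branch and tree rooted at plugins/foo
print(subtree.basedir)                # the extracted tree's root on disk
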
    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    @property
    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']:
            hook(wt)
        return wt


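initialize() above is normally reached through control-directory creation rather than being called directly. A hedged sketch of that indirect path, assuming the git plugin's LocalGitControlDirFormat is available; the target path is hypothetical:

# Illustrative sketch, not part of workingtree.py; '/tmp/newtree' is a
# hypothetical path. Creating a working tree inside a LocalGitDir is
# assumed to route through GitWorkingTreeFormat.initialize().
from breezy.controldir import ControlDir
from breezy.plugins.git.dir import LocalGitControlDirFormat

controldir = ControlDir.create('/tmp/newtree', format=LocalGitControlDirFormat())
wt = controldir.create_workingtree()   # -> GitWorkingTree
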
class InterIndexGitTree(InterGitTrees):
    """InterTree that works between a Git revision tree and an index."""

    def __init__(self, source, target):
        super(InterIndexGitTree, self).__init__(source, target)
        self._index = target.index

    @classmethod
    def is_compatible(cls, source, target):
        from .repository import GitRevisionTree
        return (isinstance(source, GitRevisionTree) and
                isinstance(target, GitWorkingTree))

    def _iter_git_changes(self, want_unchanged=False, specific_files=None,
                          require_versioned=False, extra_trees=None,
                          want_unversioned=False):
        trees = [self.source]
        if extra_trees is not None:
            trees.extend(extra_trees)
        if specific_files is not None:
            specific_files = self.target.find_related_paths_across_trees(
                specific_files, trees,
                require_versioned=require_versioned)
        # TODO(jelmer): Restrict to specific_files, for performance reasons.
        with self.lock_read():
            return changes_between_git_tree_and_working_copy(
                self.source.store, self.source.tree,
                self.target, want_unchanged=want_unchanged,
                want_unversioned=want_unversioned)


tree.InterTree.register_optimiser(InterIndexGitTree)


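Registering the optimiser means the generic InterTree.get() lookup will return an InterIndexGitTree whenever is_compatible() matches. A minimal sketch of how a caller reaches it, assuming only the standard InterTree API and an already-opened GitWorkingTree wt:

# Illustrative sketch, not part of workingtree.py. The basis tree of a
# GitWorkingTree is a GitRevisionTree, so InterTree.get() is assumed to
# resolve to the InterIndexGitTree optimiser registered above.
from breezy import tree as _mod_tree

intertree = _mod_tree.InterTree.get(wt.basis_tree(), wt)
for change in intertree.iter_changes(want_unversioned=True):
    print(change)
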
def changes_between_git_tree_and_working_copy(store, from_tree_sha, target,
        want_unchanged=False, want_unversioned=False):
    """Determine the changes between a git tree and a working tree with index."""
    extras = set()
    blobs = {}
    # Report dirified directories to commit_tree first, so that they can be
    # replaced with non-empty directories if they have contents.
    dirified = []
    target_root_path = target.abspath('.').encode(sys.getfilesystemencoding())
    for path, index_entry in target._recurse_index_entries():
        try:
            live_entry = index_entry_from_path(
                target.abspath(path.decode('utf-8')).encode(osutils._fs_enc))
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                # Entry was removed; keep it listed, but mark it as gone.
                blobs[path] = (ZERO_SHA, 0)
            elif e.errno == errno.EISDIR:
                # Entry was turned into a directory.
                dirified.append((path, Tree().id, stat.S_IFDIR))
                store.add_object(Tree())
            else:
                raise
        else:
            blobs[path] = (live_entry.sha, cleanup_mode(live_entry.mode))
    if want_unversioned:
        for e in target.extras():
            ap = target.abspath(e)
            st = os.lstat(ap)
            try:
                np, accessible = osutils.normalized_filename(e)
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    e, osutils._fs_enc)
            if stat.S_ISDIR(st.st_mode):
                blob = Tree()
            else:
                blob = blob_from_path_and_stat(ap.encode('utf-8'), st)
            store.add_object(blob)
            np = np.encode('utf-8')
            blobs[np] = (blob.id, cleanup_mode(st.st_mode))
            extras.add(np)
    to_tree_sha = commit_tree(
        store, dirified + [(p, s, m) for (p, (s, m)) in blobs.items()])
    return store.tree_changes(
        from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True), extras
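
To see what this helper returns at the dulwich level, a minimal sketch follows; it assumes an already-opened GitWorkingTree wt whose basis tree exposes the store and tree sha that _iter_git_changes() above passes in, and that the result follows dulwich's object-store tree_changes tuple layout.

# Illustrative sketch, not part of workingtree.py. 'wt' is an already-opened
# GitWorkingTree; basis.store and basis.tree mirror what _iter_git_changes()
# passes. Each change is assumed to be ((oldpath, newpath), (oldmode, newmode),
# (oldsha, newsha)), as yielded by dulwich's BaseObjectStore.tree_changes.
basis = wt.basis_tree()
changes, extras = changes_between_git_tree_and_working_copy(
    basis.store, basis.tree, wt, want_unversioned=True)
for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
    print(oldpath, newpath, oldmode, newmode)
print(extras)   # paths present on disk but not in the index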