# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree."""

from __future__ import absolute_import
from collections import defaultdict

from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
from dulwich.object_store import (
from dulwich.objects import (

from .. import (
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    revision as _mod_revision,
    transport as _mod_transport,
from ..decorators import (
from ..mutabletree import (
from .mapping import (

IGNORE_FILENAME = ".gitignore"

class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = self.repository._git._controltransport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()

    def supports_tree_reference(self):

    def supports_rename_tracking(self):

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        if not self._lock_mode:
            self._lock_mode = 'w'
                self._index_file = GitFile(
                    self.control_transport.local_abspath('index'), 'wb')
                raise errors.LockContention('index')
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
            self._lock_count += 1

    def lock_tree_write(self):
        self.branch.lock_read()
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)

    def lock_write(self, token=None):
        self.branch.lock_write()
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
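
    # Write locks piggy-back on Git's own index locking: _lock_write_tree()
    # opens the index through dulwich's GitFile, which takes the 'index.lock'
    # lockfile and raises FileLocked (surfaced as errors.LockContention) when
    # another process already holds it.  Read locks only bump _lock_count and
    # take the corresponding branch lock.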

        return self._lock_count >= 1

    def get_physical_lock_status(self):

    def break_lock(self):
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
        self.branch.break_lock()

    @only_raises(errors.LockNotHeld, errors.LockBroken)
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
            self._lock_count -= 1
            if self._lock_count > 0:
            if self._index_file is not None:
                if self._index_dirty:
                    self._flush(self._index_file)
                    self._index_file.close()
                    # Something else already triggered a write of the index
                    # file by calling .flush()
                    self._index_file.abort()
                self._index_file = None
            self._lock_mode = None

    def _detect_case_handling(self):
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
            self.case_sensitive = False
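        # The probe above stats a deliberately mixed-case spelling of the
        # always-present .git/config file: if the lookup fails, the
        # filesystem is case-sensitive; otherwise it is treated as
        # case-insensitive.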

    def merge_modified(self):

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
            merges = [self.branch.lookup_bzr_revision_id(
                revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
            self.control_transport.put_bytes('MERGE_HEAD', b'\n'.join(merges),
                                             mode=self.controldir._get_file_mode())
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
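        # Pending merges beyond the first parent are recorded the same way
        # git records them: as newline-separated commit SHAs in
        # .git/MERGE_HEAD; an empty merge list removes the file again.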

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)
            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)
            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
                self.set_last_revision(_mod_revision.NULL_REVISION)
            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                    self.branch.lookup_foreign_revision_id(revision_id))

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if not isinstance(files, list):

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                    all_files.add(filepath)
                    recurse_directory_to_add_files(filepath)

            files = list(all_files)
                return # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                        include_unchanged=True, require_versioned=False,
                        want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None)
                            and osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(path[1]))

                except errors.NoSuchFile:
                abs_path = self.abspath(f)
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                    message = "%s does not exist" % (f, )
                        if f in files_to_backup and not force:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
            self._versioned_dirs = None

    def smart_add(self, file_list, recurse=True, action=None, save=True):
        # expand any symlinks in the directory part, while leaving the
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                    raise errors.InvalidNormalization(filepath)
                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    call_action(filepath, kind)
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                        user_dirs.append(filepath)
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                        transport = _mod_transport.get_transport_from_path(
                        _mod_controldir.ControlDirFormat.find_format(transport)
                    except errors.NotBranchError:
                    except errors.UnsupportedFormatError:
                    trace.warning('skipping nested tree %r', abs_user_dir)

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                        (index, subpath) = self._lookup_index(
                            subp.encode('utf-8'))
                        if subp in conflicts_related:
                        call_action(filepath, kind)
                            self._index_add_entry(subp, kind)
            return added, ignored
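
    # Rough usage sketch (the 'tree' variable below is illustrative):
    #     added, ignored = tree.smart_add(['.'])
    # 'added' lists the newly versioned paths and 'ignored' maps each
    # matching ignore glob to the paths it suppressed.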

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(dir_relpath.decode(osutils._fs_enc)):
            for name in list(dirnames):
                if self.controldir.is_control_filename(name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                relpath = os.path.join(dir_relpath, name)
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(

        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set([p.decode('utf-8')
                for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=True))
            for p in (all_paths - index_paths):
                if not self._has_dir(p.encode('utf-8')):

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                        kind = osutils.file_kind(fullpath)
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                    if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
                        kind = 'tree-reference'

        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file name :(
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file

            write_index_dict(shaf, self.index)
        self._index_dirty = False

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
        if self.had_id(file_id):

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_path(file_id)
            head = self.repository._git.head()
            # Assume no if basis is not accessible
            root_tree = self.store[head].tree
            tree_lookup_path(self.store.__getitem__,
                             root_tree, path.encode('utf-8'))

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
            return self._lstat(path).st_mtime
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            if self.kind(filename) == 'directory':
        except errors.NoSuchFile:
        filename = filename.lstrip('/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps[-1].is_exclude:

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None
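
    # Ignore handling combines two sources: Breezy's global and runtime
    # ignore globs (matched by the ExceptionGlobster built in is_ignored())
    # and the repository's own .gitignore rules, evaluated through dulwich's
    # IgnoreFilterManager created from the underlying git repository.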

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
        _mod_revision.check_not_reserved_id(revid)
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
            head = self.repository._git.head()
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
                return ("GIT", index[subpath].sha)
                if self._has_dir(path):
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
                return osutils.sha_file_by_name(abspath)
                if e.errno in (errno.EISDIR, errno.ENOENT):

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            encoded_path = path.encode('utf-8')
            (index, subpath) = self._lookup_index(encoded_path)
                return mode_kind(index[subpath].mode)
                # Maybe it's a directory?
                if self._has_dir(encoded_path):
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        return index_entry_from_path(self.abspath(path.decode('utf-8')).encode(osutils._fs_enc))

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                    mode = index[subpath].mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
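
    # On filesystems that cannot represent an executable bit, the stat-based
    # check above is meaningless, so the executable flag is taken from the
    # basis tree rather than from the on-disk mode.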

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
                path_iterator = sorted(
                    self._iter_files_recursive(from_dir, include_dirs=True))
                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                    os.listdir(self.abspath(
                        from_dir).encode(osutils._fs_enc))
                    if not self.controldir.is_control_filename(name.decode(osutils._fs_enc)) and
                    not self.mapping.is_special_file(name.decode(osutils._fs_enc))])
            for path in path_iterator:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                    value = index[index_path]
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                if kind in ('directory', 'tree-reference'):
                        if self._has_dir(encoded_path):
                            ie = self._get_dir_ie(path, self.path2id(path))
                        elif self.is_ignored(path):
                            ie = fk_entries[kind]()
                            ie = fk_entries[kind]()
                        yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                    path = posixpath.dirname(path).strip("/")

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            seen_children = set()
            for item_path, value in self.index.iteritems():
                decoded_item_path = item_path.decode('utf-8')
                if self.mapping.is_special_file(item_path):
                if not osutils.is_inside(path, decoded_item_path):
                subpath = posixpath.relpath(decoded_item_path, path)
                    dirname = subpath.split('/', 1)[0]
                    file_ie = self._get_dir_ie(
                        posixpath.join(path, dirname), parent_id)
                    (unused_parent, name) = posixpath.split(decoded_item_path)
                    file_ie = self._get_file_ie(
                        name, decoded_item_path, value, parent_id)
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(
                        item_path.decode('utf-8')))

    def set_conflicts(self, conflicts):
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
            self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))
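
    # Conflicts are not stored separately: an index entry counts as
    # conflicted when any of its FLAG_STAGEMASK bits are set, which is
    # exactly what conflicts() above reports as TextConflict entries.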

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                        self._set_conflicted(
                            conflict.path.encode('utf-8'), True)
                        raise errors.UnsupportedOperation(
                            self.add_conflicts, self)
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
        ((current_directory_path, fileid),
        [(file1_path, file1_name, file1_kind, (lstat), file1_id,

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left

        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
            current_disk = next(disk_iterator)
            disk_finished = False
            if not (e.errno == errno.ENOENT
                    or (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
            current_inv = next(inventory_iterator)
        except StopIteration:
        while not inv_finished or not disk_finished:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = current_disk
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
                        and len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                    if (bzrdir_loc < len(cur_disk_dir_content) and
                            self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we don't yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
                # everything is unknown
                # everything is missing
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
                             - (current_inv[0][0] < cur_disk_dir_relpath))
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                    current_disk = next(disk_iterator)
                except StopIteration:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                # versioned present directory
                # merge the inventory and disk data together
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[4],
                    elif len(path_elements[0]) == 5:
                        dirblock.append((path_elements[0][0],
                                         path_elements[0][1], path_elements[0][2],
                                         path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                                         path_elements[0][1], 'unknown', None,
                                         path_elements[0][4], path_elements[0][5]))
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=u""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
            per_dir[(u'', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 self.path2id(path.decode("utf-8")),

        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                if not path.startswith(prefix):
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(
                    old_path.encode('utf-8'))
                    self._index_del_entry(index, old_subpath)
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        reference_revision=ie.reference_revision)
                    self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            None if self.branch.head is None else self.store[self.branch.head].tree)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                    if S_ISGITLINK(entry.mode):
                        pass # TODO(jelmer): record and return submodule paths
                        # Let's at least try to use the working tree file:
                            st = self._lstat(self.abspath(
                                entry.path.decode('utf-8')))
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                                 obj.as_raw_string()), 0,
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)
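            # The index is rebuilt purely from the tree at branch.head: each
            # entry is re-stat'ed from the working tree when the file still
            # exists, and otherwise synthesized from the stored blob so the
            # entry carries at least a plausible mode and size.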

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                        change_reporter=change_reporter,
                        show_base=show_base)

    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')
            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, file_id=None):
        hexsha = self._read_submodule_head(path)
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path, file_id=None):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, file_id=None, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()

        with self.lock_tree_write():
            branch_transport = mkdirs(sub_path)
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']: