# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree."""

from __future__ import absolute_import
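
# Example (illustrative only): a GitWorkingTree is normally obtained through
# the generic Breezy API rather than instantiated directly.
#
#     from breezy.workingtree import WorkingTree
#     wt = WorkingTree.open('/path/to/git/checkout')  # a GitWorkingTree
#     with wt.lock_read():
#         parent_ids = wt.get_parent_ids()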

import errno
import itertools
import operator
import os
import posixpath
import stat
import sys

from collections import defaultdict

from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    Index,
    SHA1Writer,
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
    FLAG_STAGEMASK,
    read_submodule_head,
    validate_path,
    write_index_dict,
    )
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    S_ISGITLINK,
    )

from .. import (
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    errors,
    globbing,
    ignores,
    lock,
    merge,
    osutils,
    revision as _mod_revision,
    trace,
    transport as _mod_transport,
    tree,
    workingtree,
    )
from ..decorators import (
    only_raises,
    )
from ..mutabletree import (
    MutableTree,
    )
from .mapping import (
    mode_kind,
    )

IGNORE_FILENAME = ".gitignore"


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = self.repository._git._controltransport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()

    def supports_tree_reference(self):

    def supports_rename_tracking(self):

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False
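
    # The Index object read above is kept in memory while the tree is locked;
    # _index_dirty records whether it has unwritten changes, and unlock()
    # below writes it back to .git/index when the write lock is released.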

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._read_index()
        self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        if not self._lock_mode:
            self._lock_mode = 'w'
            try:
                self._index_file = GitFile(
                    self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
            self._read_index()
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1

    def lock_tree_write(self):
        self.branch.lock_read()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def lock_write(self, token=None):
        self.branch.lock_write()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):

    def break_lock(self):
        try:
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
            pass
        self.branch.break_lock()

    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)

        self._lock_count -= 1
        if self._lock_count > 0:
            return
        if self._index_file is not None:
            if self._index_dirty:
                self._flush(self._index_file)
                self._index_file.close()
            else:
                # Something else already triggered a write of the index
                # file by calling .flush()
                self._index_file.abort()
            self._index_file = None
        self._lock_mode = None

    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False
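        # If the deliberately mis-cased probe name resolves to the real
        # ".git/config", the filesystem folds case and the tree is treated as
        # case-insensitive; a NoSuchFile error means case is preserved.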

    def merge_modified(self):
        return {}

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(
                revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes(
                'MERGE_HEAD', b'\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(
                revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                parents.append(
                    self.branch.lookup_foreign_revision_id(revision_id))
        return parents
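
    # Pending merges are stored the way git records them: MERGE_HEAD in the
    # control directory holds one revision per line, written by
    # _set_merges_from_parent_ids() and read back by get_parent_ids().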

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
               force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if not isinstance(files, list):
            files = [files]

        all_files = set()
        files_to_backup = []

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(
                file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                all_files.add(filepath)
                recurse_directory_to_add_files(filepath)

            files = list(all_files)

            if len(files) == 0:
                return  # nothing to do

            # Sort needed to first handle directory content before the
            # directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(
                        self.basis_tree(), include_unchanged=True,
                        require_versioned=False, want_unversioned=True,
                        specific_files=files):
                    if versioned[0] is False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None)
                            and osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(path[1]))

            for f in files:
                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None
                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None

    def smart_add(self, file_list, recurse=True, action=None, save=True):
        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(
                    self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    call_action(filepath, kind)
                    self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                    user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                try:
                    transport = _mod_transport.get_transport_from_path(
                        abs_user_dir)
                    _mod_controldir.ControlDirFormat.find_format(transport)
                except errors.NotBranchError:
                    pass
                except errors.UnsupportedFormatError:
                    pass
                else:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if (self.is_control_filename(subp) or
                            self.mapping.is_special_file(subp)):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        (index, subpath) = self._lookup_index(
                            subp.encode('utf-8'))
                        if subp in conflicts_related:
                            continue
                        call_action(subp, kind)
                        self._index_add_entry(subp, kind)
        return added, ignored
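
    # The return value above pairs the list of paths that were added with a
    # dict mapping each matching ignore pattern to the paths that were
    # skipped because of it.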

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        if from_dir is None:
            from_dir = u""
        encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc)
        for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(
                    dir_relpath.decode(osutils._fs_enc)):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                    try:
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set([p.decode('utf-8')
                               for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=True))
            for p in (all_paths - index_paths):
                if not self._has_dir(p.encode('utf-8')):
                    yield p

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                    try:
                        kind = osutils.file_kind(fullpath)
                    except OSError as e:
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                        raise
                    if (kind == 'directory' and f != '' and
                            os.path.exists(os.path.join(fullpath, '.git'))):
                        kind = 'tree-reference'
                    kinds[pos] = kind

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        try:
            shaf = SHA1Writer(f)
            write_index_dict(shaf, self.index)
            shaf.close()
        except BaseException:
            f.abort()
            raise
        self._index_dirty = False

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_path(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        root_tree = self.store[head].tree
        try:
            tree_lookup_path(self.store.__getitem__,
                             root_tree, path.encode('utf-8'))
        except KeyError:
            return False
        else:
            return True

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
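        # Two layers are consulted: Breezy's own runtime and user ignore
        # globs first, then the .gitignore machinery via dulwich's
        # IgnoreFilterManager further down.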
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(
                ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += '/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip('/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps:
            return None
        if not ps[-1].is_exclude:
            return None
        return bytes(ps[-1])

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager
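
    # IgnoreFilterManager.from_repo() builds matchers from the repository's
    # .gitignore data; find_matching() yields the patterns that match a path,
    # and a pattern's is_exclude flag is False for negated ("!") rules, which
    # is why is_ignored() above only treats an excluding final match as
    # ignored.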

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
        else:
            _mod_revision.check_not_reserved_id(revid)
            try:
                self.branch.generate_revision_history(revid)
            except errors.NoSuchRevision:
                raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        try:
            head = self.repository._git.head()
        except KeyError:
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        else:
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return ("GIT", index[subpath].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                if e.errno in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            encoded_path = path.encode('utf-8')
            (index, subpath) = self._lookup_index(encoded_path)
            try:
                return mode_kind(index[subpath].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(encoded_path):
                    return 'directory'
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        encoded_path = self.abspath(path.decode('utf-8')).encode(
            osutils._fs_enc)
        return index_entry_from_path(encoded_path)

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable",
                       osutils.supports_executable)():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    return False
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable",
                   osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(
                path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(
                path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(
                    self._iter_files_recursive(from_dir, include_dirs=True))
            else:
                encoded_from_dir = self.abspath(from_dir).encode(
                    osutils._fs_enc)
                path_iterator = sorted(
                    [os.path.join(from_dir, name.decode(osutils._fs_enc))
                     for name in os.listdir(encoded_from_dir)
                     if not self.controldir.is_control_filename(
                         name.decode(osutils._fs_enc)) and
                     not self.mapping.is_special_file(
                         name.decode(osutils._fs_enc))])
            for path in path_iterator:
                try:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                try:
                    value = index[index_path]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(
                        parent, dir_ids):
                    pass
                if kind in ('directory', 'tree-reference'):
                    if self._has_dir(encoded_path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                        status = "V"
                    elif self.is_ignored(path):
                        status = "I"
                        ie = fk_entries[kind]()
                    else:
                        status = "?"
                        ie = fk_entries[kind]()
                    yield (
                        posixpath.relpath(path, from_dir), status, kind,
                        ie.file_id, ie)
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield (posixpath.relpath(path, from_dir), "V", ie.kind,
                           ie.file_id, ie)
                else:
                    ie = fk_entries[kind]()
                    yield (posixpath.relpath(path, from_dir), ("I" if
                           self.is_ignored(path) else "?"), kind, None, ie)

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                    pass
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            paths = {u""}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")
                    if path in paths:
                        break
                    paths.add(path)
            return paths

    def iter_child_entries(self, path, file_id=None):
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            for item_path, value in self.index.iteritems():
                decoded_item_path = item_path.decode('utf-8')
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(path, decoded_item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(decoded_item_path, path)
                if '/' in subpath:
                    dirname = subpath.split('/', 1)[0]
                    file_ie = self._get_dir_ie(
                        posixpath.join(path, dirname), parent_id)
                else:
                    (unused_parent, name) = posixpath.split(decoded_item_path)
                    file_ie = self._get_file_ie(
                        name, decoded_item_path, value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(
                        item_path.decode('utf-8')))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))
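
    # Git marks a conflicted path by keeping index entries at a non-zero
    # merge stage; the stage bits live in the entry's flags field, so setting
    # or clearing FLAG_STAGEMASK here is what flags or resolves a conflict as
    # far as breezy is concerned.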

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict',
                                           'contents conflict'):
                    try:
                        self._set_conflicted(
                            conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(
                            self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
                ((current_directory_path, fileid),
                 [(file1_path, file1_name, file1_kind, (lstat), file1_id,
                   file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
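        # The loop below merges two sorted streams: the versioned entries
        # from the index (via _walkdirs) and the on-disk entries from
        # osutils.walkdirs, so unknown, missing and versioned files can all
        # be reported in a single pass.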
        from bisect import bisect_left

        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT
                    or (sys.platform == 'win32'
                        and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
                        and len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # version
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                                             ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content) and
                            self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
                             - (current_inv[0][0] < cur_disk_dir_relpath))
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                            current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[4],
                                         inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             path_elements[0][2], path_elements[0][3],
                             None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             'unknown', None, path_elements[0][4],
                             path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=u""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        per_dir[(u'', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 kind, None,
                 self.path2id(path.decode("utf-8")),
                 kind))
        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))
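
    # _walkdirs() groups index entries by containing directory and returns
    # ((dir_relpath, dir_file_id), sorted_entries) pairs, which is the shape
    # walkdirs() above consumes for the inventory side of its merge.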

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(
                    old_path.encode('utf-8'))
                try:
                    self._index_del_entry(index, old_subpath)
                except KeyError:
                    pass
                else:
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        new_path, ie.kind,
                        reference_revision=ie.reference_revision)
                else:
                    self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION.

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks
                        # and directories have a "text" which is the empty
                        # text, and we know that won't mess up annotations. But
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None
            if self.branch.head is None
            else self.store[self.branch.head].tree)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self.index.clear()
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(
                        self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(
                                entry.path.decode('utf-8')))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                                 0, 0, len(
                                                     obj.as_raw_string()), 0,
                                                 0, 0))
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)
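
    # reset_state() rebuilds the index directly from the branch head's tree;
    # when a working-tree file is missing, stat information is fabricated
    # from the blob in the object store so an index entry can still be
    # written.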

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
             show_base=False):
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local)
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                    merge.merge_inner(
                        self.branch,
                        new_basis_tree,
                        basis_tree,
                        this_tree=self,
                        change_reporter=change_reporter,
                        show_base=show_base)
            return count

    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
            try:
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, file_id=None):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path, file_id=None):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, file_id=None, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
        def mkdirs(path):
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
            return wt

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    @property
    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']: