# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""
20
from __future__ import absolute_import
23
from collections import defaultdict
25
from dulwich.ignore import (
28
from dulwich.file import GitFile, FileLocked
29
from dulwich.index import (
32
build_index_from_tree,
34
index_entry_from_path,
35
index_entry_from_stat,
42
from dulwich.object_store import (
45
from dulwich.objects import (
52
from dulwich.repo import (
63
conflicts as _mod_conflicts,
65
controldir as _mod_controldir,
71
revision as _mod_revision,
73
transport as _mod_transport,
77
from ..decorators import (
83
from ..mutabletree import (
95
from .mapping import (
100
IGNORE_FILENAME = ".gitignore"
103
class GitWorkingTree(MutableGitIndexTree,workingtree.WorkingTree):
104
"""A Git working tree."""
106
def __init__(self, controldir, repo, branch):
107
MutableGitIndexTree.__init__(self)
108
basedir = controldir.root_transport.local_abspath('.')
109
self.basedir = osutils.realpath(basedir)
110
self.controldir = controldir
111
self.repository = repo
112
self.store = self.repository._git.object_store
113
self.mapping = self.repository.get_mapping()
114
self._branch = branch
115
self._transport = controldir.transport
116
self._format = GitWorkingTreeFormat()
118
self._index_file = None
119
self.views = self._make_views()
120
self._rules_searcher = None
121
self._detect_case_handling()
124
def supports_tree_reference(self):
127
def supports_rename_tracking(self):
130
def _read_index(self):
    """Load the Git index file from the control directory into memory.

    Populates ``self.index`` and marks the in-memory copy as clean,
    since it has just been read from disk.
    """
    self.index = Index(self.control_transport.local_abspath('index'))
    # Freshly read from disk: nothing to flush back yet.
    self._index_dirty = False
135
"""Lock the repository for read operations.
137
:return: A breezy.lock.LogicalLockResult.
139
if not self._lock_mode:
140
self._lock_mode = 'r'
144
self._lock_count += 1
145
self.branch.lock_read()
146
return lock.LogicalLockResult(self.unlock)
148
def _lock_write_tree(self):
149
# TODO(jelmer): Actually create index.lock
150
if not self._lock_mode:
151
self._lock_mode = 'w'
154
self._index_file = GitFile(self.control_transport.local_abspath('index'), 'wb')
156
raise errors.LockContention('index')
158
elif self._lock_mode == 'r':
159
raise errors.ReadOnlyError(self)
163
def lock_tree_write(self):
164
self.branch.lock_read()
166
self._lock_write_tree()
167
return lock.LogicalLockResult(self.unlock)
172
def lock_write(self, token=None):
173
self.branch.lock_write()
175
self._lock_write_tree()
176
return lock.LogicalLockResult(self.unlock)
182
return self._lock_count >= 1
184
def get_physical_lock_status(self):
187
def break_lock(self):
189
self.control_transport.delete('index.lock')
190
except errors.NoSuchFile:
192
self.branch.break_lock()
194
@only_raises(errors.LockNotHeld, errors.LockBroken)
196
if not self._lock_count:
197
return lock.cant_unlock_not_held(self)
200
self._lock_count -= 1
201
if self._lock_count > 0:
203
if self._index_file is not None:
204
if self._index_dirty:
205
self._flush(self._index_file)
206
self._index_file.close()
208
# Somebody else already wrote the index file
209
# by calling .flush()
210
self._index_file.abort()
211
self._index_file = None
212
self._lock_mode = None
220
def _detect_case_handling(self):
222
self._transport.stat(".git/cOnFiG")
223
except errors.NoSuchFile:
224
self.case_sensitive = True
226
self.case_sensitive = False
228
def merge_modified(self):
231
def set_merge_modified(self, modified_hashes):
    """Unsupported for this tree implementation.

    :raises errors.UnsupportedOperation: always.
    """
    raise errors.UnsupportedOperation(self.set_merge_modified, self)
234
def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
    """Set the parents of this tree from (revision_id, tree) pairs.

    Only the revision ids are used; the tree objects are discarded.
    """
    # NOTE(review): allow_leftmost_as_ghost is not forwarded to
    # set_parent_ids — confirm this is intentional.
    revision_ids = [revid for (revid, _tree) in parents_list]
    self.set_parent_ids(revision_ids)
237
def _set_merges_from_parent_ids(self, rhs_parent_ids):
239
merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
240
except errors.NoSuchRevision as e:
241
raise errors.GhostRevisionUnusableHere(e.revision)
243
self.control_transport.put_bytes('MERGE_HEAD', b'\n'.join(merges),
244
mode=self.controldir._get_file_mode())
247
self.control_transport.delete('MERGE_HEAD')
248
except errors.NoSuchFile:
251
def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
252
"""Set the parent ids to revision_ids.
254
See also set_parent_trees. This api will try to retrieve the tree data
255
for each element of revision_ids from the trees repository. If you have
256
tree data already available, it is more efficient to use
257
set_parent_trees rather than set_parent_ids. set_parent_ids is however
258
an easier API to use.
260
:param revision_ids: The revision_ids to set as the parent ids of this
261
working tree. Any of these may be ghosts.
263
with self.lock_tree_write():
264
self._check_parents_for_ghosts(revision_ids,
265
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
266
for revision_id in revision_ids:
267
_mod_revision.check_not_reserved_id(revision_id)
269
revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)
271
if len(revision_ids) > 0:
272
self.set_last_revision(revision_ids[0])
274
self.set_last_revision(_mod_revision.NULL_REVISION)
276
self._set_merges_from_parent_ids(revision_ids[1:])
278
def get_parent_ids(self):
279
"""See Tree.get_parent_ids.
281
This implementation reads the pending merges list and last_revision
282
value and uses that to decide what the parents list should be.
284
last_rev = _mod_revision.ensure_null(self._last_revision())
285
if _mod_revision.NULL_REVISION == last_rev:
290
merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
291
except errors.NoSuchFile:
294
for l in osutils.split_lines(merges_bytes):
295
revision_id = l.rstrip(b'\n')
296
parents.append(self.branch.lookup_foreign_revision_id(revision_id))
299
def check_state(self):
300
"""Check that the working state is/isn't valid."""
303
def remove(self, files, verbose=False, to_file=None, keep_files=True,
305
"""Remove nominated files from the working tree metadata.
307
:param files: File paths relative to the basedir.
308
:param keep_files: If true, the files will also be kept.
309
:param force: Delete files and directories, even if they are changed
310
and even if the directories are not empty.
312
if not isinstance(files, list):
318
def backup(file_to_backup):
319
abs_path = self.abspath(file_to_backup)
320
backup_name = self.controldir._available_backup_name(file_to_backup)
321
osutils.rename(abs_path, self.abspath(backup_name))
322
return "removed %s (but kept a copy: %s)" % (
323
file_to_backup, backup_name)
325
# Sort needed to first handle directory content before the directory
330
def recurse_directory_to_add_files(directory):
331
# Recurse directory and add all files
332
# so we can check if they have changed.
333
for parent_info, file_infos in self.walkdirs(directory):
334
for relpath, basename, kind, lstat, fileid, kind in file_infos:
335
# Is it versioned or ignored?
336
if self.is_versioned(relpath):
337
# Add nested content for deletion.
338
all_files.add(relpath)
340
# Files which are not versioned
341
# should be treated as unknown.
342
files_to_backup.append(relpath)
344
with self.lock_tree_write():
345
for filepath in files:
346
# Get file name into canonical form.
347
abspath = self.abspath(filepath)
348
filepath = self.relpath(abspath)
351
all_files.add(filepath)
352
recurse_directory_to_add_files(filepath)
354
files = list(all_files)
357
return # nothing to do
359
# Sort needed to first handle directory content before the directory
360
files.sort(reverse=True)
362
# Bail out if we are going to delete files we shouldn't
363
if not keep_files and not force:
364
for (file_id, path, content_change, versioned, parent_id, name,
365
kind, executable) in self.iter_changes(self.basis_tree(),
366
include_unchanged=True, require_versioned=False,
367
want_unversioned=True, specific_files=files):
368
if versioned[0] == False:
369
# The record is unknown or newly added
370
files_to_backup.append(path[1])
371
files_to_backup.extend(osutils.parent_directories(path[1]))
372
elif (content_change and (kind[1] is not None) and
373
osutils.is_inside_any(files, path[1])):
374
# Versioned and changed, but not deleted, and still
375
# in one of the dirs to be deleted.
376
files_to_backup.append(path[1])
377
files_to_backup.extend(osutils.parent_directories(path[1]))
385
except errors.NoSuchFile:
388
abs_path = self.abspath(f)
390
# having removed it, it must be either ignored or unknown
391
if self.is_ignored(f):
395
kind_ch = osutils.kind_marker(kind)
396
to_file.write(new_status + ' ' + f + kind_ch + '\n')
398
message = "%s does not exist" % (f, )
401
if f in files_to_backup and not force:
404
if kind == 'directory':
405
osutils.rmtree(abs_path)
407
osutils.delete_any(abs_path)
408
message = "deleted %s" % (f,)
410
message = "removed %s" % (f,)
411
self._unversion_path(f)
413
# print only one message (if any) per file.
414
if message is not None:
416
self._versioned_dirs = None
418
def smart_add(self, file_list, recurse=True, action=None, save=True):
422
# expand any symlinks in the directory part, while leaving the
424
# only expanding if symlinks are supported avoids windows path bugs
425
if osutils.has_symlinks():
426
file_list = list(map(osutils.normalizepath, file_list))
428
conflicts_related = set()
429
for c in self.conflicts():
430
conflicts_related.update(c.associated_filenames())
435
def call_action(filepath, kind):
436
if action is not None:
437
parent_path = posixpath.dirname(filepath)
438
parent_id = self.path2id(parent_path)
439
parent_ie = self._get_dir_ie(parent_path, parent_id)
440
file_id = action(self, parent_ie, filepath, kind)
441
if file_id is not None:
442
raise workingtree.SettingFileIdUnsupported()
444
with self.lock_tree_write():
445
for filepath in osutils.canonical_relpaths(self.basedir, file_list):
446
filepath, can_access = osutils.normalized_filename(filepath)
448
raise errors.InvalidNormalization(filepath)
450
abspath = self.abspath(filepath)
451
kind = osutils.file_kind(abspath)
452
if kind in ("file", "symlink"):
453
(index, subpath) = self._lookup_index(filepath.encode('utf-8'))
457
call_action(filepath, kind)
459
self._index_add_entry(filepath, kind)
460
added.append(filepath)
461
elif kind == "directory":
462
(index, subpath) = self._lookup_index(filepath.encode('utf-8'))
463
if subpath not in index:
464
call_action(filepath, kind)
466
user_dirs.append(filepath)
468
raise errors.BadFileKindError(filename=abspath, kind=kind)
469
for user_dir in user_dirs:
470
abs_user_dir = self.abspath(user_dir)
473
transport = _mod_transport.get_transport_from_path(abs_user_dir)
474
_mod_controldir.ControlDirFormat.find_format(transport)
476
except errors.NotBranchError:
478
except errors.UnsupportedFormatError:
483
trace.warning('skipping nested tree %r', abs_user_dir)
486
for name in os.listdir(abs_user_dir):
487
subp = os.path.join(user_dir, name)
488
if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
490
ignore_glob = self.is_ignored(subp)
491
if ignore_glob is not None:
492
ignored.setdefault(ignore_glob, []).append(subp)
494
abspath = self.abspath(subp)
495
kind = osutils.file_kind(abspath)
496
if kind == "directory":
497
user_dirs.append(subp)
499
if subp in self.index:
502
if subp in conflicts_related:
504
call_action(filepath, kind)
506
self._index_add_entry(subp, kind)
508
return added, ignored
510
def has_filename(self, filename):
    """Return True if a file by that name exists on disk in this tree.

    Uses ``lexists``, so a dangling symlink still counts as present.
    """
    abs_path = self.abspath(filename)
    return osutils.lexists(abs_path)
513
def _iter_files_recursive(self, from_dir=None, include_dirs=False):
516
for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
517
dir_relpath = dirpath[len(self.basedir):].strip(b"/")
518
if self.controldir.is_control_filename(dir_relpath.decode(osutils._fs_enc)):
520
for name in list(dirnames):
521
if self.controldir.is_control_filename(name.decode(osutils._fs_enc)):
522
dirnames.remove(name)
524
relpath = os.path.join(dir_relpath, name)
527
yield relpath.decode(osutils._fs_enc)
528
except UnicodeDecodeError:
529
raise errors.BadFilenameEncoding(
530
relpath, osutils._fs_enc)
531
if not self._has_dir(relpath):
532
dirnames.remove(name)
533
for name in filenames:
534
if not self.mapping.is_special_file(name):
535
yp = os.path.join(dir_relpath, name)
537
yield yp.decode(osutils._fs_enc)
538
except UnicodeDecodeError:
539
raise errors.BadFilenameEncoding(
543
"""Yield all unversioned files in this WorkingTree.
545
with self.lock_read():
546
index_paths = set([p.decode('utf-8') for p, i in self._recurse_index_entries()])
547
all_paths = set(self._iter_files_recursive(include_dirs=True))
548
for p in (all_paths - index_paths):
549
if not self._has_dir(p.encode('utf-8')):
552
def _gather_kinds(self, files, kinds):
553
"""See MutableTree._gather_kinds."""
554
with self.lock_tree_write():
555
for pos, f in enumerate(files):
556
if kinds[pos] is None:
557
fullpath = osutils.normpath(self.abspath(f))
559
kind = osutils.file_kind(fullpath)
561
if e.errno == errno.ENOENT:
562
raise errors.NoSuchFile(fullpath)
563
if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
564
kind = 'tree-reference'
568
if self._lock_mode != 'w':
569
raise errors.NotWriteLocked(self)
570
# TODO(jelmer): This shouldn't be writing in-place, but index.lock is
571
# already in use and GitFile doesn't allow overriding the lock file name :(
572
f = open(self.control_transport.local_abspath('index'), 'wb')
573
# Note that _flush will close the file
579
write_index_dict(shaf, self.index)
584
self._index_dirty = False
586
def has_or_had_id(self, file_id):
587
if self.has_id(file_id):
589
if self.had_id(file_id):
593
def had_id(self, file_id):
594
path = self._basis_fileid_map.lookup_path(file_id)
596
head = self.repository._git.head()
598
# Assume no if basis is not accessible
601
root_tree = self.store[head].tree
605
tree_lookup_path(self.store.__getitem__, root_tree, path.encode('utf-8'))
611
def get_file_mtime(self, path, file_id=None):
612
"""See Tree.get_file_mtime."""
614
return self._lstat(path).st_mtime
616
if e.errno == errno.ENOENT:
617
raise errors.NoSuchFile(path)
620
def is_ignored(self, filename):
621
r"""Check whether the filename matches an ignore pattern.
623
If the file is ignored, returns the pattern which caused it to
624
be ignored, otherwise None. So this can simply be used as a
625
boolean if desired."""
626
if getattr(self, '_global_ignoreglobster', None) is None:
628
ignore_globs.update(ignores.get_runtime_ignores())
629
ignore_globs.update(ignores.get_user_ignores())
630
self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
631
match = self._global_ignoreglobster.match(filename)
632
if match is not None:
635
if self.kind(filename) == 'directory':
637
except errors.NoSuchFile:
639
filename = filename.lstrip('/')
640
ignore_manager = self._get_ignore_manager()
641
ps = list(ignore_manager.find_matching(filename))
644
if not ps[-1].is_exclude:
648
def _get_ignore_manager(self):
649
ignoremanager = getattr(self, '_ignoremanager', None)
650
if ignoremanager is not None:
653
ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
654
self._ignoremanager = ignore_manager
655
return ignore_manager
657
def _flush_ignore_list_cache(self):
    """Drop the cached ignore manager so it is rebuilt on next use."""
    self._ignoremanager = None
660
def set_last_revision(self, revid):
661
if _mod_revision.is_null(revid):
662
self.branch.set_last_revision_info(0, revid)
664
_mod_revision.check_not_reserved_id(revid)
666
self.branch.generate_revision_history(revid)
667
except errors.NoSuchRevision:
668
raise errors.GhostRevisionUnusableHere(revid)
670
def _reset_data(self):
672
head = self.repository._git.head()
674
self._basis_fileid_map = GitFileIdMap({}, self.mapping)
676
self._basis_fileid_map = self.mapping.get_fileid_map(
677
self.store.__getitem__, self.store[head].tree)
678
self._fileid_map = self._basis_fileid_map.copy()
680
def get_file_verifier(self, path, file_id=None, stat_value=None):
681
with self.lock_read():
682
(index, subpath) = self._lookup_index(path.encode('utf-8'))
684
return ("GIT", index[subpath].sha)
686
if self._has_dir(path):
688
raise errors.NoSuchFile(path)
690
def get_file_sha1(self, path, file_id=None, stat_value=None):
691
with self.lock_read():
692
if not self.is_versioned(path):
693
raise errors.NoSuchFile(path)
694
abspath = self.abspath(path)
696
return osutils.sha_file_by_name(abspath)
698
if e.errno in (errno.EISDIR, errno.ENOENT):
702
def revision_tree(self, revid):
    """Return the tree for the given revision, via the repository."""
    return self.repository.revision_tree(revid)
705
def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
706
mode = stat_result.st_mode
707
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
709
def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
    """Look up the executable bit in the basis tree.

    ``stat_result`` is ignored; the answer comes from the basis tree.
    """
    basis = self.basis_tree()
    return basis.is_executable(path)
712
def stored_kind(self, path, file_id=None):
713
with self.lock_read():
714
encoded_path = path.encode('utf-8')
715
(index, subpath) = self._lookup_index(encoded_path)
717
return mode_kind(index[subpath].mode)
719
# Maybe it's a directory?
720
if self._has_dir(encoded_path):
722
raise errors.NoSuchFile(path)
724
def _lstat(self, path):
    """lstat the file at the given tree-relative path."""
    abs_path = self.abspath(path)
    return os.lstat(abs_path)
727
def _live_entry(self, path):
    """Build an index entry from the file currently on disk.

    :param path: tree path as UTF-8 encoded bytes.
    """
    # Decode from the index encoding, then re-encode for the filesystem.
    disk_path = self.abspath(path.decode('utf-8')).encode(osutils._fs_enc)
    return index_entry_from_path(disk_path)
730
def is_executable(self, path, file_id=None):
731
with self.lock_read():
732
if getattr(self, "_supports_executable", osutils.supports_executable)():
733
mode = self._lstat(path).st_mode
735
(index, subpath) = self._lookup_index(path.encode('utf-8'))
737
mode = index[subpath].mode
740
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
742
def _is_executable_from_path_and_stat(self, path, stat_result):
743
if getattr(self, "_supports_executable", osutils.supports_executable)():
744
return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
746
return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
748
def list_files(self, include_root=False, from_dir=None, recursive=True):
752
fk_entries = {'directory': tree.TreeDirectory,
753
'file': tree.TreeFile,
754
'symlink': tree.TreeLink,
755
'tree-reference': tree.TreeReference}
756
with self.lock_read():
757
root_ie = self._get_dir_ie(u"", None)
758
if include_root and not from_dir:
759
yield "", "V", root_ie.kind, root_ie.file_id, root_ie
760
dir_ids[u""] = root_ie.file_id
762
path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
764
path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
765
os.listdir(self.abspath(from_dir).encode(osutils._fs_enc))
766
if not self.controldir.is_control_filename(name.decode(osutils._fs_enc))
767
and not self.mapping.is_special_file(name.decode(osutils._fs_enc))])
768
for path in path_iterator:
770
encoded_path = path.encode("utf-8")
771
except UnicodeEncodeError:
772
raise errors.BadFilenameEncoding(
773
path, osutils._fs_enc)
774
(index, index_path) = self._lookup_index(encoded_path)
776
value = index[index_path]
779
kind = self.kind(path)
780
parent, name = posixpath.split(path)
781
for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
783
if kind in ('directory', 'tree-reference'):
785
if self._has_dir(encoded_path):
786
ie = self._get_dir_ie(path, self.path2id(path))
789
elif self.is_ignored(path):
791
ie = fk_entries[kind]()
795
ie = fk_entries[kind]()
797
yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
799
if value is not None:
800
ie = self._get_file_ie(name, path, value, dir_ids[parent])
801
yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
803
ie = fk_entries[kind]()
804
yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie
806
def all_file_ids(self):
807
with self.lock_read():
808
ids = {u"": self.path2id("")}
809
for path in self.index:
810
if self.mapping.is_special_file(path):
812
path = path.decode("utf-8")
813
parent = posixpath.dirname(path).strip("/")
814
for e in self._add_missing_parent_ids(parent, ids):
816
ids[path] = self.path2id(path)
817
return set(ids.values())
819
def all_versioned_paths(self):
820
with self.lock_read():
822
for path in self.index:
823
if self.mapping.is_special_file(path):
825
path = path.decode("utf-8")
828
path = posixpath.dirname(path).strip("/")
834
def iter_child_entries(self, path, file_id=None):
835
encoded_path = path.encode('utf-8')
836
with self.lock_read():
837
parent_id = self.path2id(path)
839
seen_children = set()
840
for item_path, value in self.index.iteritems():
841
decoded_item_path = item_path.decode('utf-8')
842
if self.mapping.is_special_file(item_path):
844
if not osutils.is_inside(path, decoded_item_path):
847
subpath = posixpath.relpath(decoded_item_path, path)
849
dirname = subpath.split('/', 1)[0]
850
file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
852
(unused_parent, name) = posixpath.split(decoded_item_path)
853
file_ie = self._get_file_ie(
854
name, decoded_item_path, value, parent_id)
856
if not found_any and path != u'':
857
raise errors.NoSuchFile(path)
860
with self.lock_read():
861
conflicts = _mod_conflicts.ConflictList()
862
for item_path, value in self.index.iteritems():
863
if value.flags & FLAG_STAGEMASK:
864
conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
867
def set_conflicts(self, conflicts):
869
for conflict in conflicts:
870
if conflict.typestring in ('text conflict', 'contents conflict'):
871
by_path.add(conflict.path.encode('utf-8'))
873
raise errors.UnsupportedOperation(self.set_conflicts, self)
874
with self.lock_tree_write():
875
for path in self.index:
876
self._set_conflicted(path, path in by_path)
878
def _set_conflicted(self, path, conflicted):
879
trace.mutter('change conflict: %r -> %r', path, conflicted)
880
value = self.index[path]
881
self._index_dirty = True
883
self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
885
self.index[path] = (value[:9] + (value[9] &~ FLAG_STAGEMASK, ))
887
def add_conflicts(self, new_conflicts):
888
with self.lock_tree_write():
889
for conflict in new_conflicts:
890
if conflict.typestring in ('text conflict', 'contents conflict'):
892
self._set_conflicted(conflict.path.encode('utf-8'), True)
894
raise errors.UnsupportedOperation(self.add_conflicts, self)
896
raise errors.UnsupportedOperation(self.add_conflicts, self)
898
def walkdirs(self, prefix=""):
899
"""Walk the directories of this tree.
901
returns a generator which yields items in the form:
902
((curren_directory_path, fileid),
903
[(file1_path, file1_name, file1_kind, (lstat), file1_id,
906
This API returns a generator, which is only valid during the current
907
tree transaction - within a single lock_read or lock_write duration.
909
If the tree is not locked, it may cause an error to be raised,
910
depending on the tree implementation.
912
from bisect import bisect_left
914
disk_top = self.abspath(prefix)
915
if disk_top.endswith('/'):
916
disk_top = disk_top[:-1]
917
top_strip_len = len(disk_top) + 1
918
inventory_iterator = self._walkdirs(prefix)
919
disk_iterator = osutils.walkdirs(disk_top, prefix)
921
current_disk = next(disk_iterator)
922
disk_finished = False
924
if not (e.errno == errno.ENOENT or
925
(sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
930
current_inv = next(inventory_iterator)
932
except StopIteration:
935
while not inv_finished or not disk_finished:
937
((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
938
cur_disk_dir_content) = current_disk
940
((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
941
cur_disk_dir_content) = ((None, None), None)
942
if not disk_finished:
943
# strip out .bzr dirs
944
if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
945
len(cur_disk_dir_content) > 0):
946
# osutils.walkdirs can be made nicer -
947
# yield the path-from-prefix rather than the pathjoined
949
bzrdir_loc = bisect_left(cur_disk_dir_content,
951
if (bzrdir_loc < len(cur_disk_dir_content)
952
and self.controldir.is_control_filename(
953
cur_disk_dir_content[bzrdir_loc][0])):
954
# we dont yield the contents of, or, .bzr itself.
955
del cur_disk_dir_content[bzrdir_loc]
957
# everything is unknown
960
# everything is missing
963
direction = ((current_inv[0][0] > cur_disk_dir_relpath) -
964
(current_inv[0][0] < cur_disk_dir_relpath))
966
# disk is before inventory - unknown
967
dirblock = [(relpath, basename, kind, stat, None, None) for
968
relpath, basename, kind, stat, top_path in
969
cur_disk_dir_content]
970
yield (cur_disk_dir_relpath, None), dirblock
972
current_disk = next(disk_iterator)
973
except StopIteration:
976
# inventory is before disk - missing.
977
dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
978
for relpath, basename, dkind, stat, fileid, kind in
980
yield (current_inv[0][0], current_inv[0][1]), dirblock
982
current_inv = next(inventory_iterator)
983
except StopIteration:
986
# versioned present directory
987
# merge the inventory and disk data together
989
for relpath, subiterator in itertools.groupby(sorted(
990
current_inv[1] + cur_disk_dir_content,
991
key=operator.itemgetter(0)), operator.itemgetter(1)):
992
path_elements = list(subiterator)
993
if len(path_elements) == 2:
994
inv_row, disk_row = path_elements
995
# versioned, present file
996
dirblock.append((inv_row[0],
997
inv_row[1], disk_row[2],
998
disk_row[3], inv_row[4],
1000
elif len(path_elements[0]) == 5:
1002
dirblock.append((path_elements[0][0],
1003
path_elements[0][1], path_elements[0][2],
1004
path_elements[0][3], None, None))
1005
elif len(path_elements[0]) == 6:
1006
# versioned, absent file.
1007
dirblock.append((path_elements[0][0],
1008
path_elements[0][1], 'unknown', None,
1009
path_elements[0][4], path_elements[0][5]))
1011
raise NotImplementedError('unreachable code')
1012
yield current_inv[0], dirblock
1014
current_inv = next(inventory_iterator)
1015
except StopIteration:
1018
current_disk = next(disk_iterator)
1019
except StopIteration:
1020
disk_finished = True
1022
def _walkdirs(self, prefix=u""):
1025
prefix = prefix.encode('utf-8')
1026
per_dir = defaultdict(set)
1028
per_dir[(u'', self.get_root_id())] = set()
1029
def add_entry(path, kind):
1030
if path == b'' or not path.startswith(prefix):
1032
(dirname, child_name) = posixpath.split(path)
1033
add_entry(dirname, 'directory')
1034
dirname = dirname.decode("utf-8")
1035
dir_file_id = self.path2id(dirname)
1036
if not isinstance(value, tuple) or len(value) != 10:
1037
raise ValueError(value)
1038
per_dir[(dirname, dir_file_id)].add(
1039
(path.decode("utf-8"), child_name.decode("utf-8"),
1041
self.path2id(path.decode("utf-8")),
1043
with self.lock_read():
1044
for path, value in self.index.iteritems():
1045
if self.mapping.is_special_file(path):
1047
if not path.startswith(prefix):
1049
add_entry(path, mode_kind(value.mode))
1050
return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))
1052
def get_shelf_manager(self):
    """Unsupported: shelving is not available for this tree.

    :raises workingtree.ShelvingUnsupported: always.
    """
    raise workingtree.ShelvingUnsupported()
1055
def store_uncommitted(self):
    """Unsupported: storing uncommitted changes is not available.

    :raises errors.StoringUncommittedNotSupported: always.
    """
    raise errors.StoringUncommittedNotSupported(self)
1058
def apply_inventory_delta(self, changes):
1059
for (old_path, new_path, file_id, ie) in changes:
1060
if old_path is not None:
1061
(index, old_subpath) = self._lookup_index(old_path.encode('utf-8'))
1063
self._index_del_entry(index, old_subpath)
1067
self._versioned_dirs = None
1068
if new_path is not None and ie.kind != 'directory':
1069
if ie.kind == 'tree-reference':
1070
self._index_add_entry(
1072
reference_revision=ie.reference_revision)
1074
self._index_add_entry(new_path, ie.kind)
1077
def annotate_iter(self, path, file_id=None,
1078
default_revision=_mod_revision.CURRENT_REVISION):
1079
"""See Tree.annotate_iter
1081
This implementation will use the basis tree implementation if possible.
1082
Lines not in the basis are attributed to CURRENT_REVISION
1084
If there are pending merges, lines added by those merges will be
1085
incorrectly attributed to CURRENT_REVISION (but after committing, the
1086
attribution will be correct).
1088
with self.lock_read():
1089
maybe_file_parent_keys = []
1090
for parent_id in self.get_parent_ids():
1092
parent_tree = self.revision_tree(parent_id)
1093
except errors.NoSuchRevisionInTree:
1094
parent_tree = self.branch.repository.revision_tree(
1096
with parent_tree.lock_read():
1097
# TODO(jelmer): Use rename/copy tracker to find path name in parent
1100
kind = parent_tree.kind(parent_path)
1101
except errors.NoSuchFile:
1104
# Note: this is slightly unnecessary, because symlinks and
1105
# directories have a "text" which is the empty text, and we
1106
# know that won't mess up annotations. But it seems cleaner
1110
parent_tree.get_file_revision(parent_path))
1111
if parent_text_key not in maybe_file_parent_keys:
1112
maybe_file_parent_keys.append(parent_text_key)
1113
# Now we have the parents of this content
1114
from breezy.annotate import Annotator
1115
from .annotate import AnnotateProvider
1116
annotate_provider = AnnotateProvider(
1117
self.branch.repository._file_change_scanner)
1118
annotator = Annotator(annotate_provider)
1120
from breezy.graph import Graph
1121
graph = Graph(annotate_provider)
1122
heads = graph.heads(maybe_file_parent_keys)
1123
file_parent_keys = []
1124
for key in maybe_file_parent_keys:
1126
file_parent_keys.append(key)
1128
text = self.get_file_text(path)
1129
this_key = (path, default_revision)
1130
annotator.add_special_text(this_key, file_parent_keys, text)
1131
annotations = [(key[-1], line)
1132
for key, line in annotator.annotate_flat(this_key)]
1135
def _rename_one(self, from_rel, to_rel):
    """Rename a single path on disk, from one tree path to another."""
    source = self.abspath(from_rel)
    target = self.abspath(to_rel)
    os.rename(source, target)
1138
def _build_checkout_with_index(self):
1139
build_index_from_tree(
1140
self.user_transport.local_abspath('.'),
1141
self.control_transport.local_abspath("index"),
1143
None if self.branch.head is None else self.store[self.branch.head].tree)
1145
def reset_state(self, revision_ids=None):
1146
"""Reset the state of the working tree.
1148
This does a hard-reset to a last-known-good state. This is a way to
1149
fix if something got corrupted (like the .git/index file)
1151
with self.lock_tree_write():
1152
if revision_ids is not None:
1153
self.set_parent_ids(revision_ids)
1155
self._index_dirty = True
1156
if self.branch.head is not None:
1157
for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
1158
if not validate_path(entry.path):
1161
if S_ISGITLINK(entry.mode):
1162
pass # TODO(jelmer): record and return submodule paths
1164
# Let's at least try to use the working tree file:
1166
st = self._lstat(self.abspath(entry.path.decode('utf-8')))
1168
# But if it doesn't exist, we'll make something up.
1169
obj = self.store[entry.sha]
1170
st = os.stat_result((entry.mode, 0, 0, 0,
1171
0, 0, len(obj.as_raw_string()), 0,
1173
(index, subpath) = self._lookup_index(entry.path)
1174
index[subpath] = index_entry_from_stat(st, entry.sha, 0)
1176
def pull(self, source, overwrite=False, stop_revision=None,
1177
change_reporter=None, possible_transports=None, local=False,
1179
with self.lock_write(), source.lock_read():
1180
old_revision = self.branch.last_revision()
1181
basis_tree = self.basis_tree()
1182
count = self.branch.pull(source, overwrite, stop_revision,
1183
possible_transports=possible_transports,
1185
new_revision = self.branch.last_revision()
1186
if new_revision != old_revision:
1187
with basis_tree.lock_read():
1188
new_basis_tree = self.branch.basis_tree()
1194
change_reporter=change_reporter,
1195
show_base=show_base)
1198
def add_reference(self, sub_tree):
    """Add a TreeReference to the tree, pointing at sub_tree.

    :param sub_tree: subtree to add.
    :raises BadReferenceTarget: if *sub_tree* is not inside this tree.
    """
    with self.lock_tree_write():
        try:
            sub_tree_path = self.relpath(sub_tree.basedir)
        except errors.PathNotChild:
            raise BadReferenceTarget(
                self, sub_tree, 'Target not inside tree.')
        self._add([sub_tree_path], [None], ['tree-reference'])
1212
def _read_submodule_head(self, path):
    """Read the submodule HEAD for the checkout at *path*."""
    submodule_dir = self.abspath(path)
    return read_submodule_head(submodule_dir)
1215
def get_reference_revision(self, path, file_id=None):
    """Return the revision id of the submodule checked out at *path*.

    :return: NULL_REVISION when the submodule has no HEAD, otherwise
        the Bazaar revision id mapped from the git sha.
    """
    hexsha = self._read_submodule_head(path)
    if hexsha is None:
        # Submodule not checked out (no HEAD available).
        return _mod_revision.NULL_REVISION
    return self.branch.lookup_foreign_revision_id(hexsha)
1221
def get_nested_tree(self, path, file_id=None):
    """Open the working tree nested at *path* inside this tree."""
    nested_location = self.abspath(path)
    return workingtree.WorkingTree.open(nested_location)
1224
def _directory_is_tree_reference(self, relpath):
    """Report whether the directory at *relpath* is a tree reference.

    As a special case, a directory containing control files is a tree
    reference — except that the root of the tree is not.
    """
    if not relpath:
        # The tree root itself is never a reference; preserve the
        # original's short-circuit return value.
        return relpath
    return osutils.lexists(self.abspath(relpath) + u"/.git")
1229
def extract(self, sub_path, file_id=None, format=None):
    """Extract a subtree from this tree.

    A new branch will be created, relative to the path for this tree.

    :param sub_path: tree-relative path of the subtree to extract.
    :param format: control dir format for the new tree; defaults to
        this tree's cloning metadir.
    :return: the new working tree.
    """
    # NOTE(review): the nested helper's `def` line was lost in
    # extraction; reconstructed from its visible body.
    def mkdirs(path):
        segments = osutils.splitpath(path)
        transport = self.branch.controldir.root_transport
        for name in segments:
            transport = transport.clone(name)
            transport.ensure_base()
        return transport

    with self.lock_tree_write():
        branch_transport = mkdirs(sub_path)
        if format is None:
            format = self.controldir.cloning_metadir()
        branch_transport.ensure_base()
        branch_bzrdir = format.initialize_on_transport(branch_transport)
        try:
            repo = branch_bzrdir.find_repository()
        except errors.NoRepositoryPresent:
            repo = branch_bzrdir.create_repository()
        if not repo.supports_rich_root():
            raise errors.RootNotRich()
        new_branch = branch_bzrdir.create_branch()
        new_branch.pull(self.branch)
        for parent_id in self.get_parent_ids():
            new_branch.fetch(self.branch, parent_id)
        tree_transport = self.controldir.root_transport.clone(sub_path)
        if tree_transport.base != branch_transport.base:
            tree_bzrdir = format.initialize_on_transport(tree_transport)
            tree_bzrdir.set_branch_reference(new_branch)
        else:
            tree_bzrdir = branch_bzrdir
        wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
        wt.set_parent_ids(self.get_parent_ids())
        return wt
1269
def _get_check_refs(self):
    """Return the references needed to perform a check of this tree.

    The default implementation returns no refs, and is only suitable for
    trees that have no local caching and can commit on ghosts at any time.

    :seealso: breezy.check for details about check_refs.
    """
    # No local caching here, so no refs are required (per the docstring).
    return []
1279
def copy_content_into(self, tree, revision_id=None):
    """Copy the current content and user files of this tree into tree."""
    with self.lock_read():
        if revision_id is None:
            merge.transform_tree(tree, self)
        else:
            # TODO now merge from tree.last_revision to revision (to
            # preserve user local changes)
            try:
                other_tree = self.revision_tree(revision_id)
            except errors.NoSuchRevision:
                # Fall back to the repository when the tree cache
                # cannot supply this revision.
                other_tree = self.branch.repository.revision_tree(
                    revision_id)

            merge.transform_tree(tree, other_tree)
            if revision_id == _mod_revision.NULL_REVISION:
                # NOTE(review): reconstructed branch — the empty-parent
                # case for NULL_REVISION was lost in extraction.
                new_parents = []
            else:
                new_parents = [revision_id]
            tree.set_parent_ids(new_parents)
1301
class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):
    """Working tree format for Git working trees."""

    _tree_class = GitWorkingTree

    # Capability flags: git working trees do not version directories,
    # carry no bzr-style file ids, cannot stash uncommitted changes,
    # never allow ghost parents on either side, require normalized
    # unicode filenames, and do not track merge-modified hashes.
    supports_versioned_directories = False
    supports_setting_file_ids = False
    supports_store_uncommitted = False
    supports_leftmost_parent_id_as_ghost = False
    supports_righthand_parent_id_as_ghost = False
    requires_normalized_unicode_filenames = True
    supports_merge_modified = False
1320
def _matchingcontroldir(self):
    """Return the control dir format that matches this tree format."""
    from .dir import LocalGitControlDirFormat
    matching_format = LocalGitControlDirFormat()
    return matching_format
1324
def get_format_description(self):
    """Return a human-readable description of this tree format."""
    description = "Git Working Tree"
    return description
1327
def initialize(self, a_controldir, revision_id=None, from_branch=None,
1328
accelerator_tree=None, hardlink=False):
1329
"""See WorkingTreeFormat.initialize()."""
1330
if not isinstance(a_controldir, LocalGitDir):
1331
raise errors.IncompatibleFormat(self, a_controldir)
1332
branch = a_controldir.open_branch(nascent_ok=True)
1333
if revision_id is not None:
1334
branch.set_last_revision(revision_id)
1335
wt = GitWorkingTree(
1336
a_controldir, a_controldir.open_repository(), branch)
1337
for hook in MutableTree.hooks['post_build_tree']: