# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import

from collections import defaultdict
from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
from dulwich.object_store import (
from dulwich.objects import (
from dulwich.repo import (
from ... import (
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    revision as _mod_revision,
    transport as _mod_transport,
from ...decorators import (
from ...mutabletree import (
from .mapping import (


IGNORE_FILENAME = ".gitignore"
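
# A GitWorkingTree is normally obtained through the generic breezy API; a
# minimal sketch (the checkout path below is hypothetical):
#
#   from breezy.workingtree import WorkingTree
#   wt = WorkingTree.open('/path/to/git/checkout')  # -> GitWorkingTree
#   with wt.lock_read():
#       parent_ids = wt.get_parent_ids()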


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()

    def supports_tree_reference(self):

    def supports_rename_tracking(self):

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False
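        # ``self.index`` is a dulwich Index: a dict-like mapping from UTF-8
        # encoded relative paths to index entries (sha, mode, stat data).
        # Sketch of the dulwich API relied on below (sample path hypothetical):
        #
        #   entry = self.index[b'README']          # KeyError if not tracked
        #   for path, entry in self.index.iteritems():
        #       ...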

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
            self._read_index()
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
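        # (GitFile(path, 'wb') writes to ``<path>.lock`` and only renames it
        # over the real file on close; dulwich raises FileLocked when the lock
        # file already exists, which is translated to LockContention below.)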
        if not self._lock_mode:
            self._lock_mode = 'w'
            try:
                self._index_file = GitFile(self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)

    def lock_tree_write(self):
        self.branch.lock_read()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)

    def lock_write(self, token=None):
        self.branch.lock_write()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):

    def break_lock(self):
        try:
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
            pass
        self.branch.break_lock()

    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
            return
        if self._index_file is not None:
            if self._index_dirty:
                self._flush(self._index_file)
                self._index_file.close()
            else:
                # Somebody else already wrote the index file
                # by calling .flush()
                self._index_file.abort()
            self._index_file = None
        self._lock_mode = None

    def _detect_case_handling(self):
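        # Probe the repository's own config file with scrambled case: if the
        # stat succeeds the filesystem folds case (e.g. default macOS or
        # Windows volumes); otherwise treat it as case-sensitive.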
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def merge_modified(self):

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        self.control_transport.put_bytes('MERGE_HEAD', b'\n'.join(merges),
            mode=self.controldir._get_file_mode())
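        # MERGE_HEAD mirrors what ``git merge`` leaves behind: one git commit
        # sha per line, one for each pending (right-hand) merge parent.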
        try:
            self.control_transport.delete('MERGE_HEAD')
        except errors.NoSuchFile:
            pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                parents.append(self.branch.lookup_foreign_revision_id(revision_id))
        return parents

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
            force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if not isinstance(files, list):
            files = [files]

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory
        all_files = set()
        files_to_backup = []

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)

                if len(filepath) > 0:
                    all_files.add(filepath)
                    recurse_directory_to_add_files(filepath)

            files = list(all_files)
            if len(files) == 0:
                return # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                        include_unchanged=True, require_versioned=False,
                        want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))

            for f in files:
                if f == '':
                    continue
                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None

                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None

    def smart_add(self, file_list, recurse=True, action=None, save=True):
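        # (Generic breezy "add" entry point.  A minimal, hypothetical use:
        #    added, ignored = wt.smart_add(['foo.txt', 'docs/'])
        # ``added`` lists the newly versioned paths; ``ignored`` maps ignore
        # patterns to the paths they matched.)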
        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                    if subpath in index:
                        # Already present
                        continue
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                if user_dir != '':
                    try:
                        transport = _mod_transport.get_transport_from_path(abs_user_dir)
                        _mod_controldir.ControlDirFormat.find_format(transport)
                        subtree = True
                    except errors.NotBranchError:
                        subtree = False
                    except errors.UnsupportedFormatError:
                        subtree = False
                else:
                    subtree = False
                if subtree:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        if subp in self.index:
                            # Already present
                            continue
                        if subp in conflicts_related:
                            continue
                        call_action(filepath, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)
            return added, ignored

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(dir_relpath.decode(osutils._fs_enc)):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                try:
                    yield relpath.decode(osutils._fs_enc)
                except UnicodeDecodeError:
                    raise errors.BadFilenameEncoding(
                        relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                    try:
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set([p.decode('utf-8') for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=True))
            for p in (all_paths - index_paths):
                if not self._has_dir(p.encode('utf-8')):
                    yield p

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                    try:
                        kind = osutils.file_kind(fullpath)
                    except OSError as e:
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                    if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
                        kind = 'tree-reference'
                    kinds[pos] = kind

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file name :(
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        shaf = SHA1Writer(f)
        write_index_dict(shaf, self.index)
        self._index_dirty = False

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_path(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        try:
            root_tree = self.store[head].tree
        except KeyError:
            return False
        try:
            tree_lookup_path(self.store.__getitem__, root_tree, path)
            return True
        except KeyError:
            return False

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            (num, message) = e.args
            if num == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        encoded_filename = filename.encode('utf-8')
        try:
            if self.kind(filename) == 'directory':
                encoded_filename += b'/'
        except errors.NoSuchFile:
            pass
        encoded_filename = encoded_filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
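        # dulwich's IgnoreFilterManager.find_matching() yields the patterns
        # that matched, in order; the last match wins, and a pattern whose
        # ``is_exclude`` is False is a negation ("!pattern"), i.e. the path is
        # explicitly un-ignored.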
        if not ps[-1].is_exclude:
            return None

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return False
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        try:
            head = self.repository._git.head()
        except KeyError:
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        else:
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return ("GIT", index[subpath].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                (num, message) = e.args
                if num in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            encoded_path = path.encode('utf-8')
            (index, subpath) = self._lookup_index(encoded_path)
            try:
                return mode_kind(index[subpath].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(encoded_path):
                    return 'directory'
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        return index_entry_from_path(self.abspath(path.decode('utf-8')).encode(osutils._fs_enc))

    def is_executable(self, path, file_id=None):
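        # On filesystems without a usable executable bit (e.g. FAT, default
        # Windows setups) fall back to the mode recorded in the git index
        # rather than the on-disk mode.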
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    return False
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        if from_dir is None:
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
            else:
                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                    os.listdir(self.abspath(from_dir).encode(osutils._fs_enc))
                    if not self.controldir.is_control_filename(name.decode(osutils._fs_enc))
                    and not self.mapping.is_special_file(name.decode(osutils._fs_enc))])
            for path in path_iterator:
                try:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                try:
                    value = index[index_path]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                    pass
                if kind in ('directory', 'tree-reference'):
                    if self._has_dir(encoded_path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                        status = "V"
                        file_id = ie.file_id
                    elif self.is_ignored(path):
                        status = "I"
                        ie = fk_entries[kind]()
                        file_id = None
                    else:
                        status = "?"
                        ie = fk_entries[kind]()
                        file_id = None
                    yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                else:
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                    pass
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            paths = {u""}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            seen_children = set()
            for item_path, value in self.index.iteritems():
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(encoded_path, item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(item_path, encoded_path)
                if b'/' in subpath:
                    dirname = subpath.split(b'/', 1)[0]
                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
                else:
                    (parent, name) = posixpath.split(item_path)
                    file_ie = self._get_file_ie(
                        name.decode('utf-8'),
                        item_path.decode('utf-8'), value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
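                # A non-zero "stage" in the index entry's flags (the
                # FLAG_STAGEMASK bits) marks an unmerged entry, i.e. a
                # conflict left behind by a merge.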
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                    try:
                        self._set_conflicted(conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
        ((current_directory_path, fileid),
         [(file1_path, file1_name, file1_kind, (lstat), file1_id,
           file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
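        # The loop below merges two sorted streams: what the index
        # ("inventory") says should exist (self._walkdirs) and what is
        # actually on disk (osutils.walkdirs), so each directory block can be
        # reported as unknown, missing, or versioned-and-present.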
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT or
                (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                    len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                        ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content)
                        and self.controldir.is_control_filename(
                            cur_disk_dir_content[bzrdir_loc][0])):
                        # we don't yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = ((current_inv[0][0] > cur_disk_dir_relpath) -
                    (current_inv[0][0] < cur_disk_dir_relpath))
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                    relpath, basename, kind, stat, top_path in
                    cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                    for relpath, basename, dkind, stat, fileid, kind in
                    current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                    current_inv[1] + cur_disk_dir_content,
                    key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                            inv_row[1], disk_row[2],
                            disk_row[3], inv_row[4],
                            inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unknown disk file
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], path_elements[0][2],
                            path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], 'unknown', None,
                            path_elements[0][4], path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=u""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
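        # Group every index entry under its containing directory so the
        # result can be yielded one directory block at a time, mirroring
        # walkdirs().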
        per_dir[(u'', self.get_root_id())] = set()
        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                self.path2id(path.decode("utf-8")),
        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(old_path.encode('utf-8'))
                try:
                    self._index_del_entry(index, old_subpath)
                except KeyError:
                    pass
                else:
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        new_path, ie.kind,
                        reference_revision=ie.reference_revision)
                else:
                    self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            graph = self.branch.repository.get_file_graph()
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotator = Annotator(AnnotateProvider(
                self.branch.repository._file_change_scanner))
            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None if self.branch.head is None else self.store[self.branch.head].tree)
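        # dulwich's build_index_from_tree() writes a fresh index file and
        # checks the given tree's contents out into the working directory
        # (here the branch head's tree, when one exists).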

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(entry.path))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                0, 0, len(obj.as_raw_string()), 0,
                                0, 0))
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
             show_base=False):
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                possible_transports=possible_transports,
                local=local)
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                    merge.merge_inner(
                        self.branch,
                        new_basis_tree,
                        basis_tree,
                        this_tree=self,
                        change_reporter=change_reporter,
                        show_base=show_base)
            return count

    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
            try:
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, file_id=None):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path, file_id=None):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, file_id=None, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
        def mkdirs(path):
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            self.flush()
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']: