# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""
from __future__ import absolute_import

from collections import defaultdict

from dulwich.ignore import (
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
from dulwich.object_store import (
from dulwich.objects import (
from dulwich.repo import (
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    revision as _mod_revision,
    transport as _mod_transport,
from ..decorators import (
from ..mutabletree import (
from .mapping import (

IGNORE_FILENAME = ".gitignore"
class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()
    def supports_tree_reference(self):

    def supports_rename_tracking(self):

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)
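    # Note on locking: the tree write lock is taken by opening .git/index
    # through dulwich's GitFile, which creates an index.lock file; if that
    # lock file already exists, the open presumably fails with FileLocked,
    # which is reported to the caller as a breezy LockContention. Branch
    # locks are taken separately in lock_tree_write/lock_write below.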
    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
        if not self._lock_mode:
            self._lock_mode = 'w'
                self._index_file = GitFile(self.control_transport.local_abspath('index'), 'wb')
                raise errors.LockContention('index')
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)

    def lock_tree_write(self):
        self.branch.lock_read()
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)

    def lock_write(self, token=None):
        self.branch.lock_write()
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)

        return self._lock_count >= 1

    def get_physical_lock_status(self):

    def break_lock(self):
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
        self.branch.break_lock()
    @only_raises(errors.LockNotHeld, errors.LockBroken)
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
        if self._index_file is not None:
            if self._index_dirty:
                self._flush(self._index_file)
                self._index_file.close()
                # Somebody else already wrote the index file
                # by calling .flush()
                self._index_file.abort()
            self._index_file = None
        self._lock_mode = None
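    # The probe below stats a deliberately mixed-case name under .git; if the
    # stat fails with NoSuchFile the filesystem is treated as case-sensitive,
    # otherwise as case-insensitive (the lowercase "config" file matched).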
    def _detect_case_handling(self):
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
            self.case_sensitive = False

    def merge_modified(self):

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])
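    # Pending (right-hand) merge parents are not stored in a Bazaar-specific
    # file; they are mapped to Git SHAs and written to .git/MERGE_HEAD, one
    # hex SHA per line, which is the same location plain git uses for
    # pending merges.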
    def _set_merges_from_parent_ids(self, rhs_parent_ids):
            merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
            self.control_transport.put_bytes('MERGE_HEAD', b'\n'.join(merges),
                mode=self.controldir._get_file_mode())
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This API will try to retrieve the tree data
        for each element of revision_ids from the tree's repository. If you
        have tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                parents.append(self.branch.lookup_foreign_revision_id(revision_id))

    def check_state(self):
        """Check that the working state is/isn't valid."""
    def remove(self, files, verbose=False, to_file=None, keep_files=True,
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if not isinstance(files, list):

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                all_files.add(filepath)
                recurse_directory_to_add_files(filepath)

            files = list(all_files)
                return  # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                         include_unchanged=True, require_versioned=False,
                         want_unversioned=True, specific_files=files):
                    if not versioned[0]:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))

                except errors.NoSuchFile:
                abs_path = self.abspath(f)
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                    message = "%s does not exist" % (f, )
                if f in files_to_backup and not force:
                    if kind == 'directory':
                        osutils.rmtree(abs_path)
                        osutils.delete_any(abs_path)
                    message = "deleted %s" % (f,)
                    message = "removed %s" % (f,)
                self._unversion_path(f)
                # print only one message (if any) per file.
                if message is not None:
            self._versioned_dirs = None
    def smart_add(self, file_list, recurse=True, action=None, save=True):

        # expand any symlinks in the directory part, while leaving the
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                        call_action(filepath, kind)
                            self._index_add_entry(filepath, kind)
                        added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                        user_dirs.append(filepath)
                    raise errors.BadFileKindError(filename=abspath, kind=kind)

            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                        transport = _mod_transport.get_transport_from_path(abs_user_dir)
                        _mod_controldir.ControlDirFormat.find_format(transport)
                    except errors.NotBranchError:
                    except errors.UnsupportedFormatError:
                        trace.warning('skipping nested tree %r', abs_user_dir)

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                        if subp in self.index:
                        if subp in conflicts_related:
                            call_action(filepath, kind)
                            self._index_add_entry(subp, kind)
            return added, ignored
    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))
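    # Filesystem enumeration below works on filesystem-encoded bytes
    # (osutils._fs_enc) and decodes lazily, raising BadFilenameEncoding for
    # names that cannot be decoded; control directories and mapping-specific
    # special files are skipped.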
    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(dir_relpath.decode(osutils._fs_enc)):
            for name in list(dirnames):
                if self.controldir.is_control_filename(name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                    relpath = os.path.join(dir_relpath, name)
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                    if not self._has_dir(relpath):
                        dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(

        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set([p.decode('utf-8') for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=True))
            for p in (all_paths - index_paths):
                if not self._has_dir(p.encode('utf-8')):
    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                        kind = osutils.file_kind(fullpath)
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                    if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
                        kind = 'tree-reference'

        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file name :(
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
            write_index_dict(shaf, self.index)
        self._index_dirty = False
    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
        if self.had_id(file_id):

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_path(file_id)
            head = self.repository._git.head()
            # Assume no if basis is not accessible
        root_tree = self.store[head].tree
            tree_lookup_path(self.store.__getitem__, root_tree, path.encode('utf-8'))

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
            return self._lstat(path).st_mtime
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
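    # Ignore handling consults two sources: Breezy's runtime/user ignore
    # globs (via an ExceptionGlobster) and the repository's gitignore rules
    # through dulwich's IgnoreFilterManager (see _get_ignore_manager below).
    # Directory paths are matched with a trailing slash, as git does.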
    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
        encoded_filename = filename.encode('utf-8')
            if self.kind(filename) == 'directory':
                encoded_filename += b'/'
        except errors.NoSuchFile:
        encoded_filename = encoded_filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps[-1].is_exclude:

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
        _mod_revision.check_not_reserved_id(revid)
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
            head = self.repository._git.head()
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
                return ("GIT", index[subpath].sha)
                if self._has_dir(path):
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
                return osutils.sha_file_by_name(abspath)
                if e.errno in (errno.EISDIR, errno.ENOENT):
    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            encoded_path = path.encode('utf-8')
            (index, subpath) = self._lookup_index(encoded_path)
                return mode_kind(index[subpath].mode)
                # Maybe it's a directory?
                if self._has_dir(encoded_path):
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        return index_entry_from_path(self.abspath(path.decode('utf-8')).encode(osutils._fs_enc))

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                    mode = index[subpath].mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                    os.listdir(self.abspath(from_dir).encode(osutils._fs_enc))
                    if not self.controldir.is_control_filename(name.decode(osutils._fs_enc))
                    and not self.mapping.is_special_file(name.decode(osutils._fs_enc))])
            for path in path_iterator:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                    value = index[index_path]
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                if kind in ('directory', 'tree-reference'):
                    if self._has_dir(encoded_path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                    elif self.is_ignored(path):
                        ie = fk_entries[kind]()
                        ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie
    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                    path = posixpath.dirname(path).strip("/")

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            seen_children = set()
            for item_path, value in self.index.iteritems():
                decoded_item_path = item_path.decode('utf-8')
                if self.mapping.is_special_file(item_path):
                if not osutils.is_inside(path, decoded_item_path):
                subpath = posixpath.relpath(decoded_item_path, path)
                    dirname = subpath.split('/', 1)[0]
                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
                    (unused_parent, name) = posixpath.split(decoded_item_path)
                    file_ie = self._get_file_ie(
                        name, decoded_item_path, value, parent_id)
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
    def set_conflicts(self, conflicts):
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)
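    # Conflicts are represented by the stage bits in the index entry flags:
    # setting FLAG_STAGEMASK marks an entry as conflicted and clearing it
    # resolves the conflict. The index value is treated as a 10-tuple whose
    # last field holds the flags.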
    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
            self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                        self._set_conflicted(conflict.path.encode('utf-8'), True)
                        raise errors.UnsupportedOperation(self.add_conflicts, self)
                    raise errors.UnsupportedOperation(self.add_conflicts, self)
    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        Returns a generator which yields items in the form:
        ((current_directory_path, fileid),
         [(file1_path, file1_name, file1_kind, (lstat), file1_id,

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
            current_disk = next(disk_iterator)
            disk_finished = False
            if not (e.errno == errno.ENOENT or
                    (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
            current_inv = next(inventory_iterator)
        except StopIteration:
        while not inv_finished or not disk_finished:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                        len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                    if (bzrdir_loc < len(cur_disk_dir_content)
                            and self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we don't yield the contents of .bzr, or .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
                # everything is unknown
                # everything is missing
                direction = ((current_inv[0][0] > cur_disk_dir_relpath) -
                             (current_inv[0][0] < cur_disk_dir_relpath))
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                    relpath, basename, kind, stat, top_path in
                    cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                    current_disk = next(disk_iterator)
                except StopIteration:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                    for relpath, basename, dkind, stat, fileid, kind in
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                # versioned present directory
                # merge the inventory and disk data together
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                            inv_row[1], disk_row[2],
                            disk_row[3], inv_row[4],
                    elif len(path_elements[0]) == 5:
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], path_elements[0][2],
                            path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], 'unknown', None,
                            path_elements[0][4], path_elements[0][5]))
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
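    # _walkdirs builds the "inventory" side of walkdirs() purely from the Git
    # index: every index entry is bucketed into per_dir, keyed by
    # (directory path, directory file id), with parent directories added
    # recursively via add_entry.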
    def _walkdirs(self, prefix=u""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
            per_dir[(u'', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 self.path2id(path.decode("utf-8")),

        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                if not path.startswith(prefix):
                add_entry(path, mode_kind(value.mode))

        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)
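    # Inventory deltas are applied directly to the Git index: an old path is
    # dropped from the index and a new path is re-added (tree references with
    # their reference revision); directories are not tracked explicitly.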
    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(old_path.encode('utf-8'))
                    self._index_del_entry(index, old_subpath)
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        reference_revision=ie.reference_revision)
                    self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter.

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION.

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))
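    # Checkout creation delegates to dulwich's build_index_from_tree, writing
    # .git/index from the tree of the current branch head (None is passed
    # when the branch has no head yet).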
    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            None if self.branch.head is None else self.store[self.branch.head].tree)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        recover if something got corrupted (like the .git/index file).
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                        # Let's at least try to use the working tree file:
                            st = self._lstat(self.abspath(entry.path.decode('utf-8')))
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                0, 0, len(obj.as_raw_string()), 0,
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                possible_transports=possible_transports,
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                        change_reporter=change_reporter,
                        show_base=show_base)
    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])
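    # Git submodules are exposed as Bazaar tree references: the submodule's
    # HEAD is read from its checkout on disk and mapped to a Bazaar revision
    # id via the branch's foreign-revision lookup.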
    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, file_id=None):
        hexsha = self._read_submodule_head(path)
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path, file_id=None):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")
    def extract(self, sub_path, file_id=None, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()

        with self.lock_tree_write():
            branch_transport = mkdirs(sub_path)
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']: