# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import

import errno
import itertools
import operator
import os
import posixpath
import stat
import sys

from collections import defaultdict

from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    Index,
    SHA1Writer,
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
    blob_from_path_and_stat,
    cleanup_mode,
    commit_tree,
    read_submodule_head,
    validate_path,
    write_index_dict,
    FLAG_STAGEMASK,
    )
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    S_ISGITLINK,
    Tree,
    ZERO_SHA,
    )
from dulwich.repo import (

from ... import (
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    errors,
    globbing,
    ignores,
    lock,
    merge,
    osutils,
    revision as _mod_revision,
    trace,
    transport as _mod_transport,
    tree,
    workingtree,
    )
from ...decorators import (
    only_raises,
    )
from ...mutabletree import (
    BadReferenceTarget,
    MutableTree,
    )
from .tree import (
    changes_from_git_changes,
    tree_delta_from_git_changes,
    InterGitTrees,
    MutableGitIndexTree,
    )
from .mapping import (
    mode_kind,
    )


IGNORE_FILENAME = ".gitignore"


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()

    def supports_tree_reference(self):
        return True

    def supports_rename_tracking(self):
        return False

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
            self._read_index()
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)
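
    # Write locks below rely on dulwich's GitFile: opening .git/index for
    # writing goes through a lockfile, and a concurrent holder surfaces as
    # FileLocked, which is reported to callers as errors.LockContention.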

    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
            try:
                self._index_file = GitFile(self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
            self._read_index()
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1

    def lock_tree_write(self):
        self.branch.lock_read()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)

    def lock_write(self, token=None):
        self.branch.lock_write()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)
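
    # Example (hypothetical usage): the lock methods return a
    # breezy.lock.LogicalLockResult, so callers can use them as context
    # managers and rely on unlock() being called on exit, e.g.:
    #
    #   with wt.lock_tree_write():
    #       wt.smart_add(['foo'])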

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):
        return False

    def break_lock(self):
        try:
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
            pass
        self.branch.break_lock()

    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
            return
        if self._index_file is not None:
            if self._index_dirty:
                self._flush(self._index_file)
                self._index_file.close()
            else:
                # Somebody else already wrote the index file
                # by calling .flush()
                self._index_file.abort()
            self._index_file = None
        self._lock_mode = None
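
    # Case sensitivity is detected below by probing for the .git config file
    # with deliberately mixed-up casing: if ".git/cOnFiG" is reported missing,
    # the filesystem is treated as case-sensitive.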

    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def merge_modified(self):
        return {}

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)
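
    # Parent ids map onto Git state as follows: the leftmost parent is the
    # branch head, while any additional (right-hand) parents are recorded as
    # Git SHAs in .git/MERGE_HEAD, one per line; get_parent_ids() below
    # reassembles the list from those two sources.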

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes('MERGE_HEAD', '\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip('\n')
                parents.append(self.branch.lookup_foreign_revision_id(revision_id))
        return parents

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
            force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if isinstance(files, basestring):
            files = [files]

        files_to_backup = []
        all_files = set()

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory
        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                all_files.add(filepath)
                recurse_directory_to_add_files(filepath)

            files = list(all_files)
            if len(files) == 0:
                return  # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                        kind, executable) in self.iter_changes(self.basis_tree(),
                        include_unchanged=True, require_versioned=False,
                        want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))

            for f in files:
                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None

                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                    self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None
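
    # smart_add() normalizes and canonicalizes the supplied paths, skips
    # control files, ignored files and anything that looks like a nested
    # tree, and stages the rest in the Git index via _index_add_entry().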

    def smart_add(self, file_list, recurse=True, action=None, save=True):
        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                try:
                    transport = _mod_transport.get_transport_from_path(abs_user_dir)
                    _mod_controldir.ControlDirFormat.find_format(transport)
                    subtree = True
                except errors.NotBranchError:
                    subtree = False
                except errors.UnsupportedFormatError:
                    subtree = False
                if subtree:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        if subp in self.index:
                            continue
                        if subp in conflicts_related:
                            continue
                        call_action(subp, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)
            return added, ignored

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        if from_dir is None:
            from_dir = u""
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip("/")
            if self.controldir.is_control_filename(dir_relpath):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(name):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                    try:
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set([p.decode('utf-8') for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=True))
            for p in (all_paths - index_paths):
                if not self._has_dir(p):
                    yield p

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                    try:
                        kind = osutils.file_kind(fullpath)
                    except OSError as e:
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                        raise
                    if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
                        kind = 'tree-reference'
                    kinds[pos] = kind

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file name :(
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        shaf = SHA1Writer(f)
        write_index_dict(shaf, self.index)
        shaf.close()
        self._index_dirty = False

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_file_id(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        root_tree = self.store[head].tree
        try:
            tree_lookup_path(self.store.__getitem__, root_tree, path)
        except KeyError:
            return False
        return True

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            (num, message) = e.args
            if num == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise
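
    # Ignore handling combines two layers: Bazaar's global/runtime ignore
    # globs (checked first, via globbing.ExceptionGlobster) and the normal
    # gitignore rules, evaluated through dulwich's IgnoreFilterManager.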

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += '/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps:
            return None
        if not ps[-1].is_exclude:
            return None

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        try:
            head = self.repository._git.head()
        except KeyError:
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        else:
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return ("GIT", index[subpath].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                (num, message) = e.args
                if num in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def filter_unversioned_files(self, files):
        return set([p for p in files if not self.is_versioned(p)])

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return mode_kind(index[subpath].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(path):
                    return "directory"
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))
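
    # On filesystems without a usable executable bit (checked via
    # osutils.supports_executable), the executable flag is taken from the
    # index entry or the basis tree instead of the on-disk mode.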

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    return False
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
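
    # list_files() yields (path, status, kind, file_id, entry) tuples, where
    # status is "V" for versioned entries, "I" for ignored files and "?" for
    # other unversioned files, mirroring WorkingTree.list_files elsewhere in
    # breezy.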

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        if from_dir is None:
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
            else:
                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                    os.listdir(self.abspath(from_dir).encode(osutils._fs_enc)) if not self.controldir.is_control_filename(name)
                    and not self.mapping.is_special_file(name)])
            for path in path_iterator:
                try:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                try:
                    value = index[index_path]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                    pass
                if kind in ('directory', 'tree-reference'):
                    if self._has_dir(path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                        status = "V"
                        file_id = ie.file_id
                    elif self.is_ignored(path):
                        status = "I"
                        ie = fk_entries[kind]()
                        file_id = None
                    else:
                        status = "?"
                        ie = fk_entries[kind]()
                        file_id = None
                    yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                else:
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                    pass
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            paths = set([u""])
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")
                    if path in paths:
                        break
                    paths.add(path)
            return paths

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            seen_children = set()
            for item_path, value in self.index.iteritems():
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(encoded_path, item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(item_path, encoded_path)
                if b'/' in subpath:
                    dirname = subpath.split(b'/', 1)[0]
                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
                    if dirname in seen_children:
                        continue
                    seen_children.add(dirname)
                else:
                    (parent, name) = posixpath.split(item_path)
                    file_ie = self._get_file_ie(
                        name.decode('utf-8'),
                        item_path.decode('utf-8'), value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)
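
    # Conflicts are not stored separately: an index entry whose flags have any
    # FLAG_STAGEMASK bits set is treated as a text conflict, and
    # _set_conflicted() toggles those bits directly on the entry.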

    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                    try:
                        self._set_conflicted(conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
        ((current_directory_path, fileid),
         [(file1_path, file1_name, file1_kind, (lstat), file1_id,

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT or
                    (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                        len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                        ['.git', '.git'])
                    if (bzrdir_loc < len(cur_disk_dir_content)
                        and self.controldir.is_control_filename(
                            cur_disk_dir_content[bzrdir_loc][0])):
                        # we don't yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                    relpath, basename, kind, stat, top_path in
                    cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                    for relpath, basename, dkind, stat, fileid, kind in
                    current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                            inv_row[1], disk_row[2],
                            disk_row[3], inv_row[4],
                            inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], path_elements[0][2],
                            path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], 'unknown', None,
                            path_elements[0][4], path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
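
    # _walkdirs() reports only what is in the index: entries are grouped into
    # a per-directory mapping keyed by (directory path, directory file id) and
    # emitted in sorted order, which walkdirs() above then merges with the
    # on-disk listing.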

    def _walkdirs(self, prefix=""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        per_dir[('', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == '' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                kind, None,
                self.path2id(path.decode("utf-8")),
                kind))

        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))

        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(old_path.encode('utf-8'))
                try:
                    self._index_del_entry(index, old_subpath)
                except KeyError:
                    pass
                else:
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        new_path, ie.kind,
                        reference_revision=ie.reference_revision)
                else:
                    self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path, file_id=None,
            default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            graph = self.branch.repository.get_file_graph()
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotator = Annotator(AnnotateProvider(
                self.branch.repository._file_change_scanner))
            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None if self.branch.head is None else self.store[self.branch.head].tree)
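
    # reset_state() below rebuilds the index from the tree pointed to by the
    # branch head: stat information is taken from the working tree when the
    # file is present, and synthesized from the stored blob otherwise.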

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self.index.clear()
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(entry.path))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                0, 0, len(obj.as_raw_string()), 0,
                                0, 0))
                        (index, subpath) = self._lookup_index(entry.path)
                        index[subpath] = index_entry_from_stat(st, entry.sha, 0)

    def pull(self, source, overwrite=False, stop_revision=None,
            change_reporter=None, possible_transports=None, local=False,
            show_base=False):
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                possible_transports=possible_transports,
                local=local)
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                    merge.merge_inner(
                        self.branch,
                        new_basis_tree,
                        basis_tree,
                        this_tree=self,
                        change_reporter=change_reporter,
                        show_base=show_base)
            return count

    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
            try:
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, file_id=None):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path, file_id=None):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, file_id=None, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
        def mkdirs(path):
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
            return wt

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    @property
    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
            accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']:
            hook(wt)
        return wt
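
# InterIndexGitTree below compares a GitRevisionTree (the "source") with this
# working tree (the "target") by diffing the source tree object against a tree
# synthesized from the index and the on-disk files.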

class InterIndexGitTree(InterGitTrees):
    """InterTree that works between a Git revision tree and an index."""

    def __init__(self, source, target):
        super(InterIndexGitTree, self).__init__(source, target)
        self._index = target.index

    @classmethod
    def is_compatible(cls, source, target):
        from .repository import GitRevisionTree
        return (isinstance(source, GitRevisionTree) and
                isinstance(target, GitWorkingTree))

    def _iter_git_changes(self, want_unchanged=False, specific_files=None,
            require_versioned=False, extra_trees=None,
            want_unversioned=False):
        trees = [self.source]
        if extra_trees is not None:
            trees.extend(extra_trees)
        if specific_files is not None:
            specific_files = self.target.find_related_paths_across_trees(
                specific_files, trees,
                require_versioned=require_versioned)
        # TODO(jelmer): Restrict to specific_files, for performance reasons.
        with self.lock_read():
            return changes_between_git_tree_and_working_copy(
                self.source.store, self.source.tree,
                self.target, want_unchanged=want_unchanged,
                want_unversioned=want_unversioned)


tree.InterTree.register_optimiser(InterIndexGitTree)
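
# changes_between_git_tree_and_working_copy() stages the current working-copy
# contents as blobs (and empty Tree objects for directories that replaced
# files), builds a tree object from those entries with commit_tree(), and then
# asks the object store for the tree-to-tree delta.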

def changes_between_git_tree_and_working_copy(store, from_tree_sha, target,
        want_unchanged=False, want_unversioned=False):
    """Determine the changes between a git tree and a working tree with index.
    """
    extras = set()
    blobs = {}
    # Report dirified directories to commit_tree first, so that they can be
    # replaced with non-empty directories if they have contents.
    dirified = []
    target_root_path = target.abspath('.').encode(sys.getfilesystemencoding())
    for path, index_entry in target._recurse_index_entries():
        try:
            live_entry = index_entry_from_path(
                target.abspath(path.decode('utf-8')).encode(osutils._fs_enc))
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                # Entry was removed; keep it listed, but mark it as gone.
                blobs[path] = (ZERO_SHA, 0)
            elif e.errno == errno.EISDIR:
                # Entry was turned into a directory
                dirified.append((path, Tree().id, stat.S_IFDIR))
                store.add_object(Tree())
            else:
                raise
        else:
            blobs[path] = (live_entry.sha, cleanup_mode(live_entry.mode))
    if want_unversioned:
        for e in target.extras():
            ap = target.abspath(e)
            st = os.lstat(ap)
            try:
                np, accessible = osutils.normalized_filename(e)
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    e, osutils._fs_enc)
            if stat.S_ISDIR(st.st_mode):
                blob = Tree()
            else:
                blob = blob_from_path_and_stat(ap.encode('utf-8'), st)
            store.add_object(blob)
            np = np.encode('utf-8')
            blobs[np] = (blob.id, cleanup_mode(st.st_mode))
            extras.add(np)
    to_tree_sha = commit_tree(store, dirified + [(p, s, m) for (p, (s, m)) in blobs.items()])
    return store.tree_changes(
        from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True), extras