# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import

from collections import defaultdict
import errno
import itertools
import operator
import os
import posixpath
import re
import stat
import sys

from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.config import ConfigFile as GitConfigFile
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    Index,
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
    FLAG_STAGEMASK,
    read_submodule_head,
    validate_path,
    write_index_dict,
    )
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    S_ISGITLINK,
    )

from .. import (
    branch as _mod_branch,
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    errors,
    globbing,
    lock,
    osutils,
    revision as _mod_revision,
    trace,
    transport as _mod_transport,
    tree,
    urlutils,
    workingtree,
    )
from ..decorators import (
    only_raises,
    )
from ..mutabletree import (
    MutableTree,
    )
from ..sixish import text_type
from .mapping import (
    decode_git_path,
    encode_git_path,
    mode_kind,
    )


CONFLICT_SUFFIXES = ['.BASE', '.OTHER', '.THIS']


# TODO: There should be a base revid attribute to better inform the user about
# how the conflicts were generated.
class TextConflict(_mod_conflicts.Conflict):
    """The merge algorithm could not resolve all differences encountered."""

    typestring = 'text conflict'
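
    # _conflict_re matches the seven-character markers that git writes at the
    # start of a line in a conflicted file ("<<<<<<<", "=======" and
    # ">>>>>>>"); action_auto() scans the file for them before accepting an
    # automatic resolution.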
    _conflict_re = re.compile(b'^(<{7}|={7}|>{7})')

    def associated_filenames(self):
        return [self.path + suffix for suffix in CONFLICT_SUFFIXES]

    def _resolve(self, tt, winner_suffix):
        """Resolve the conflict by copying one of .THIS or .OTHER into file.

        :param tt: The TreeTransform where the conflict is resolved.
        :param winner_suffix: Either 'THIS' or 'OTHER'

        The resolution is symmetric, when taking THIS, item.THIS is renamed
        into item and vice-versa. This takes one of the files as a whole
        ignoring every difference that could have been merged cleanly.
        """
        # To avoid useless copies, we switch item and item.winner_suffix, only
        # item will exist after the conflict has been resolved anyway.
        item_tid = tt.trans_id_tree_path(self.path)
        item_parent_tid = tt.get_tree_parent(item_tid)
        winner_path = self.path + '.' + winner_suffix
        winner_tid = tt.trans_id_tree_path(winner_path)
        winner_parent_tid = tt.get_tree_parent(winner_tid)
        # Switch the paths to preserve the content
        tt.adjust_path(osutils.basename(self.path),
                       winner_parent_tid, winner_tid)
        tt.adjust_path(osutils.basename(winner_path),
                       item_parent_tid, item_tid)
        tt.unversion_file(item_tid)
        tt.version_file(winner_tid)

    def action_auto(self, tree):
        # GZ 2012-07-27: Using NotImplementedError to signal that a conflict
        # can't be auto resolved does not seem ideal.
        try:
            kind = tree.kind(self.path)
        except errors.NoSuchFile:
            return
        if kind != 'file':
            raise NotImplementedError("Conflict is not a file")
        conflict_markers_in_line = self._conflict_re.search
        with tree.get_file(self.path) as f:
            for line in f:
                if conflict_markers_in_line(line):
                    raise NotImplementedError("Conflict markers present")

    def _resolve_with_cleanups(self, tree, *args, **kwargs):
        with tree.transform() as tt:
            self._resolve(tt, *args, **kwargs)

    def action_take_this(self, tree):
        self._resolve_with_cleanups(tree, 'THIS')

    def action_take_other(self, tree):
        self._resolve_with_cleanups(tree, 'OTHER')

    def do(self, action, tree):
        """Apply the specified action to the conflict.

        :param action: The method name to call.

        :param tree: The tree passed as a parameter to the method.
        """
        meth = getattr(self, 'action_%s' % action, None)
        if meth is None:
            raise NotImplementedError(self.__class__.__name__ + '.' + action)
        meth(tree)

    def action_done(self, tree):
        """Mark the conflict as solved once it has been handled."""
        # This method does nothing but simplifies the design of upper levels.

    def describe(self):
        return 'Text conflict in %(path)s' % self.__dict__

    def __str__(self):
        return self.describe()

    def __repr__(self):
        return "%s(%r)" % (type(self).__name__, self.path)


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = self.repository._git._controltransport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()

    def supports_tree_reference(self):
        return True

    def supports_rename_tracking(self):
        return False

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

    def _get_submodule_index(self, relpath):
        if not isinstance(relpath, bytes):
            raise TypeError(relpath)
        try:
            info = self._submodule_info()[relpath]
        except KeyError:
            index_path = os.path.join(
                self.basedir, decode_git_path(relpath), '.git', 'index')
        else:
            index_path = self.control_transport.local_abspath(
                posixpath.join('modules', decode_git_path(info[1]), 'index'))
        return Index(index_path)

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)
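
    # Write locks are tied to the on-disk index: _lock_write_tree() opens
    # .git/index through dulwich's GitFile, which takes the 'index.lock'
    # file, and a lock already held elsewhere surfaces as
    # errors.LockContention('index').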
    def _lock_write_tree(self):
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
            try:
                self._index_file = GitFile(
                    self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1

    def lock_tree_write(self):
        self.branch.lock_read()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def lock_write(self, token=None):
        self.branch.lock_write()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):
        return False

    def break_lock(self):
        try:
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
            pass
        self.branch.break_lock()

    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
            return
        if self._index_file is not None:
            if self._index_dirty:
                self._flush(self._index_file)
                self._index_file.close()
            else:
                # Something else already triggered a write of the index
                # file by calling .flush()
                self._index_file.abort()
            self._index_file = None
        self._lock_mode = None
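
    # Case sensitivity is probed by stat()ing a deliberately mis-cased
    # control file name; if the filesystem reports it as missing, the tree
    # is treated as case sensitive.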
    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def merge_modified(self):
        return {}

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])
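
    # Parents beyond the first ("pending merges") are persisted the same way
    # git does it: their SHAs are written to .git/MERGE_HEAD, one per line,
    # and the file is deleted again when there are none.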
    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(
                revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes(
                'MERGE_HEAD', b'\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(
                revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                parents.append(
                    self.branch.lookup_foreign_revision_id(revision_id))
        return parents

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
               force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if not isinstance(files, list):
            files = [files]

        all_files = set()
        files_to_backup = []

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(
                file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_path, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)

                all_files.add(filepath)
                recurse_directory_to_add_files(filepath)

            files = list(all_files)

            if len(files) == 0:
                return  # nothing to do

            # Sort needed to first handle directory content before the
            # directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for change in self.iter_changes(
                        self.basis_tree(), include_unchanged=True,
                        require_versioned=False, want_unversioned=True,
                        specific_files=files):
                    if change.versioned[0] is False:
                        # The record is unknown or newly added
                        files_to_backup.append(change.path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(change.path[1]))
                    elif (change.changed_content and (change.kind[1] is not None)
                          and osutils.is_inside_any(files, change.path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(change.path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(change.path[1]))

            for f in files:
                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None

                abs_path = self.abspath(f)

                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')

                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None
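
    # smart_add() walks the requested paths itself: files and symlinks are
    # added straight to the index, directories are queued for recursion,
    # paths that carry their own control directory are skipped as nested
    # trees with a warning, and ignored names are collected into the
    # returned dict keyed by the matching ignore pattern.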
    def smart_add(self, file_list, recurse=True, action=None, save=True):
        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if self.supports_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(
                    self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(
                        encode_git_path(filepath))
                    if subpath in index:
                        # Already present
                        continue
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(
                        encode_git_path(filepath))
                    if subpath not in index:
                        call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                try:
                    transport = _mod_transport.get_transport_from_path(
                        abs_user_dir)
                    _mod_controldir.ControlDirFormat.find_format(transport)
                except errors.NotBranchError:
                    pass
                except errors.UnsupportedFormatError:
                    pass
                else:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if (self.is_control_filename(subp) or
                            self.mapping.is_special_file(subp)):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        (index, subpath) = self._lookup_index(
                            encode_git_path(subp))
                        if subpath in index:
                            # Already present
                            continue
                        if subp in conflicts_related:
                            continue
                        call_action(subp, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)
            return added, ignored

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))
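
    # The working tree is walked with os.walk() on filesystem-encoded paths;
    # control directories (.git) are pruned from the walk, nested tree
    # references are pruned unless recurse_nested is set, and names that
    # cannot be decoded are reported as BadFilenameEncoding.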
    def _iter_files_recursive(self, from_dir=None, include_dirs=False,
                              recurse_nested=False):
        if from_dir is None:
            from_dir = u""
        if not isinstance(from_dir, text_type):
            raise TypeError(from_dir)
        encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc)
        for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(
                    dir_relpath.decode(osutils._fs_enc)):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if not recurse_nested and self._directory_is_tree_reference(
                        relpath.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                    if not self.is_versioned(relpath.decode(osutils._fs_enc)):
                        dirnames.remove(name)
            for name in filenames:
                if self.mapping.is_special_file(name):
                    continue
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc, 'replace')):
                    continue
                yp = os.path.join(dir_relpath, name)
                try:
                    yield yp.decode(osutils._fs_enc)
                except UnicodeDecodeError:
                    raise errors.BadFilenameEncoding(
                        yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set(
                [decode_git_path(p) for p, sha, mode in self.iter_git_objects()])
            all_paths = set(self._iter_files_recursive(include_dirs=False))
            return iter(all_paths - index_paths)

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                    try:
                        kind = osutils.file_kind(fullpath)
                    except OSError as e:
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                    if f != '' and self._directory_is_tree_reference(f):
                        kind = 'tree-reference'
                    kinds[pos] = kind

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file
        # name.
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        try:
            shaf = SHA1Writer(f)
            write_index_dict(shaf, self.index)
            shaf.close()
        except BaseException:
            f.abort()
            raise
        self._index_dirty = False

    def get_file_mtime(self, path):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
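
    # Ignore handling is two-layered: Bazaar's global and runtime ignore
    # patterns are matched first through an ExceptionGlobster, and anything
    # not caught there is checked against the .gitignore rules via dulwich's
    # IgnoreFilterManager (see _get_ignore_manager below).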
    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            from breezy import ignores
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(
                ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += '/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip('/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps[-1].is_exclude:
            return None

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return False
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        pass

    def get_file_verifier(self, path, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(encode_git_path(path))
            try:
                return ("GIT", index[subpath].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                if e.errno in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path):
        with self.lock_read():
            encoded_path = encode_git_path(path)
            (index, subpath) = self._lookup_index(encoded_path)
            try:
                return mode_kind(index[subpath].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(encoded_path):
                    return 'directory'
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        encoded_path = self.abspath(decode_git_path(path)).encode(
            osutils._fs_enc)
        return index_entry_from_path(encoded_path)

    def is_executable(self, path):
        with self.lock_read():
            if self._supports_executable():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(encode_git_path(path))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    return False
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if self._supports_executable():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(
                path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True,
                   recurse_nested=False):
        if from_dir is None or from_dir == '.':
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(
                    self._iter_files_recursive(
                        from_dir, include_dirs=True,
                        recurse_nested=recurse_nested))
            else:
                encoded_from_dir = self.abspath(from_dir).encode(
                    osutils._fs_enc)
                path_iterator = sorted(
                    [os.path.join(from_dir, name.decode(osutils._fs_enc))
                     for name in os.listdir(encoded_from_dir)
                     if not self.controldir.is_control_filename(
                         name.decode(osutils._fs_enc)) and
                     not self.mapping.is_special_file(
                         name.decode(osutils._fs_enc))])
            for path in path_iterator:
                try:
                    encoded_path = encode_git_path(path)
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                try:
                    value = index[index_path]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(
                        parent, dir_ids):
                    pass
                if kind == 'tree-reference' and recurse_nested:
                    ie = self._get_dir_ie(path, self.path2id(path))
                    yield (posixpath.relpath(path, from_dir), 'V', 'directory',
                           ie)
                    continue
                if kind == 'directory':
                    if self._has_dir(encoded_path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                        status = "V"
                    elif self.is_ignored(path):
                        status = "I"
                        ie = fk_entries[kind]()
                    else:
                        status = "?"
                        ie = fk_entries[kind]()
                    yield (posixpath.relpath(path, from_dir), status, kind,
                           ie)
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield (posixpath.relpath(path, from_dir), "V", ie.kind, ie)
                else:
                    ie = fk_entries[kind]()
                    yield (posixpath.relpath(path, from_dir),
                           ("I" if self.is_ignored(path) else "?"), kind, ie)

    def all_file_ids(self):
        raise errors.UnsupportedOperation(self.all_file_ids, self)

    def all_versioned_paths(self):
        with self.lock_read():
            paths = {u""}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = decode_git_path(path)
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")
                    if path in paths:
                        break
                    paths.add(path)
            return paths

    def iter_child_entries(self, path):
        encoded_path = encode_git_path(path)
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            for item_path, value in self.index.iteritems():
                decoded_item_path = decode_git_path(item_path)
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(path, decoded_item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(decoded_item_path, path)
                if '/' in subpath:
                    dirname = subpath.split('/', 1)[0]
                    file_ie = self._get_dir_ie(
                        posixpath.join(path, dirname), parent_id)
                else:
                    (unused_parent, name) = posixpath.split(decoded_item_path)
                    file_ie = self._get_file_ie(
                        name, decoded_item_path, value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)
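
    # Git records an unresolved merge by keeping multiple stage entries for
    # a path in the index; the stage bits live in the entry's flags field.
    # Any entry with bits set under FLAG_STAGEMASK is therefore reported as
    # a TextConflict, and _set_conflicted() toggles those same bits.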
    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(TextConflict(decode_git_path(item_path)))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(encode_git_path(conflict.path))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict',
                                           'contents conflict'):
                    try:
                        self._set_conflicted(
                            encode_git_path(conflict.path), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(
                            self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)
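
    # walkdirs() merges two sorted iterators: _walkdirs(), which is derived
    # purely from the index, and osutils.walkdirs(), which reflects what is
    # actually on disk.  Entries only on disk come out as unknown, entries
    # only in the index come out as missing, and entries present in both are
    # merged into a single row.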
    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
                (current_directory_path,
                 [(file1_path, file1_name, file1_kind, (lstat),
                   file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT
                    or (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
                        and len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                                             ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content) and
                        self.controldir.is_control_filename(
                            cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
                             - (current_inv[0][0] < cur_disk_dir_relpath))
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield cur_disk_dir_relpath, dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                            current_inv[1]]
                yield current_inv[0][0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unversioned disk file
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             path_elements[0][2], path_elements[0][3],
                             None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             'unknown', None,
                             path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0][0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=u""):
        prefix = encode_git_path(prefix)
        per_dir = defaultdict(set)
        per_dir[(u'', self.path2id(''))] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = decode_git_path(dirname)
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (decode_git_path(path), decode_git_path(child_name),
                 kind, None,
                 self.path2id(decode_git_path(path)),
                 kind))

        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def annotate_iter(self, path,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name
                    # in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks
                        # and directories have a "text" which is the empty
                        # text, and we know that won't mess up annotations. But
                        # it seems cleaner to skip them.
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None
            if self.branch.head is None
            else self.store[self.branch.head].tree,
            honor_filemode=self._supports_executable())
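
    # reset_state() rebuilds the index from the tree behind branch.head:
    # every blob in that tree gets an index entry, preferably based on the
    # stat of the corresponding working-tree file, with a stat synthesised
    # from the stored object when the file is missing on disk.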
    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self.index.clear()
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(
                        self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(
                                decode_git_path(entry.path)))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                                 0, 0, len(
                                                     obj.as_raw_string()), 0,
                                                 0, 0))
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)

    def _update_git_tree(
            self, old_revision, new_revision, change_reporter=None,
            show_base=False):
        basis_tree = self.revision_tree(old_revision)
        if new_revision != old_revision:
            from .. import merge
            with basis_tree.lock_read():
                new_basis_tree = self.branch.basis_tree()
                merge.merge_inner(
                    self.branch,
                    new_basis_tree,
                    basis_tree,
                    this_tree=self,
                    change_reporter=change_reporter,
                    show_base=show_base)

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
             show_base=False, tag_selector=None):
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local, tag_selector=tag_selector)
            self._update_git_tree(
                old_revision=old_revision,
                new_revision=self.branch.last_revision(),
                change_reporter=change_reporter,
                show_base=show_base)
            return count

    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
            try:
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, branch=None):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            (index, subpath) = self._lookup_index(
                encode_git_path(path))
            if subpath not in index:
                raise errors.NoSuchFile(path)
            hexsha = index[subpath].sha
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """

        def mkdirs(path):
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
            return wt

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        from .. import merge
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
            if revision_id == _mod_revision.NULL_REVISION:
                new_parents = []
            else:
                new_parents = [revision_id]
            tree.set_parent_ids(new_parents)

    def reference_parent(self, path, possible_transports=None):
        remote_url = self.get_reference_info(path)
        if remote_url is None:
            trace.warning("Unable to find submodule info for %s", path)
            return None
        return _mod_branch.Branch.open(remote_url, possible_transports=possible_transports)
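
    # Submodule locations are kept where git keeps them: get_reference_info()
    # reads the entry for the path from .gitmodules, and set_reference_info()
    # rewrites that file through dulwich's ConfigFile and re-adds it to the
    # tree.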
    def get_reference_info(self, path):
        submodule_info = self._submodule_info()
        info = submodule_info.get(encode_git_path(path))
        if info is None:
            return None
        return decode_git_path(info[0])

    def set_reference_info(self, tree_path, branch_location):
        path = self.abspath('.gitmodules')
        try:
            config = GitConfigFile.from_path(path)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                config = GitConfigFile()
            else:
                raise
        section = (b'submodule', encode_git_path(tree_path))
        if branch_location is None:
            try:
                del config[section]
            except KeyError:
                pass
        else:
            branch_location = urlutils.join(
                urlutils.strip_segment_parameters(self.branch.user_url),
                branch_location)
            config.set(
                section,
                b'path', encode_git_path(tree_path))
            config.set(
                section,
                b'url', branch_location.encode('utf-8'))
        config.write_to_path(path)
        self.add('.gitmodules')
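

# The format flags below describe what a Git working tree can and cannot do
# compared to a Bazaar one: directories are not versioned separately, file
# ids cannot be set, shelving and merge-modified tracking are unavailable,
# and ignore patterns come from .gitignore.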
class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    ignore_filename = ".gitignore"

    @property
    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']:
            hook(wt)