# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import

from cStringIO import (
from collections import defaultdict

from dulwich.errors import NotGitRepository
from dulwich.ignore import (
from dulwich.index import (
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
    blob_from_path_and_stat,
from dulwich.object_store import (
from dulwich.objects import (
from dulwich.repo import Repo

    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    revision as _mod_revision,
    transport as _mod_transport,
from ...decorators import (
from ...mutabletree import (
    changes_from_git_changes,
    tree_delta_from_git_changes,
from .mapping import (

IGNORE_FILENAME = ".gitignore"

class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch, index):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()

    def supports_tree_reference(self):

    def supports_rename_tracking(self):

        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
        if not self._lock_mode:
            self._lock_mode = 'w'
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)

    def lock_tree_write(self):
        self.branch.lock_read()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)

    def lock_write(self, token=None):
        self.branch.lock_write()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)

        return self._lock_count >= 1

    def get_physical_lock_status(self):

    @only_raises(errors.LockNotHeld, errors.LockBroken)
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
        self._lock_mode = None
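
    # ".git/config" always exists in a Git control directory, so statting a
    # deliberately mis-cased variant of it tells us whether the filesystem
    # treats file names case-insensitively.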
    def _detect_case_handling(self):
        self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
            self.case_sensitive = False

    def merge_modified(self):

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])
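
    # Right-hand (merge) parents are not representable in the Git index
    # itself; they are mapped to Git SHAs and recorded in .git/MERGE_HEAD,
    # the same file git uses to track an in-progress merge.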
    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        self.control_transport.put_bytes('MERGE_HEAD', '\n'.join(merges),
            mode=self.controldir._get_file_mode())
        self.control_transport.delete('MERGE_HEAD')
        except errors.NoSuchFile:

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
        merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
        for l in osutils.split_lines(merges_bytes):
            revision_id = l.rstrip('\n')
            parents.append(self.branch.lookup_foreign_revision_id(revision_id))

    def check_state(self):
        """Check that the working state is/isn't valid."""
    def remove(self, files, verbose=False, to_file=None, keep_files=True,
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        if isinstance(files, basestring):

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                all_files.add(filepath)
                recurse_directory_to_add_files(filepath)

            files = list(all_files)
                return # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                         include_unchanged=True, require_versioned=False,
                         want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))

            except errors.NoSuchFile:
                abs_path = self.abspath(f)
                # having removed it, it must be either ignored or unknown
                if self.is_ignored(f):
                kind_ch = osutils.kind_marker(kind)
                to_file.write(new_status + ' ' + f + kind_ch + '\n')
                message = "%s does not exist" % (f, )
                if f in files_to_backup and not force:
                    if kind == 'directory':
                        osutils.rmtree(abs_path)
                        osutils.delete_any(abs_path)
                    message = "deleted %s" % (f,)
                message = "removed %s" % (f,)
                self._unversion_path(f)
                # print only one message (if any) per file.
                if message is not None:
            self._versioned_dirs = None
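
    # smart_add: normalise the incoming paths, add files and symlinks to the
    # Git index directly, and queue directories for a recursive scan that
    # skips nested trees, control files and ignored paths.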
    def smart_add(self, file_list, recurse=True, action=None, save=True):
        # expand any symlinks in the directory part, while leaving the
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    if filepath in self.index:
                    call_action(filepath, kind)
                    self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    if filepath not in self.index:
                        call_action(filepath, kind)
                    user_dirs.append(filepath)
                    raise errors.BadFileKindError(filename=abspath, kind=kind)

            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                transport = _mod_transport.get_transport_from_path(abs_user_dir)
                _mod_controldir.ControlDirFormat.find_format(transport)
                except errors.NotBranchError:
                except errors.UnsupportedFormatError:
                    trace.warning('skipping nested tree %r', abs_user_dir)

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                        if subp in self.index:
                        if subp in conflicts_related:
                        call_action(filepath, kind)
                        self._index_add_entry(subp, kind)

            return added, ignored

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))
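
    # Walk the filesystem below from_dir, skipping the control directory and
    # special files, and yield relative paths decoded from the filesystem
    # encoding; directories for which _has_dir() is false are pruned from the
    # recursion, and include_dirs controls whether directories are yielded.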
    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip("/")
            if self.controldir.is_control_filename(dir_relpath):
            for name in list(dirnames):
                if self.controldir.is_control_filename(name):
                    dirnames.remove(name)
                relpath = os.path.join(dir_relpath, name)
                    yield relpath.decode(osutils._fs_enc)
                except UnicodeDecodeError:
                    raise errors.BadFilenameEncoding(
                        relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(

        """Yield all unversioned files in this WorkingTree.
        with self.lock_read():
            for p in (set(self._iter_files_recursive(include_dirs=True)) - set([p.decode('utf-8') for p in self.index])):
                if not self._has_dir(p):

        # TODO: Maybe this should only write on dirty ?
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
        if self.had_id(file_id):

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_file_id(file_id)
        head = self.repository._git.head()
            # Assume no if basis is not accessible
        root_tree = self.store[head].tree
        tree_lookup_path(self.store.__getitem__, root_tree, path)

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
            return self._lstat(path).st_mtime
        except OSError, (num, msg):
            if num == errno.ENOENT:
                raise errors.NoSuchFile(path)
    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            if self.kind(filename) == 'directory':
        except errors.NoSuchFile:
        filename = filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps[-1].is_exclude:
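
    # dulwich's IgnoreFilterManager does the actual .gitignore matching; it is
    # created lazily from the underlying Git repository and cached until
    # _flush_ignore_list_cache() drops it.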
    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
        _mod_revision.check_not_reserved_id(revid)
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        head = self.repository._git.head()
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        self._basis_fileid_map = self.mapping.get_fileid_map(
            self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()
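
    # The index already stores the blob SHA for every tracked file, so the
    # verifier is simply ("GIT", sha) and no file content needs to be re-read.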
    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
                return ("GIT", self.index[path.encode('utf-8')].sha)
                if self._has_dir(path):
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
                return osutils.sha_file_by_name(abspath)
            except OSError, (num, msg):
                if num in (errno.EISDIR, errno.ENOENT):

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def filter_unversioned_files(self, files):
        return set([p for p in files if not self.is_versioned(p)])

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
                return mode_kind(self.index[path.encode("utf-8")].mode)
                # Maybe it's a directory?
                if self._has_dir(path):
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
                mode = self.index[path.encode('utf-8')].mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
            path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                os.listdir(self.abspath(from_dir).encode(osutils._fs_enc)) if not self.controldir.is_control_filename(name)
                and not self.mapping.is_special_file(name)])
            for path in path_iterator:
                    index_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                value = self.index[index_path]
                kind = osutils.file_kind(self.abspath(path))
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                if kind == 'directory':
                    if self._has_dir(path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                    elif self.is_ignored(path):
                        ie = fk_entries[kind]()
                        ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                path = posixpath.dirname(path).strip("/")

    def _directory_is_tree_reference(self, path):
        # FIXME: Check .gitmodules for path
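
    # Yield inventory entries for the direct children of path by scanning the
    # index for entries below that prefix; immediate subdirectories are
    # synthesised as directory entries, since Git does not track directories
    # itself.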
    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            seen_children = set()
            for item_path, value in self.index.iteritems():
                if self.mapping.is_special_file(item_path):
                if not osutils.is_inside(encoded_path, item_path):
                subpath = posixpath.relpath(item_path, encoded_path)
                    dirname = subpath.split(b'/', 1)[0]
                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
                    (parent, name) = posixpath.split(item_path)
                    file_ie = self._get_file_ie(
                        name.decode('utf-8'),
                        item_path.decode('utf-8'), value, parent_id)
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))

    def set_conflicts(self, conflicts):
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
            self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))
    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                    self._set_conflicted(conflict.path.encode('utf-8'), True)
                    raise errors.UnsupportedOperation(self.add_conflicts, self)
                raise errors.UnsupportedOperation(self.add_conflicts, self)
    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
                ((current_directory_path, fileid),
                 [(file1_path, file1_name, file1_kind, (lstat), file1_id,

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        from bisect import bisect_left
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
            current_disk = next(disk_iterator)
            disk_finished = False
            if not (e.errno == errno.ENOENT or
                    (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
            current_inv = next(inventory_iterator)
        except StopIteration:
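
        # Merge-join the two sorted iterators: directories known to the index
        # (inventory_iterator) and directories found on disk (disk_iterator),
        # so each yielded dirblock combines versioned and on-disk information.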
        while not inv_finished or not disk_finished:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                        len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                    if (bzrdir_loc < len(cur_disk_dir_content)
                            and self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we don't yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
                # everything is unknown
                # everything is missing
                direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                    current_disk = next(disk_iterator)
                except StopIteration:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                # versioned present directory
                # merge the inventory and disk data together
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[4],
                    elif len(path_elements[0]) == 5:
                        dirblock.append((path_elements[0][0],
                                         path_elements[0][1], path_elements[0][2],
                                         path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                                         path_elements[0][1], 'unknown', None,
                                         path_elements[0][4], path_elements[0][5]))
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    current_disk = next(disk_iterator)
                except StopIteration:
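
    # Build the per-directory listing purely from the index: every entry is
    # attributed to its parent directory, and add_entry() creates missing
    # parent directory records on the way up.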
    def _walkdirs(self, prefix=""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        per_dir[('', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == '' or not path.startswith(prefix):
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 self.path2id(path.decode("utf-8")),

        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                if not path.startswith(prefix):
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.iteritems()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                del self.index[old_path.encode('utf-8')]
                self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                self._index_add_entry(new_path, ie.kind)
    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                    # Note: this is slightly unnecessary, because symlinks and
                    # directories have a "text" which is the empty text, and we
                    # know that won't mess up annotations. But it seems cleaner
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            graph = self.branch.repository.get_file_graph()
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                    file_parent_keys.append(key)

            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotator = Annotator(AnnotateProvider(
                self.branch.repository._file_change_scanner))
            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))
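
    # Populate the working tree and .git/index from the current branch head
    # using dulwich's build_index_from_tree; None is passed for the tree when
    # the branch does not have a head yet.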
    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            None if self.branch.head is None else self.store[self.branch.head].tree)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        recover if something got corrupted (like the .git/index file).
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                    if S_ISGITLINK(entry.mode):
                        pass # TODO(jelmer): record and return submodule paths
                        # Let's at least try to use the working tree file:
                            st = self._lstat(self.abspath(entry.path))
                        except OSError, (num, msg):
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                0, 0, len(obj.as_raw_string()), 0,
                        self.index[entry.path] = index_entry_from_stat(st, entry.sha, 0)
    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                possible_transports=possible_transports,
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                        change_reporter=change_reporter,
                        show_base=show_base)
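
# The capability flags on the format class below tell generic breezy code
# what a Git working tree can and cannot do (no versioned directories, no
# caller-supplied file ids, no storing of uncommitted changes, and so on).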
class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        index = Index(a_controldir.root_transport.local_abspath(".git/index"))
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch, index)
        for hook in MutableTree.hooks['post_build_tree']:


class InterIndexGitTree(InterGitTrees):
    """InterTree that works between a Git revision tree and an index."""

    def __init__(self, source, target):
        super(InterIndexGitTree, self).__init__(source, target)
        self._index = target.index

    def is_compatible(cls, source, target):
        from .repository import GitRevisionTree
        return (isinstance(source, GitRevisionTree) and
                isinstance(target, GitWorkingTree))

    def _iter_git_changes(self, want_unchanged=False, specific_files=None,
                          require_versioned=False, extra_trees=None,
                          want_unversioned=False):
        trees = [self.source]
        if extra_trees is not None:
            trees.extend(extra_trees)
        if specific_files is not None:
            specific_files = self.target.find_related_paths_across_trees(
                specific_files, trees,
                require_versioned=require_versioned)
        # TODO(jelmer): Restrict to specific_files, for performance reasons.
        with self.lock_read():
            return changes_between_git_tree_and_working_copy(
                self.source.store, self.source.tree,
                self.target, want_unchanged=want_unchanged,
                want_unversioned=want_unversioned)


tree.InterTree.register_optimiser(InterIndexGitTree)
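
# Strategy: synthesise a Git tree object representing the current index plus
# (optionally) unversioned files, then ask the object store for the tree
# changes between from_tree_sha and that synthesised tree. Entries that have
# vanished from disk are recorded with ZERO_SHA, and entries that became
# directories are "dirified" so commit_tree can replace them with non-empty
# directories if they turn out to have contents.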
def changes_between_git_tree_and_working_copy(store, from_tree_sha, target,
        want_unchanged=False, want_unversioned=False):
    """Determine the changes between a git tree and a working tree with index.
    # Report dirified directories to commit_tree first, so that they can be
    # replaced with non-empty directories if they have contents.
    target_root_path = target.abspath('.').encode(sys.getfilesystemencoding())
    for path, index_entry in target.index.iteritems():
            live_entry = index_entry_from_path(
                target.abspath(path.decode('utf-8')).encode(osutils._fs_enc))
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                # Entry was removed; keep it listed, but mark it as gone.
                blobs[path] = (ZERO_SHA, 0)
            elif e.errno == errno.EISDIR:
                # Entry was turned into a directory
                dirified.append((path, Tree().id, stat.S_IFDIR))
                store.add_object(Tree())
        blobs[path] = (live_entry.sha, cleanup_mode(live_entry.mode))
    if want_unversioned:
        for e in target.extras():
            ap = target.abspath(e)
                np, accessible = osutils.normalized_filename(e)
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
            if stat.S_ISDIR(st.st_mode):
            blob = blob_from_path_and_stat(ap.encode('utf-8'), st)
            store.add_object(blob)
            np = np.encode('utf-8')
            blobs[np] = (blob.id, cleanup_mode(st.st_mode))
    to_tree_sha = commit_tree(store, dirified + [(p, s, m) for (p, (s, m)) in blobs.iteritems()])
    return store.tree_changes(
        from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True), extras