# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import
from cStringIO import (
from collections import defaultdict

from dulwich.errors import NotGitRepository
from dulwich.ignore import (
from dulwich.index import (
    build_index_from_tree,
    index_entry_from_stat,
    blob_from_path_and_stat,
from dulwich.object_store import (
from dulwich.objects import (
from dulwich.repo import Repo

    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    revision as _mod_revision,
    transport as _mod_transport,
from ...decorators import (
from ...mutabletree import (
    changes_from_git_changes,
    tree_delta_from_git_changes,
from .mapping import (


IGNORE_FILENAME = ".gitignore"
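

# GitWorkingTree adapts breezy's WorkingTree API onto a Git working tree:
# the set of tracked files lives in the dulwich index (self.index) and file
# content in the repository's object store, rather than in a bzr inventory.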
class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch, index):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()

    def supports_tree_reference(self):

    def supports_rename_tracking(self):

        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
        self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
        if not self._lock_mode:
            self._lock_mode = 'w'
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)

    def lock_tree_write(self):
        self.branch.lock_read()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)

    def lock_write(self, token=None):
        self.branch.lock_write()
        self._lock_write_tree()
        return lock.LogicalLockResult(self.unlock)

        return self._lock_count >= 1

    def get_physical_lock_status(self):

    @only_raises(errors.LockNotHeld, errors.LockBroken)
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
        self._lock_mode = None

    def _detect_case_handling(self):
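        # Probe for a deliberately mis-cased name under .git: if the stat
        # raises NoSuchFile the filesystem treats names case-sensitively,
        # otherwise it is case-insensitive.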
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
            self.case_sensitive = False

    def merge_modified(self):

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
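        # Pending merges are recorded the same way git does it: the
        # right-hand-side parents are written to .git/MERGE_HEAD, one SHA per
        # line.  Ghost revisions cannot be mapped to a git SHA, hence the
        # GhostRevisionUnusableHere error below.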
            merges = [self.branch.lookup_bzr_revision_id(revid)[0]
                      for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        self.control_transport.put_bytes('MERGE_HEAD', '\n'.join(merges),
            mode=self.controldir._get_file_mode())
            self.control_transport.delete('MERGE_HEAD')
        except errors.NoSuchFile:

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip('\n')
                parents.append(self.branch.lookup_foreign_revision_id(revision_id))

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if isinstance(files, basestring):

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                    all_files.add(filepath)
                    recurse_directory_to_add_files(filepath)

            files = list(all_files)
                return  # nothing to do
            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                         include_unchanged=True, require_versioned=False,
                         want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                except errors.NoSuchFile:
                abs_path = self.abspath(f)
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                    message = "%s does not exist" % (f, )
                        if f in files_to_backup and not force:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                        message = "removed %s" % (f,)
                    self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
            self._versioned_dirs = None

    def smart_add(self, file_list, recurse=True, action=None, save=True):
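        # Rough flow: normalise the given paths, register plain files and
        # symlinks directly in the index, and queue directories so their
        # contents can be scanned below, skipping control files, ignored
        # files and anything that looks like a nested tree.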

        # expand any symlinks in the directory part, while leaving the
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                    raise errors.InvalidNormalization(filepath)
                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    if filepath in self.index:
                    call_action(filepath, kind)
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    if filepath not in self.index:
                        call_action(filepath, kind)
                        user_dirs.append(filepath)
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                        transport = _mod_transport.get_transport_from_path(abs_user_dir)
                        _mod_controldir.ControlDirFormat.find_format(transport)
                    except errors.NotBranchError:
                    except errors.UnsupportedFormatError:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                        if subp in self.index:
                        if subp in conflicts_related:
                        call_action(filepath, kind)
                            self._index_add_entry(subp, kind)

            return added, ignored

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip("/")
            if self.controldir.is_control_filename(dir_relpath):
            for name in list(dirnames):
                if self.controldir.is_control_filename(name):
                    dirnames.remove(name)
                relpath = os.path.join(dir_relpath, name)
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(

        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            for p in (set(self._iter_files_recursive(include_dirs=True)) -
                      set([p.decode('utf-8') for p in self.index])):
                if not self._has_dir(p):

        # TODO: Maybe this should only write on dirty ?
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
        if self.had_id(file_id):

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_file_id(file_id)
            head = self.repository._git.head()
            # Assume no if basis is not accessible
        root_tree = self.store[head].tree
            tree_lookup_path(self.store.__getitem__, root_tree, path)

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
            return self._lstat(path).st_mtime
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
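        # Two layers of ignore handling: breezy's own runtime/user ignore
        # globs are consulted first, then the .gitignore rules via dulwich's
        # IgnoreFilterManager (see _get_ignore_manager below).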
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            if self.kind(filename) == 'directory':
        except errors.NoSuchFile:
        filename = filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps[-1].is_exclude:

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
        _mod_revision.check_not_reserved_id(revid)
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
            head = self.repository._git.head()
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, file_id=None, stat_value=None):
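        # The "verifier" for a git working tree is the blob SHA-1 already
        # recorded in the index, so no file content has to be re-hashed here.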
        with self.lock_read():
                return ("GIT", self.index[path.encode('utf-8')].sha)
                if self._has_dir(path):
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                if e.errno in (errno.EISDIR, errno.ENOENT):

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def filter_unversioned_files(self, files):
        return set([p for p in files if not self.is_versioned(p)])

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
                return mode_kind(self.index[path.encode("utf-8")].mode)
                # Maybe it's a directory?
                if self._has_dir(path):
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def is_executable(self, path, file_id=None):
        with self.lock_read():
            if getattr(self, "_supports_executable", osutils.supports_executable)():
                mode = self._lstat(path).st_mode
                mode = self.index[path.encode('utf-8')].mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
                    os.listdir(self.abspath(from_dir).encode(osutils._fs_enc)) if not self.controldir.is_control_filename(name)
                    and not self.mapping.is_special_file(name)])
            for path in path_iterator:
                    index_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                    value = self.index[index_path]
                kind = osutils.file_kind(self.abspath(path))
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                if kind == 'directory':
                    if self._has_dir(path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                    elif self.is_ignored(path):
                        ie = fk_entries[kind]()
                        ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            for path in self.index:
                if self.mapping.is_special_file(path):
                path = path.decode("utf-8")
                    path = posixpath.dirname(path).strip("/")

    def _directory_is_tree_reference(self, path):
        # FIXME: Check .gitsubmodules for path

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            seen_children = set()
            for item_path, value in self.index.iteritems():
                if self.mapping.is_special_file(item_path):
                if not osutils.is_inside(encoded_path, item_path):
                subpath = posixpath.relpath(item_path, encoded_path)
                    dirname = subpath.split(b'/', 1)[0]
                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
                    (parent, name) = posixpath.split(item_path)
                    file_ie = self._get_file_ie(
                        name.decode('utf-8'),
                        item_path.decode('utf-8'), value, parent_id)
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))

    def set_conflicts(self, conflicts):
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
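        # Conflicts are not stored separately: an index entry is marked
        # conflicted by setting the stage bits (FLAG_STAGEMASK) in its flags
        # word, and cleared again by masking those bits out.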
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
            self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict', 'contents conflict'):
                        self._set_conflicted(conflict.path.encode('utf-8'), True)
                        raise errors.UnsupportedOperation(self.add_conflicts, self)
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
        ((current_directory_path, fileid),
         [(file1_path, file1_name, file1_kind, (lstat), file1_id,

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
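        # Merge two sorted walks below: the versioned entries coming from the
        # index (_walkdirs) and what is actually on disk (osutils.walkdirs),
        # yielding unknown, missing and present entries as appropriate.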
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
            current_disk = next(disk_iterator)
            disk_finished = False
            if not (e.errno == errno.ENOENT or
                    (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
            current_inv = next(inventory_iterator)
        except StopIteration:
        while not inv_finished or not disk_finished:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                        len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                    if (bzrdir_loc < len(cur_disk_dir_content)
                            and self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
                # everything is unknown
                # everything is missing
                direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                    relpath, basename, kind, stat, top_path in
                    cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                    current_disk = next(disk_iterator)
                except StopIteration:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                    for relpath, basename, dkind, stat, fileid, kind in
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                # versioned present directory
                # merge the inventory and disk data together
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                            inv_row[1], disk_row[2],
                            disk_row[3], inv_row[4],
                    elif len(path_elements[0]) == 5:
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], path_elements[0][2],
                            path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], 'unknown', None,
                            path_elements[0][4], path_elements[0][5]))
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    current_disk = next(disk_iterator)
                except StopIteration:

    def _walkdirs(self, prefix=""):
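        # Build a mapping of (directory, file_id) -> set of child entries
        # from the index, then yield the directories in sorted order so the
        # result lines up with what walkdirs() above expects.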
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
            per_dir[('', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == '' or not path.startswith(prefix):
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 self.path2id(path.decode("utf-8")),

        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                if not path.startswith(prefix):
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.iteritems()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                    del self.index[old_path.encode('utf-8')]
                self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            graph = self.branch.repository.get_file_graph()
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                    file_parent_keys.append(key)

            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotator = Annotator(AnnotateProvider(
                self.branch.repository._file_change_scanner))
            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
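        # Populate the working tree and .git/index from the tree object the
        # branch currently points at, using dulwich's build_index_from_tree.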
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            None if self.branch.head is None else self.store[self.branch.head].tree)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                        # Let's at least try to use the working tree file:
                            st = self._lstat(self.abspath(entry.path))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                0, 0, len(obj.as_raw_string()), 0,
                    self.index[entry.path] = index_entry_from_stat(st, entry.sha, 0)

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                        change_reporter=change_reporter,
                        show_base=show_base)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        index = Index(a_controldir.root_transport.local_abspath(".git/index"))
        branch = a_controldir.open_branch()
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch, index)
        for hook in MutableTree.hooks['post_build_tree']:


class InterIndexGitTree(InterGitTrees):
    """InterTree that works between a Git revision tree and an index."""

    def __init__(self, source, target):
        super(InterIndexGitTree, self).__init__(source, target)
        self._index = target.index

    def is_compatible(cls, source, target):
        from .repository import GitRevisionTree
        return (isinstance(source, GitRevisionTree) and
                isinstance(target, GitWorkingTree))

    def _iter_git_changes(self, want_unchanged=False, specific_files=None,
                          require_versioned=False, include_root=False, extra_trees=None):
        trees = [self.source]
        if extra_trees is not None:
            trees.extend(extra_trees)
        if specific_files is not None:
            specific_files = self.target.find_related_paths_across_trees(
                specific_files, trees,
                require_versioned=require_versioned)
        # TODO(jelmer): Restrict to specific_files, for performance reasons.
        with self.lock_read():
            return changes_between_git_tree_and_working_copy(
                self.source.store, self.source.tree,
                self.target, want_unchanged=want_unchanged,
                include_root=include_root)

    def compare(self, want_unchanged=False, specific_files=None,
                extra_trees=None, require_versioned=False, include_root=False,
                want_unversioned=False):
        with self.lock_read():
            changes = self._iter_git_changes(
                want_unchanged=want_unchanged,
                specific_files=specific_files,
                require_versioned=require_versioned,
                include_root=include_root,
                extra_trees=extra_trees)
            source_fileid_map = self.source._fileid_map
            target_fileid_map = self.target._fileid_map
            ret = tree_delta_from_git_changes(changes, self.target.mapping,
                (source_fileid_map, target_fileid_map),
                specific_files=specific_files, require_versioned=require_versioned,
                include_root=include_root)
            if want_unversioned:
                for e in self.target.extras():
                    ret.unversioned.append(
                        (osutils.normalized_filename(e)[0], None,
                         osutils.file_kind(self.target.abspath(e))))

    def iter_changes(self, include_unchanged=False, specific_files=None,
                     pb=None, extra_trees=[], require_versioned=True,
                     want_unversioned=False):
        with self.lock_read():
            changes = self._iter_git_changes(
                want_unchanged=include_unchanged,
                specific_files=specific_files,
                require_versioned=require_versioned,
                extra_trees=extra_trees)
            if want_unversioned:
                changes = itertools.chain(
                    untracked_changes(self.target))
            return changes_from_git_changes(
                changes, self.target.mapping,
                specific_files=specific_files,
                include_unchanged=include_unchanged)


tree.InterTree.register_optimiser(InterIndexGitTree)


def untracked_changes(tree):
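    # Yield change tuples for files that exist on disk but are not in the
    # index, hashing each one as a git blob so callers get its object id.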
    for e in tree.extras():
        ap = tree.abspath(e)
            np, accessible = osutils.normalized_filename(e)
        except UnicodeDecodeError:
            raise errors.BadFilenameEncoding(
        if stat.S_ISDIR(st.st_mode):
            obj_id = blob_from_path_and_stat(ap.encode('utf-8'), st).id
        yield ((None, np), (None, st.st_mode), (None, obj_id))


def changes_between_git_tree_and_index(store, from_tree_sha, target,
        want_unchanged=False, update_index=False):
    """Determine the changes between a git tree and a working tree with index.
    """
    to_tree_sha = target.index.commit(store)
    return store.tree_changes(from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True)


def changes_between_git_tree_and_working_copy(store, from_tree_sha, target,
        want_unchanged=False, update_index=False, include_root=False):
    """Determine the changes between a git tree and a working tree with index.
    """
    blobs = iter_fresh_blobs(target.index, target.abspath('.').encode(sys.getfilesystemencoding()))
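    # commit_tree() writes a synthetic tree object built from the fresh blobs
    # of the current working copy into the store, so tree_changes() can then
    # diff it against from_tree_sha like any other pair of trees.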
    to_tree_sha = commit_tree(store, blobs)
    return store.tree_changes(from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True)