# Copyright (C) 2008-2011 Jelmer Vernooij <jelmer@samba.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""An adapter between a Git index and a Bazaar Working Tree"""
20
from __future__ import absolute_import
23
from cStringIO import (
26
from collections import defaultdict
28
from dulwich.errors import NotGitRepository
29
from dulwich.ignore import (
32
from dulwich.index import (
37
index_entry_from_stat,
39
blob_from_path_and_stat,
41
from dulwich.object_store import (
44
from dulwich.objects import (
49
from dulwich.repo import Repo
58
conflicts as _mod_conflicts,
59
controldir as _mod_controldir,
64
revision as _mod_revision,
66
transport as _mod_transport,
73
from ...mutabletree import (
82
changes_from_git_changes,
83
tree_delta_from_git_changes,
87
from .mapping import (
92
IGNORE_FILENAME = ".gitignore"


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch, index):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = controldir.transport
        self._format = GitWorkingTreeFormat()
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()
        self._fileid_map = self._basis_fileid_map.copy()
116
def supports_tree_reference(self):
119
def supports_rename_tracking(self):
123
"""Lock the repository for read operations.
125
:return: A breezy.lock.LogicalLockResult.
127
if not self._lock_mode:
128
self._lock_mode = 'r'
132
self._lock_count += 1
133
self.branch.lock_read()
134
return lock.LogicalLockResult(self.unlock)
136
def lock_tree_write(self):
137
if not self._lock_mode:
138
self._lock_mode = 'w'
141
elif self._lock_mode == 'r':
142
raise errors.ReadOnlyError(self)
145
self.branch.lock_read()
146
return lock.LogicalLockResult(self.unlock)
148
def lock_write(self, token=None):
149
if not self._lock_mode:
150
self._lock_mode = 'w'
153
elif self._lock_mode == 'r':
154
raise errors.ReadOnlyError(self)
157
self.branch.lock_write()
158
return lock.LogicalLockResult(self.unlock)
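
    # Illustrative sketch (comments only; ``wt`` is an assumed, already-opened
    # GitWorkingTree): the lock methods above return a
    # breezy.lock.LogicalLockResult, so they can be used directly or as
    # context managers:
    #
    #   with wt.lock_read():
    #       ...                       # read-only operations
    #
    #   lock_result = wt.lock_write()
    #   try:
    #       ...                       # mutating operations
    #   finally:
    #       lock_result.unlock()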

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):

    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
            return
        self._lock_mode = None

    def _detect_case_handling(self):
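        # Probe the .git config file with scrambled case: if the transport
        # reports it missing, the filesystem treats names case-sensitively;
        # if the probe succeeds, it folds case.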
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def merge_modified(self):

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes('MERGE_HEAD', '\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This API will try to retrieve the tree data
        for each element of revision_ids from the tree's repository. If you
        have tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(revision_ids,
                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])
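
    # Illustrative sketch (comments only; ``wt`` and the revision ids are
    # assumed values): the first parent becomes the tree's last revision and
    # any additional parents are recorded as pending merges in MERGE_HEAD:
    #
    #   wt.set_parent_ids([b'last-revid', b'merged-revid'])
    #   wt.get_parent_ids()   # -> [b'last-revid', b'merged-revid']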

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip('\n')
                parents.append(self.branch.lookup_foreign_revision_id(revision_id))
        return parents

    def iter_children(self, file_id):
        dpath = self.id2path(file_id) + "/"
        if dpath in self.index:
        for path in self.index:
            if not path.startswith(dpath):
                continue
            if "/" in path[len(dpath):]:
                # Not a direct child but something further down
                continue
            yield self.path2id(path)

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
               force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if isinstance(files, basestring):
            files = [files]

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        all_files = set()
        files_to_backup = []

        # Sort needed to first handle directory content before the directory

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                all_files.add(filepath)
                recurse_directory_to_add_files(filepath)

            files = list(all_files)

            if len(files) == 0:
                return  # nothing to do

            # Sort needed to first handle directory content before the directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(self.basis_tree(),
                         include_unchanged=True, require_versioned=False,
                         want_unversioned=True, specific_files=files):
                    if versioned[0] == False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None) and
                            osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(osutils.parent_directories(path[1]))

            for f in files:
                message = None
                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None
                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None
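
    # Illustrative sketch (comments only; ``wt`` is an assumed GitWorkingTree):
    #
    #   wt.remove(['docs/old.txt'], keep_files=True)          # unversion only
    #   wt.remove(['build'], keep_files=False, force=True)    # also delete it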

    def smart_add(self, file_list, recurse=True, action=None, save=True):
        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        user_dirs = []
        added = []
        ignored = {}

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)

            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                try:
                    transport = _mod_transport.get_transport_from_path(abs_user_dir)
                    _mod_controldir.ControlDirFormat.find_format(transport)
                    subtree = True
                except errors.NotBranchError:
                    subtree = False
                except errors.UnsupportedFormatError:
                    subtree = False

                if subtree:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        call_action(subp, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)

            return added, ignored
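
    # Illustrative sketch (comments only; ``wt`` is an assumed GitWorkingTree):
    #
    #   added, ignored = wt.smart_add(['.'])
    #   # ``added`` lists the paths that were added to the index; ``ignored``
    #   # maps each matched ignore pattern to the paths it suppressed.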

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
            dir_relpath = dirpath[len(self.basedir):].strip("/")
            if self.controldir.is_control_filename(dir_relpath):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(name):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError as e:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                    try:
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree."""
        with self.lock_read():
            for p in (set(self._iter_files_recursive(include_dirs=True)) - set(self.index)):
                try:
                    up = p.decode(osutils._fs_enc)
                except UnicodeDecodeError:
                    raise errors.BadFilenameEncoding(
                        p, osutils._fs_enc)
                if not self._has_dir(up):
                    yield up

    def flush(self):
        # TODO: Maybe this should only write on dirty ?
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)

    def __iter__(self):
        with self.lock_read():
            for path in self.index:
                yield self.path2id(path)
            for path in self._versioned_dirs:
                yield self.path2id(path)

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_file_id(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        root_tree = self.store[head].tree
        try:
            tree_lookup_path(self.store.__getitem__, root_tree, path)
        except KeyError:
            return False
        return True

    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
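        # Illustrative sketch (comments only; ``wt`` is an assumed
        # GitWorkingTree whose .gitignore lists ``build/``):
        #
        #   wt.is_ignored('build/foo.o')   # -> the matching pattern (truthy)
        #   wt.is_ignored('setup.py')      # -> None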
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip(b'/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps[-1].is_exclude:
            return None

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        try:
            head = self.repository._git.head()
        except KeyError:
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        else:
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)

    def get_file_verifier(self, path, file_id=None, stat_value=None):
        with self.lock_read():
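            # A dulwich index entry is a 10-field tuple whose second-to-last
            # field is the blob's SHA1, so that hash is used as the verifier
            # value here.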
            try:
                return ("GIT", self.index[path][-2])
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                if e.errno in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def filter_unversioned_files(self, files):
        return set([p for p in files if not self.is_versioned(p)])

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path, file_id=None):
        with self.lock_read():
            try:
                return mode_kind(self.index[path.encode("utf-8")][4])
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(path):
                    return "directory"
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def is_executable(self, path, file_id=None):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            mode = self._lstat(path).st_mode
        else:
            mode = self.index[path.encode('utf-8')].mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable", osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        if from_dir is None:
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
            else:
                start = os.path.join(self.basedir, from_dir)
                path_iterator = sorted([os.path.join(from_dir, name) for name in
                    os.listdir(start) if not self.controldir.is_control_filename(name)
                    and not self.mapping.is_special_file(name)])
            for path in path_iterator:
                try:
                    value = self.index[path]
                except KeyError:
                    value = None
                try:
                    path = path.decode("utf-8")
                except UnicodeDecodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                kind = osutils.file_kind(self.abspath(path))
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                    pass
                if kind == 'directory':
                    if self._has_dir(path):
                        ie = self._get_dir_ie(path, self.path2id(path))
                        status = "V"
                        file_id = ie.file_id
                    elif self.is_ignored(path):
                        status = "I"
                        ie = fk_entries[kind]()
                        file_id = None
                    else:
                        status = "?"
                        ie = fk_entries[kind]()
                        file_id = None
                    yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
                else:
                    ie = fk_entries[kind]()
                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                    pass
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            paths = set()
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                while path != "":
                    paths.add(path)
                    path = posixpath.dirname(path).strip("/")
            return paths

    def _directory_is_tree_reference(self, path):
        # FIXME: Check .gitsubmodules for path
        return False

    def iter_child_entries(self, path, file_id=None):
        encoded_path = path.encode('utf-8')
        parent_id = self.path2id(path)
        found_any = False
        seen_children = set()
        for item_path, value in self.index.iteritems():
            if self.mapping.is_special_file(item_path):
                continue
            if not osutils.is_inside(encoded_path, item_path):
                continue
            found_any = True
            subpath = posixpath.relpath(item_path, encoded_path)
            if b'/' in subpath:
                dirname = subpath.split(b'/', 1)[0]
                file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
            else:
                (parent, name) = posixpath.split(item_path)
                file_ie = self._get_file_ie(name, item_path, value, parent_id)
            yield file_ie
        if not found_any:
            raise errors.NoSuchFile(path)

    def conflicts(self):
        with self.lock_read():
            return _mod_conflicts.ConflictList()

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
        ((current_directory_path, fileid),
         [(file1_path, file1_name, file1_kind, (lstat), file1_id,
           file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT or
                (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
                    len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                    if (bzrdir_loc < len(cur_disk_dir_content)
                        and self.controldir.is_control_filename(
                            cur_disk_dir_content[bzrdir_loc][0])):
                        # we don't yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = cmp(current_inv[0][0], cur_disk_dir_relpath)
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                    relpath, basename, kind, stat, top_path in
                    cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                    for relpath, basename, dkind, stat, fileid, kind in
                    current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                    current_inv[1] + cur_disk_dir_content,
                    key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                            inv_row[1], disk_row[2],
                            disk_row[3], inv_row[4],
                            inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unversioned file
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], path_elements[0][2],
                            path_elements[0][3], None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append((path_elements[0][0],
                            path_elements[0][1], 'unknown', None,
                            path_elements[0][4], path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=""):
        per_dir = defaultdict(set)
        per_dir[('', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == '' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            assert isinstance(value, tuple) and len(value) == 10
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                kind,
                self.path2id(path.decode("utf-8")),
                kind))
        for path, value in self.index.iteritems():
            if self.mapping.is_special_file(path):
                continue
            if not path.startswith(prefix):
                continue
            add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.iteritems()))
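
    # Illustrative sketch (comments only; paths and ids below are made up):
    # each item yielded by _walkdirs()/walkdirs() pairs a directory key with
    # its sorted children, roughly:
    #
    #   ((u'src', b'src-file-id'),
    #    [(u'src/foo.py', u'foo.py', 'file', <lstat>, b'foo-id', 'file')])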

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                del self.index[old_path.encode('utf-8')]
                self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks and
                        # directories have a "text" which is the empty text, and we
                        # know that won't mess up annotations. But it seems cleaner
                        continue
                    parent_text_key = (parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            graph = self.branch.repository.get_file_graph()
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotator = Annotator(AnnotateProvider(
                self.branch.repository._file_change_scanner))
            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        index = Index(a_controldir.root_transport.local_abspath(".git/index"))
        branch = a_controldir.open_branch()
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch, index)
        for hook in MutableTree.hooks['post_build_tree']:
            hook(wt)
        return wt


class InterIndexGitTree(InterGitTrees):
    """InterTree that works between a Git revision tree and an index."""

    def __init__(self, source, target):
        super(InterIndexGitTree, self).__init__(source, target)
        self._index = target.index

    @classmethod
    def is_compatible(cls, source, target):
        from .repository import GitRevisionTree
        return (isinstance(source, GitRevisionTree) and
                isinstance(target, GitWorkingTree))

    def _iter_git_changes(self, want_unchanged=False, specific_files=None,
                          require_versioned=False, include_root=False):
        if require_versioned and specific_files:
            for path in specific_files:
                if (not self.source.is_versioned(path) and
                        not self.target.is_versioned(path)):
                    raise errors.PathsNotVersionedError(path)
        # TODO(jelmer): Restrict to specific_files, for performance reasons.
        with self.lock_read():
            return changes_between_git_tree_and_working_copy(
                self.source.store, self.source.tree,
                self.target, want_unchanged=want_unchanged,
                include_root=include_root)

    def compare(self, want_unchanged=False, specific_files=None,
                extra_trees=None, require_versioned=False, include_root=False,
                want_unversioned=False):
        with self.lock_read():
            changes = self._iter_git_changes(
                want_unchanged=want_unchanged,
                specific_files=specific_files,
                require_versioned=require_versioned,
                include_root=include_root)
            source_fileid_map = self.source._fileid_map
            target_fileid_map = self.target._fileid_map
            ret = tree_delta_from_git_changes(changes, self.target.mapping,
                (source_fileid_map, target_fileid_map),
                specific_files=specific_files, require_versioned=require_versioned,
                include_root=include_root)
            if want_unversioned:
                for e in self.target.extras():
                    ret.unversioned.append(
                        (osutils.normalized_filename(e)[0], None,
                         osutils.file_kind(self.target.abspath(e))))
            return ret

    def iter_changes(self, include_unchanged=False, specific_files=None,
                     pb=None, extra_trees=[], require_versioned=True,
                     want_unversioned=False):
        with self.lock_read():
            changes = self._iter_git_changes(
                want_unchanged=include_unchanged,
                specific_files=specific_files,
                require_versioned=require_versioned)
            if want_unversioned:
                changes = itertools.chain(
                    changes,
                    untracked_changes(self.target))
            return changes_from_git_changes(
                changes, self.target.mapping,
                specific_files=specific_files,
                include_unchanged=include_unchanged)


tree.InterTree.register_optimiser(InterIndexGitTree)


def untracked_changes(tree):
    for e in tree.extras():
        ap = tree.abspath(e)
        st = os.lstat(ap)
        try:
            np, accessible = osutils.normalized_filename(e)
        except UnicodeDecodeError:
            raise errors.BadFilenameEncoding(
                e, osutils._fs_enc)
        if stat.S_ISDIR(st.st_mode):
            continue
        obj_id = blob_from_path_and_stat(ap.encode('utf-8'), st).id
        yield ((None, np), (None, st.st_mode), (None, obj_id))


def changes_between_git_tree_and_index(store, from_tree_sha, target,
        want_unchanged=False, update_index=False):
    """Determine the changes between a git tree and a working tree with index.
    """
    to_tree_sha = target.index.commit(store)
    return store.tree_changes(from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True)
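

# Illustrative note (based on dulwich's object-store ``tree_changes`` API):
# each change produced by the helpers here is a triple of the form
# ((old_path, new_path), (old_mode, new_mode), (old_sha, new_sha)), which is
# also the shape that untracked_changes() above emits for unversioned files.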


def changes_between_git_tree_and_working_copy(store, from_tree_sha, target,
        want_unchanged=False, update_index=False, include_root=False):
    """Determine the changes between a git tree and a working tree with index.
    """
    blobs = iter_fresh_blobs(
        target.index, target.abspath('.').encode(sys.getfilesystemencoding()))
    to_tree_sha = commit_tree(store, blobs)
    return store.tree_changes(from_tree_sha, to_tree_sha, include_trees=True,
        want_unchanged=want_unchanged, change_type_same=True)
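

# Illustrative sketch (comments only; ``wt`` is an assumed GitWorkingTree):
# the InterIndexGitTree optimiser registered above is what breezy selects when
# a git working tree is compared against a git revision tree, e.g.
#
#   basis = wt.basis_tree()
#   delta = wt.changes_from(basis)       # goes through InterIndexGitTree.compare
#   for change in wt.iter_changes(basis):
#       ...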