        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

    def _get_submodule_index(self, relpath):
        if not isinstance(relpath, bytes):
            raise TypeError(relpath)
        try:
            info = self._submodule_info()[relpath]
        except KeyError:
            index_path = os.path.join(
                self.basedir, relpath.decode('utf-8'), '.git', 'index')
        else:
            index_path = self.control_transport.local_abspath(
                posixpath.join('modules', info[1].decode('utf-8'), 'index'))
        return Index(index_path)

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
            self._read_index()
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        # TODO(jelmer): Actually create index.lock
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
            try:
                self._index_file = GitFile(
                    self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
            self._read_index()
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1

    def lock_tree_write(self):
        self.branch.lock_read()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise
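
    # Illustrative usage sketch (not part of the original module): the lock
    # helpers above hand back a lock.LogicalLockResult, so callers normally
    # use them as context managers. The checkout path below is a hypothetical
    # example value.
    #
    #   tree = workingtree.WorkingTree.open('/path/to/git/checkout')
    #   with tree.lock_tree_write():
    #       tree.add(['newfile.txt'])   # index mutations happen under the lock
    #   with tree.lock_read():
    #       kind = tree.kind('newfile.txt')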

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(
                revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        self.control_transport.put_bytes(
            'MERGE_HEAD', b'\n'.join(merges),
            mode=self.controldir._get_file_mode())

            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(
                revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

        files = list(all_files)

        if len(files) == 0:
            return  # nothing to do

        # Sort needed to first handle directory content before the directory
        files.sort(reverse=True)

        # Bail out if we are going to delete files we shouldn't
        if not keep_files and not force:
            for change in self.iter_changes(
                    self.basis_tree(), include_unchanged=True,
                    require_versioned=False, want_unversioned=True,
                    specific_files=files):
                if change.versioned[0] is False:
                    # The record is unknown or newly added
                    files_to_backup.append(change.path[1])
                    files_to_backup.extend(
                        osutils.parent_directories(change.path[1]))
                elif (change.changed_content and (change.kind[1] is not None)
                        and osutils.is_inside_any(files, change.path[1])):
                    # Versioned and changed, but not deleted, and still
                    # in one of the dirs to be deleted.
                    files_to_backup.append(change.path[1])
                    files_to_backup.extend(
                        osutils.parent_directories(change.path[1]))

            raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(
                    self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False,
                              recurse_nested=False):
        if from_dir is None:
            from_dir = u""
        if not isinstance(from_dir, text_type):
            raise TypeError(from_dir)
        encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc)
        for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(
                    dir_relpath.decode(osutils._fs_enc)):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if not recurse_nested and self._directory_is_tree_reference(relpath.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                    if not self.is_versioned(relpath.decode(osutils._fs_enc)):
                        dirnames.remove(name)
            for name in filenames:
                if self.mapping.is_special_file(name):
                    continue
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc, 'replace')):
                    continue
                yp = os.path.join(dir_relpath, name)
                try:
                    yield yp.decode(osutils._fs_enc)
                except UnicodeDecodeError:
                    raise errors.BadFilenameEncoding(
                        yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set(
                [p.decode('utf-8') for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=False))
            return iter(all_paths - index_paths)
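
    # Illustrative usage sketch (assumption, not from the original source):
    # extras() compares the files found on disk with the paths recorded in
    # the git index, so unversioned files can be listed like this:
    #
    #   with tree.lock_read():
    #       unknown_files = sorted(tree.extras())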

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file
        # name :(
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        try:
            shaf = SHA1Writer(f)
            write_index_dict(shaf, self.index)
            shaf.close()
        except BaseException:
            f.abort()
            raise
        self._index_dirty = False

    def get_file_mtime(self, path):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(
                ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += '/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip('/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
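
    # Illustrative usage sketch (hypothetical example): is_ignored() first
    # checks the runtime/user ignore globs and then the .gitignore-based
    # ignore manager, returning the matching pattern (or None) for a path:
    #
    #   with tree.lock_read():
    #       for p in tree.extras():
    #           if tree.is_ignored(p) is None:
    #               print('unknown:', p)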

                raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        pass

    def get_file_verifier(self, path, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)
def stored_kind(self, path):
714
def stored_kind(self, path, file_id=None):
714
715
with self.lock_read():
715
encoded_path = path.encode('utf-8')
716
(index, subpath) = self._lookup_index(encoded_path)
716
(index, subpath) = self._lookup_index(path.encode('utf-8'))
718
718
return mode_kind(index[subpath].mode)
720
720
# Maybe it's a directory?
721
if self._has_dir(encoded_path):
721
if self._has_dir(path):
722
722
return "directory"
723
723
raise errors.NoSuchFile(path)
726
726
return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        encoded_path = self.abspath(path.decode('utf-8')).encode(
            osutils._fs_enc)
        return index_entry_from_path(encoded_path)

    def is_executable(self, path):
        with self.lock_read():
            if self._supports_executable():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    return False
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if self._supports_executable():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(
                path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True,
                   recurse_nested=False):
        if from_dir is None or from_dir == '.':
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile, 'symlink': tree.TreeLink}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(
                    self._iter_files_recursive(
                        from_dir, include_dirs=True,
                        recurse_nested=recurse_nested))
            else:
                encoded_from_dir = self.abspath(from_dir).encode(
                    osutils._fs_enc)
                path_iterator = sorted(
                    [os.path.join(from_dir, name.decode(osutils._fs_enc))
                     for name in os.listdir(encoded_from_dir)
                     if not self.controldir.is_control_filename(
                         name.decode(osutils._fs_enc)) and
                     not self.mapping.is_special_file(
                         name.decode(osutils._fs_enc))])
            for path in path_iterator:
                try:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, subpath) = self._lookup_index(encoded_path)
                try:
                    value = index[subpath]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(
                        parent, dir_ids):
                    pass
                if kind == 'tree-reference' and recurse_nested:
                    ie = self._get_dir_ie(path, self.path2id(path))
                    yield (posixpath.relpath(path, from_dir), 'V', 'directory',
                           ie)
                    continue
                if kind == 'directory':
                    if path != from_dir:
                        if self._has_dir(encoded_path):
                            ie = self._get_dir_ie(path, self.path2id(path))
                            status = "V"
                        elif self.is_ignored(path):
                            status = "I"
                            ie = fk_entries[kind]()
                        else:
                            status = "?"
                            ie = fk_entries[kind]()
                        yield (posixpath.relpath(path, from_dir), status, kind,
                               ie)
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield (posixpath.relpath(path, from_dir), "V", ie.kind, ie)
                else:
                    ie = fk_entries[kind]()
                    yield (posixpath.relpath(path, from_dir),
                           ("I" if self.is_ignored(path) else "?"), kind, ie)

    def all_file_ids(self):
        raise errors.UnsupportedOperation(self.all_file_ids, self)

    def all_versioned_paths(self):
        with self.lock_read():

    def iter_child_entries(self, path):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            for item_path, value in self.index.iteritems():
                decoded_item_path = item_path.decode('utf-8')
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(path, decoded_item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(decoded_item_path, path)
                if '/' in subpath:
                    dirname = subpath.split('/', 1)[0]
                    file_ie = self._get_dir_ie(
                        posixpath.join(path, dirname), parent_id)
                else:
                    (unused_parent, name) = posixpath.split(decoded_item_path)
                    file_ie = self._get_file_ie(
                        name, decoded_item_path, value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)
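
    # Illustrative usage sketch (hypothetical example): iter_child_entries()
    # walks the index and yields one entry per direct child of the given
    # directory, raising NoSuchFile for an unversioned non-root path:
    #
    #   with tree.lock_read():
    #       children = [entry.name for entry in tree.iter_child_entries('')]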

            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict',
                                           'contents conflict'):
                    try:
                        self._set_conflicted(
                            conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(
                            self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
                        and len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                                             ['.bzr', '.bzr'])
                    if (bzrdir_loc < len(cur_disk_dir_content) and
                            self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
                             - (current_inv[0][0] < cur_disk_dir_relpath))
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                            current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[4],
                                         inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unknown disk file
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             path_elements[0][2], path_elements[0][3],
                             None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             'unknown', None, path_elements[0][4],
                             path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=u""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        if prefix == b"":
            per_dir[(u'', self.path2id(''))] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 kind, None,
                 self.path2id(path.decode("utf-8")),
                 kind))
        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(
                    old_path.encode('utf-8'))
                try:
                    self._index_del_entry(index, old_subpath)
                except KeyError:
                    pass
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        new_path, ie.kind,
                        reference_revision=ie.reference_revision)
                else:
                    self._index_add_entry(new_path, ie.kind)

    def annotate_iter(self, path,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name
                    # in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks
                        # and directories have a "text" which is the empty
                        # text, and we know that won't mess up annotations. But
                        # it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            return annotator.annotate_flat(this_key)

                self.user_transport.local_abspath('.'),
                self.control_transport.local_abspath("index"),
                self.store,
                None
                if self.branch.head is None
                else self.store[self.branch.head].tree,
                honor_filemode=self._supports_executable())

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.
        """
        with self.lock_tree_write():
            self.index.clear()
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(
                        self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(
                                entry.path.decode('utf-8')))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                                 0, 0, len(
                                                     obj.as_raw_string()), 0,
                                                 0, 0))
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)

    def _update_git_tree(self, old_revision, new_revision, change_reporter=None,
                         show_base=False):
        basis_tree = self.revision_tree(old_revision)
        if new_revision != old_revision:
            with basis_tree.lock_read():
                new_basis_tree = self.branch.basis_tree()
                merge.merge_inner(
                    self.branch,
                    new_basis_tree,
                    basis_tree,
                    this_tree=self,
                    change_reporter=change_reporter,
                    show_base=show_base)

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
             show_base=False, tag_selector=None):
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local, tag_selector=tag_selector)
            self._update_git_tree(
                old_revision=old_revision,
                new_revision=self.branch.last_revision(),
                change_reporter=change_reporter,
                show_base=show_base)
            return count
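
    # Illustrative usage sketch (hypothetical example): pull() updates the
    # branch first and then calls _update_git_tree() so the working tree is
    # merged up to the new branch head:
    #
    #   other = _mod_branch.Branch.open('https://example.com/upstream')
    #   tree.pull(other, overwrite=False)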

    def add_reference(self, sub_tree):
        try:
            sub_tree_path = self.relpath(sub_tree.basedir)
        except errors.PathNotChild:
            raise BadReferenceTarget(
                self, sub_tree, 'Target not inside tree.')

        self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, branch=None):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains .git, then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """

            new_parents = [revision_id]
            tree.set_parent_ids(new_parents)

    def reference_parent(self, path, possible_transports=None):
        remote_url = self.get_reference_info(path)
        if remote_url is None:
            trace.warning("Unable to find submodule info for %s", path)
            return None
        return _mod_branch.Branch.open(remote_url, possible_transports=possible_transports)

    def get_reference_info(self, path):
        submodule_info = self._submodule_info()
        info = submodule_info.get(path.encode('utf-8'))
        if info is None:
            return None
        return info[0].decode('utf-8')

    def set_reference_info(self, tree_path, branch_location):
        path = self.abspath('.gitmodules')
        try:
            config = GitConfigFile.from_path(path)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                config = GitConfigFile()
            else:
                raise
        section = (b'submodule', tree_path.encode('utf-8'))
        if branch_location is None:
            try:
                del config[section]
            except KeyError:
                pass
        else:
            branch_location = urlutils.join(
                urlutils.strip_segment_parameters(self.branch.user_url),
                branch_location)
            config.set(
                section,
                b'path', tree_path.encode('utf-8'))
            config.set(
                section,
                b'url', branch_location.encode('utf-8'))
        config.write_to_path(path)
        self.add('.gitmodules')
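
    # Illustrative usage sketch (hypothetical example): the reference-info
    # helpers above store nested-tree locations in the .gitmodules file, e.g.:
    #
    #   tree.set_reference_info('vendor/lib', 'https://example.com/lib.git')
    #   url = tree.get_reference_info('vendor/lib')    # stored URL or None
    #   branch = tree.reference_parent('vendor/lib')   # opens that URL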


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):