@@ -86 +86 @@
 IGNORE_FILENAME = ".gitignore"


-class GitWorkingTree(MutableGitIndexTree,workingtree.WorkingTree):
+class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
     """A Git working tree."""

     def __init__(self, controldir, repo, branch):
@@ -136 +136 @@
             self._lock_mode = 'w'
             self._lock_count = 1
             try:
-                self._index_file = GitFile(self.control_transport.local_abspath('index'), 'wb')
+                self._index_file = GitFile(
+                    self.control_transport.local_abspath('index'), 'wb')
             except FileLocked:
                 raise errors.LockContention('index')
             self._read_index()
         elif self._lock_mode == 'r':
             raise errors.ReadOnlyError(self)
         else:
             self._lock_count += 1

     def lock_tree_write(self):
         self.branch.lock_read()
         try:
             self._lock_write_tree()
             return lock.LogicalLockResult(self.unlock)
         except BaseException:
             self.branch.unlock()
@@ -222 +223 @@
     def _set_merges_from_parent_ids(self, rhs_parent_ids):
         try:
-            merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
+            merges = [self.branch.lookup_bzr_revision_id(
+                revid)[0] for revid in rhs_parent_ids]
         except errors.NoSuchRevision as e:
             raise errors.GhostRevisionUnusableHere(e.revision)
         if merges:
-            self.control_transport.put_bytes('MERGE_HEAD', b'\n'.join(merges),
+            self.control_transport.put_bytes(
+                'MERGE_HEAD', b'\n'.join(merges),
                 mode=self.controldir._get_file_mode())
@@ -246 +249 @@
         working tree. Any of these may be ghosts.
         """
         with self.lock_tree_write():
-            self._check_parents_for_ghosts(revision_ids,
-                allow_leftmost_as_ghost=allow_leftmost_as_ghost)
+            self._check_parents_for_ghosts(
+                revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost)
             for revision_id in revision_ids:
                 _mod_revision.check_not_reserved_id(revision_id)
@@ -288 +292 @@
     def remove(self, files, verbose=False, to_file=None, keep_files=True,
                force=False):
         """Remove nominated files from the working tree metadata.

         :param files: File paths relative to the basedir.
@@ -303 +307 @@
         def backup(file_to_backup):
             abs_path = self.abspath(file_to_backup)
-            backup_name = self.controldir._available_backup_name(file_to_backup)
+            backup_name = self.controldir._available_backup_name(
+                file_to_backup)
             osutils.rename(abs_path, self.abspath(backup_name))
             return "removed %s (but kept a copy: %s)" % (
                 file_to_backup, backup_name)
@@ -339 +344 @@
         files = list(all_files)

         if len(files) == 0:
-            return # nothing to do
+            return  # nothing to do

-        # Sort needed to first handle directory content before the directory
+        # Sort needed to first handle directory content before the
+        # directory
         files.sort(reverse=True)

         # Bail out if we are going to delete files we shouldn't
         if not keep_files and not force:
             for (file_id, path, content_change, versioned, parent_id, name,
-                kind, executable) in self.iter_changes(self.basis_tree(),
-                    include_unchanged=True, require_versioned=False,
-                    want_unversioned=True, specific_files=files):
-                if versioned[0] == False:
+                 kind, executable) in self.iter_changes(
+                     self.basis_tree(), include_unchanged=True,
+                     require_versioned=False, want_unversioned=True,
+                     specific_files=files):
+                if versioned[0] is False:
                     # The record is unknown or newly added
                     files_to_backup.append(path[1])
-                    files_to_backup.extend(osutils.parent_directories(path[1]))
-                elif (content_change and (kind[1] is not None) and
-                    osutils.is_inside_any(files, path[1])):
+                    files_to_backup.extend(
+                        osutils.parent_directories(path[1]))
+                elif (content_change and (kind[1] is not None)
+                      and osutils.is_inside_any(files, path[1])):
                     # Versioned and changed, but not deleted, and still
                     # in one of the dirs to be deleted.
                     files_to_backup.append(path[1])
-                    files_to_backup.extend(osutils.parent_directories(path[1]))
+                    files_to_backup.extend(
+                        osutils.parent_directories(path[1]))
@@ -429 +439 @@
                     raise workingtree.SettingFileIdUnsupported()

         with self.lock_tree_write():
-            for filepath in osutils.canonical_relpaths(self.basedir, file_list):
+            for filepath in osutils.canonical_relpaths(
+                    self.basedir, file_list):
                 filepath, can_access = osutils.normalized_filename(filepath)
                 if not can_access:
                     raise errors.InvalidNormalization(filepath)

                 abspath = self.abspath(filepath)
                 kind = osutils.file_kind(abspath)
                 if kind in ("file", "symlink"):
-                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
+                    (index, subpath) = self._lookup_index(
+                        filepath.encode('utf-8'))
                     if subpath in index:
                         # Already present
@@ -446 +458 @@
                         self._index_add_entry(filepath, kind)
                     added.append(filepath)
                 elif kind == "directory":
-                    (index, subpath) = self._lookup_index(filepath.encode('utf-8'))
+                    (index, subpath) = self._lookup_index(
+                        filepath.encode('utf-8'))
                     if subpath not in index:
                         call_action(filepath, kind)
@@ -457 +470 @@
                 abs_user_dir = self.abspath(user_dir)
                 if user_dir != '':
                     try:
-                        transport = _mod_transport.get_transport_from_path(abs_user_dir)
+                        transport = _mod_transport.get_transport_from_path(
+                            abs_user_dir)
                         _mod_controldir.ControlDirFormat.find_format(transport)
                         subtree = True
                     except errors.NotBranchError:
@@ -473 +487 @@
                 for name in os.listdir(abs_user_dir):
                     subp = os.path.join(user_dir, name)
-                    if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
+                    if (self.is_control_filename(subp) or
+                            self.mapping.is_special_file(subp)):
                         continue
                     ignore_glob = self.is_ignored(subp)
                     if ignore_glob is not None:
@@ -501 +517 @@
     def _iter_files_recursive(self, from_dir=None, include_dirs=False):
         if from_dir is None:
             from_dir = ""
-        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
+        encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc)
+        for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir):
             dir_relpath = dirpath[len(self.basedir):].strip(b"/")
-            if self.controldir.is_control_filename(dir_relpath.decode(osutils._fs_enc)):
+            if self.controldir.is_control_filename(
+                    dir_relpath.decode(osutils._fs_enc)):
                 continue
             for name in list(dirnames):
-                if self.controldir.is_control_filename(name.decode(osutils._fs_enc)):
+                if self.controldir.is_control_filename(
+                        name.decode(osutils._fs_enc)):
                     dirnames.remove(name)
                     continue
                 relpath = os.path.join(dir_relpath, name)
@@ -531 +550 @@
         """Yield all unversioned files in this WorkingTree.
         """
         with self.lock_read():
-            index_paths = set([p.decode('utf-8') for p, i in self._recurse_index_entries()])
+            index_paths = set([p.decode('utf-8')
+                               for p, i in self._recurse_index_entries()])
             all_paths = set(self._iter_files_recursive(include_dirs=True))
             for p in (all_paths - index_paths):
                 if not self._has_dir(p.encode('utf-8')):
@@ -544 +564 @@
                 if kinds[pos] is None:
                     fullpath = osutils.normpath(self.abspath(f))
                     try:
                         kind = osutils.file_kind(fullpath)
                     except OSError as e:
                         if e.errno == errno.ENOENT:
                             raise errors.NoSuchFile(fullpath)
-                    if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
+                    if (kind == 'directory' and f != '' and
+                            os.path.exists(os.path.join(fullpath, '.git'))):
                         kind = 'tree-reference'
                     kinds[pos] = kind
@@ -556 +577 @@
         if self._lock_mode != 'w':
             raise errors.NotWriteLocked(self)
         # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
-        # already in use and GitFile doesn't allow overriding the lock file name :(
+        # already in use and GitFile doesn't allow overriding the lock file
+        # name :(
         f = open(self.control_transport.local_abspath('index'), 'wb')
         # Note that _flush will close the file
@@ -593 +615 @@
-            tree_lookup_path(self.store.__getitem__, root_tree, path.encode('utf-8'))
+            tree_lookup_path(self.store.__getitem__,
+                             root_tree, path.encode('utf-8'))
@@ -615 +638 @@
             ignore_globs = set()
             ignore_globs.update(ignores.get_runtime_ignores())
             ignore_globs.update(ignores.get_user_ignores())
-            self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
+            self._global_ignoreglobster = globbing.ExceptionGlobster(
+                ignore_globs)
         match = self._global_ignoreglobster.match(filename)
         if match is not None:
@@ -713 +737 @@
         return os.lstat(self.abspath(path))

     def _live_entry(self, path):
-        return index_entry_from_path(self.abspath(path.decode('utf-8')).encode(osutils._fs_enc))
+        encoded_path = self.abspath(path.decode('utf-8')).encode(
+            osutils._fs_enc)
+        return index_entry_from_path(encoded_path)

     def is_executable(self, path):
         with self.lock_read():
-            if getattr(self, "_supports_executable", osutils.supports_executable)():
+            if getattr(self, "_supports_executable",
+                       osutils.supports_executable)():
                 mode = self._lstat(path).st_mode
             else:
                 (index, subpath) = self._lookup_index(path.encode('utf-8'))
@@ -728 +755 @@
             return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

     def _is_executable_from_path_and_stat(self, path, stat_result):
-        if getattr(self, "_supports_executable", osutils.supports_executable)():
-            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
+        if getattr(self, "_supports_executable",
+                   osutils.supports_executable)():
+            return self._is_executable_from_path_and_stat_from_stat(
+                path, stat_result)
         else:
-            return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
+            return self._is_executable_from_path_and_stat_from_basis(
+                path, stat_result)
@@ -736 +766 @@
     def list_files(self, include_root=False, from_dir=None, recursive=True):
         if from_dir is None:
@@ -747 +777 @@
                 yield "", "V", root_ie.kind, root_ie.file_id, root_ie
             dir_ids[u""] = root_ie.file_id
             if recursive:
-                path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
+                path_iterator = sorted(
+                    self._iter_files_recursive(from_dir, include_dirs=True))
             else:
-                path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
-                    os.listdir(self.abspath(from_dir).encode(osutils._fs_enc))
-                    if not self.controldir.is_control_filename(name.decode(osutils._fs_enc))
-                    and not self.mapping.is_special_file(name.decode(osutils._fs_enc))])
+                encoded_from_dir = self.abspath(from_dir).encode(
+                    osutils._fs_enc)
+                path_iterator = sorted(
+                    [os.path.join(from_dir, name.decode(osutils._fs_enc))
+                     for name in os.listdir(encoded_from_dir)
+                     if not self.controldir.is_control_filename(
+                         name.decode(osutils._fs_enc)) and
+                     not self.mapping.is_special_file(
+                         name.decode(osutils._fs_enc))])
             for path in path_iterator:
                 try:
                     encoded_path = path.encode("utf-8")
@@ -767 +803 @@
                 kind = self.kind(path)
                 parent, name = posixpath.split(path)
-                for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
+                for dir_path, dir_ie in self._add_missing_parent_ids(
+                        parent, dir_ids):
                     pass
                 if kind in ('directory', 'tree-reference'):
                     if path != from_dir:
@@ -783 +820 @@
                             ie = fk_entries[kind]()
-                        yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
+                        yield (
+                            posixpath.relpath(path, from_dir), status, kind,
+                            file_id, ie)
                     continue
                 if value is not None:
                     ie = self._get_file_ie(name, path, value, dir_ids[parent])
-                    yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
+                    yield (posixpath.relpath(path, from_dir), "V", ie.kind,
+                           ie.file_id, ie)
                 else:
                     ie = fk_entries[kind]()
-                    yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie
+                    yield (posixpath.relpath(path, from_dir), ("I" if
+                           self.is_ignored(path) else "?"), kind, None, ie)
@@ -794 +835 @@
     def all_file_ids(self):
         with self.lock_read():
@@ -824 +865 @@
         with self.lock_read():
             parent_id = self.path2id(path)
             found_any = False
-            seen_children = set()
             for item_path, value in self.index.iteritems():
                 decoded_item_path = item_path.decode('utf-8')
                 if self.mapping.is_special_file(item_path):
@@ -835 +875 @@
                 subpath = posixpath.relpath(decoded_item_path, path)
                 if '/' in subpath:
                     dirname = subpath.split('/', 1)[0]
-                    file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
+                    file_ie = self._get_dir_ie(
+                        posixpath.join(path, dirname), parent_id)
                 else:
                     (unused_parent, name) = posixpath.split(decoded_item_path)
                     file_ie = self._get_file_ie(
@@ -849 +890 @@
             conflicts = _mod_conflicts.ConflictList()
             for item_path, value in self.index.iteritems():
                 if value.flags & FLAG_STAGEMASK:
-                    conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
+                    conflicts.append(_mod_conflicts.TextConflict(
+                        item_path.decode('utf-8')))
@@ -855 +897 @@
     def set_conflicts(self, conflicts):
@@ -871 +913 @@
                 self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
             else:
-                self.index[path] = (value[:9] + (value[9] &~ FLAG_STAGEMASK, ))
+                self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))

     def add_conflicts(self, new_conflicts):
         with self.lock_tree_write():
             for conflict in new_conflicts:
-                if conflict.typestring in ('text conflict', 'contents conflict'):
+                if conflict.typestring in ('text conflict',
+                                           'contents conflict'):
                     try:
-                        self._set_conflicted(conflict.path.encode('utf-8'), True)
+                        self._set_conflicted(
+                            conflict.path.encode('utf-8'), True)
                     except KeyError:
-                        raise errors.UnsupportedOperation(self.add_conflicts, self)
+                        raise errors.UnsupportedOperation(
+                            self.add_conflicts, self)
                 else:
                     raise errors.UnsupportedOperation(self.add_conflicts, self)
@@ -909 +954 @@
             current_disk = next(disk_iterator)
             disk_finished = False
         except OSError as e:
-            if not (e.errno == errno.ENOENT or
-                (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
+            if not (e.errno == errno.ENOENT
+                    or (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                 raise
             current_disk = None
             disk_finished = True
@@ -929 +974 @@
                  cur_disk_dir_content) = ((None, None), None)
             if not disk_finished:
                 # strip out .bzr dirs
-                if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
-                    len(cur_disk_dir_content) > 0):
+                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
+                        and len(cur_disk_dir_content) > 0):
                     # osutils.walkdirs can be made nicer -
                     # yield the path-from-prefix rather than the pathjoined
                     # value.
                     bzrdir_loc = bisect_left(cur_disk_dir_content,
-                    if (bzrdir_loc < len(cur_disk_dir_content)
-                        and self.controldir.is_control_filename(
+                    if (bzrdir_loc < len(cur_disk_dir_content) and
+                            self.controldir.is_control_filename(
                                 cur_disk_dir_content[bzrdir_loc][0])):
                         # we dont yield the contents of, or, .bzr itself.
                         del cur_disk_dir_content[bzrdir_loc]
@@ -948 +993 @@
                     # everything is missing
@@ -951 +996 @@
-                direction = ((current_inv[0][0] > cur_disk_dir_relpath) -
-                    (current_inv[0][0] < cur_disk_dir_relpath))
+                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
+                             - (current_inv[0][0] < cur_disk_dir_relpath))
             if direction > 0:
                 # disk is before inventory - unknown
                 dirblock = [(relpath, basename, kind, stat, None, None) for
-                    relpath, basename, kind, stat, top_path in
-                    cur_disk_dir_content]
+                            relpath, basename, kind, stat, top_path in
+                            cur_disk_dir_content]
                 yield (cur_disk_dir_relpath, None), dirblock
                 try:
                     current_disk = next(disk_iterator)
@@ -963 +1008 @@
             elif direction < 0:
                 # inventory is before disk - missing.
                 dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
-                    for relpath, basename, dkind, stat, fileid, kind in
+                            for relpath, basename, dkind, stat, fileid, kind in
                             current_inv[1]]
                 yield (current_inv[0][0], current_inv[0][1]), dirblock
                 try:
                     current_inv = next(inventory_iterator)
@@ -975 +1020 @@
                 # merge the inventory and disk data together
                 dirblock = []
                 for relpath, subiterator in itertools.groupby(sorted(
-                    current_inv[1] + cur_disk_dir_content,
-                    key=operator.itemgetter(0)), operator.itemgetter(1)):
+                        current_inv[1] + cur_disk_dir_content,
+                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                     path_elements = list(subiterator)
                     if len(path_elements) == 2:
                         inv_row, disk_row = path_elements
                         # versioned, present file
                         dirblock.append((inv_row[0],
-                            inv_row[1], disk_row[2],
-                            disk_row[3], inv_row[4],
+                                         inv_row[1], disk_row[2],
+                                         disk_row[3], inv_row[4],
                                          inv_row[5]))
                     elif len(path_elements[0]) == 5:
                         # unknown disk file
-                        dirblock.append((path_elements[0][0],
-                            path_elements[0][1], path_elements[0][2],
-                            path_elements[0][3], None, None))
+                        dirblock.append(
+                            (path_elements[0][0], path_elements[0][1],
+                             path_elements[0][2], path_elements[0][3],
+                             None, None))
                     elif len(path_elements[0]) == 6:
                         # versioned, absent file.
-                        dirblock.append((path_elements[0][0],
-                            path_elements[0][1], 'unknown', None,
-                            path_elements[0][4], path_elements[0][5]))
+                        dirblock.append(
+                            (path_elements[0][0], path_elements[0][1],
+                             'unknown', None, path_elements[0][4],
+                             path_elements[0][5]))
                     else:
                         raise NotImplementedError('unreachable code')
                 yield current_inv[0], dirblock
@@ -1014 +1061 @@
         per_dir = defaultdict(set)
         if prefix == b"":
             per_dir[(u'', self.get_root_id())] = set()
+
         def add_entry(path, kind):
             if path == b'' or not path.startswith(prefix):
@@ -1025 +1073 @@
                 raise ValueError(value)
             per_dir[(dirname, dir_file_id)].add(
                 (path.decode("utf-8"), child_name.decode("utf-8"),
-                self.path2id(path.decode("utf-8")),
+                 self.path2id(path.decode("utf-8")),
@@ -1031 +1079 @@
         with self.lock_read():
             for path, value in self.index.iteritems():
                 if self.mapping.is_special_file(path):
@@ -1046 +1094 @@
     def apply_inventory_delta(self, changes):
         for (old_path, new_path, file_id, ie) in changes:
             if old_path is not None:
-                (index, old_subpath) = self._lookup_index(old_path.encode('utf-8'))
+                (index, old_subpath) = self._lookup_index(
+                    old_path.encode('utf-8'))
                 try:
                     self._index_del_entry(index, old_subpath)
                 except KeyError:
@@ -1056 +1105 @@
             if new_path is not None and ie.kind != 'directory':
                 if ie.kind == 'tree-reference':
                     self._index_add_entry(
                         new_path, ie.kind,
                         reference_revision=ie.reference_revision)
                 else:
                     self._index_add_entry(new_path, ie.kind)
@@ -1080 +1129 @@
                 parent_tree = self.revision_tree(parent_id)
             except errors.NoSuchRevisionInTree:
                 parent_tree = self.branch.repository.revision_tree(
                     parent_id)
             with parent_tree.lock_read():
-                # TODO(jelmer): Use rename/copy tracker to find path name in parent
+                # TODO(jelmer): Use rename/copy tracker to find path name
+                # in parent
                 parent_path = path
                 try:
                     kind = parent_tree.kind(parent_path)
                 except errors.NoSuchFile:
                     continue
                 if kind != 'file':
-                    # Note: this is slightly unnecessary, because symlinks and
-                    # directories have a "text" which is the empty text, and we
-                    # know that won't mess up annotations. But it seems cleaner
+                    # Note: this is slightly unnecessary, because symlinks
+                    # and directories have a "text" which is the empty
+                    # text, and we know that won't mess up annotations. But
+                    # it seems cleaner
                     continue
                 parent_text_key = (
@@ -1128 +1179 @@
             self.user_transport.local_abspath('.'),
             self.control_transport.local_abspath("index"),
-            None if self.branch.head is None else self.store[self.branch.head].tree)
+            None
+            if self.branch.head is None
+            else self.store[self.branch.head].tree)

     def reset_state(self, revision_ids=None):
         """Reset the state of the working tree.
@@ -1142 +1195 @@
             self.index.clear()
             self._index_dirty = True
             if self.branch.head is not None:
-                for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
+                for entry in self.store.iter_tree_contents(
+                        self.store[self.branch.head].tree):
                     if not validate_path(entry.path):
@@ -1149 +1203 @@
                     if S_ISGITLINK(entry.mode):
-                        pass # TODO(jelmer): record and return submodule paths
+                        pass  # TODO(jelmer): record and return submodule paths
                     else:
                         # Let's at least try to use the working tree file:
                         try:
-                            st = self._lstat(self.abspath(entry.path.decode('utf-8')))
+                            st = self._lstat(self.abspath(
+                                entry.path.decode('utf-8')))
                         except OSError:
                             # But if it doesn't exist, we'll make something up.
                             obj = self.store[entry.sha]
                             st = os.stat_result((entry.mode, 0, 0, 0,
-                                0, 0, len(obj.as_raw_string()), 0,
+                                                 0, 0, len(
+                                                     obj.as_raw_string()), 0,
                     (index, subpath) = self._lookup_index(entry.path)
                     index[subpath] = index_entry_from_stat(st, entry.sha, 0)
@@ -1175 +1231 @@
         with basis_tree.lock_read():
             new_basis_tree = self.branch.basis_tree()
             merge.merge_inner(
@@ -1182 +1238 @@
-                              change_reporter=change_reporter,
-                              show_base=show_base)
+                change_reporter=change_reporter,
+                show_base=show_base)
@@ -1186 +1242 @@
     def add_reference(self, sub_tree):
@@ -1193 +1249 @@
             sub_tree_path = self.relpath(sub_tree.basedir)
         except errors.PathNotChild:
             raise BadReferenceTarget(
-                                     self, sub_tree, 'Target not inside tree.')
+                self, sub_tree, 'Target not inside tree.')

         self._add([sub_tree_path], [None], ['tree-reference'])
@@ -1276 +1332 @@
             other_tree = self.revision_tree(revision_id)
         except errors.NoSuchRevision:
             other_tree = self.branch.repository.revision_tree(
                 revision_id)

         merge.transform_tree(tree, other_tree)
         if revision_id == _mod_revision.NULL_REVISION:
@@ -1321 +1377 @@
         if revision_id is not None:
             branch.set_last_revision(revision_id)
         wt = GitWorkingTree(
-                a_controldir, a_controldir.open_repository(), branch)
+            a_controldir, a_controldir.open_repository(), branch)
         for hook in MutableTree.hooks['post_build_tree']: