         self.basedir = bzrdir.root_transport.local_abspath('.')
         self.bzrdir = bzrdir
         self.repository = repo
+        self.store = self.repository._git.object_store
         self.mapping = self.repository.get_mapping()
         self._branch = branch
         self._transport = bzrdir.transport
-        self.controldir = self.bzrdir.transport.local_abspath('bzr')
-        os.makedirs(self.controldir)
-        os.makedirs(os.path.join(self.controldir, 'lock'))
-        self._control_files = lockable_files.LockableFiles(
-            transport.get_transport(self.controldir), 'lock', lockdir.LockDir)
         self._format = GitWorkingTreeFormat()
+        self._versioned_dirs = None
         self.views = self._make_views()
+        self._rules_searcher = None
         self._detect_case_handling()
+        self._fileid_map = self._basis_fileid_map.copy()
+        self._lock_mode = None
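
The constructor now keeps a dulwich object store and index around instead of bzr's inventory and LockableFiles. For readers unfamiliar with dulwich, a minimal standalone sketch of what that index object looks like (illustrative only; assumes dulwich is installed and /tmp/example is an existing non-bare Git checkout):

    # Open a checkout's .git/index via dulwich and inspect its entries.
    # Each entry is a 10-tuple (ctime, mtime, dev, ino, mode, uid, gid,
    # size, sha, flags) in the dulwich versions this plugin targets.
    from dulwich.repo import Repo

    repo = Repo("/tmp/example")        # hypothetical checkout path
    index = repo.open_index()          # dulwich.index.Index
    for path, entry in index.iteritems():
        print path, entry[-2]          # blob sha recorded for this path
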
+    def lock_read(self):
+        """Lock the repository for read operations.
+
+        :return: A bzrlib.lock.LogicalLockResult.
+        """
+        if not self._lock_mode:
+            self._lock_mode = 'r'
+        self._lock_count += 1
+        self.branch.lock_read()
+        return lock.LogicalLockResult(self.unlock)

+    def lock_tree_write(self):
+        if not self._lock_mode:
+            self._lock_mode = 'w'
+        elif self._lock_mode == 'r':
+            raise errors.ReadOnlyError(self)
+        self.branch.lock_read()
+        return lock.LogicalLockResult(self.unlock)

+    def lock_write(self, token=None):
+        if not self._lock_mode:
+            self._lock_mode = 'w'
+        elif self._lock_mode == 'r':
+            raise errors.ReadOnlyError(self)
+        self.branch.lock_write()
+        return lock.LogicalLockResult(self.unlock)

+    def is_locked(self):
+        return self._lock_count >= 1

+    def get_physical_lock_status(self):
+        return False

+    def unlock(self):
+        if not self._lock_count:
+            return lock.cant_unlock_not_held(self)
+        self._lock_count -= 1
+        if self._lock_count > 0:
+            return
+        self._lock_mode = None
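
The lock_* methods and unlock() above implement plain in-memory reference counting rather than bzr's on-disk LockDir. A self-contained sketch of that pattern (not the plugin's actual class, just the counting logic in isolation):

    class CountedLock(object):
        """Count nested lock/unlock calls; the real tree also locks the branch."""

        def __init__(self):
            self._lock_mode = None
            self._lock_count = 0

        def lock_read(self):
            if not self._lock_mode:
                self._lock_mode = 'r'
            self._lock_count += 1

        def lock_write(self):
            if not self._lock_mode:
                self._lock_mode = 'w'
            elif self._lock_mode == 'r':
                raise RuntimeError("cannot upgrade a read lock")
            self._lock_count += 1

        def unlock(self):
            if not self._lock_count:
                raise RuntimeError("not locked")
            self._lock_count -= 1
            if not self._lock_count:
                self._lock_mode = None
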
+    def _detect_case_handling(self):
+        try:
+            self._transport.stat(".git/cOnFiG")
+        except errors.NoSuchFile:
+            self.case_sensitive = True
+        else:
+            self.case_sensitive = False

+    def merge_modified(self):
+        return {}

+    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
+        self.set_parent_ids([p for p, t in parents_list])

+    def _index_add_entry(self, path, file_id, kind):
+        assert self._lock_mode is not None
+        assert isinstance(path, basestring)
+        assert type(file_id) == str or file_id is None
+        if kind == "directory":
+            # Git indexes don't contain directories
+            return
+        if kind == "file":
+            blob = Blob()
+            try:
+                file, stat_val = self.get_file_with_stat(file_id, path)
+            except (errors.NoSuchFile, IOError):
+                # TODO: Rather than come up with something here, use the old index
+                stat_val = os.stat_result(
+                    (stat.S_IFREG | 0644, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+            blob.set_raw_string(file.read())
+        elif kind == "symlink":
+            blob = Blob()
+            try:
+                stat_val = os.lstat(self.abspath(path))
+            except (errors.NoSuchFile, OSError):
+                # TODO: Rather than come up with something here, use the
+                # old index
+                stat_val = os.stat_result(
+                    (stat.S_IFLNK, 0, 0, 0, 0, 0, 0, 0, 0, 0))
+            blob.set_raw_string(
+                self.get_symlink_target(file_id, path).encode("utf-8"))
+        else:
+            raise AssertionError("unknown kind '%s'" % kind)
+        # Add object to the repository if it didn't exist yet
+        if not blob.id in self.store:
+            self.store.add_object(blob)
+        # Add an entry to the index or update the existing entry
+        flags = 0
+        encoded_path = path.encode("utf-8")
+        self.index[encoded_path] = index_entry_from_stat(
+            stat_val, blob.id, flags)
+        if self._versioned_dirs is not None:
+            self._ensure_versioned_dir(encoded_path)
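
_index_add_entry() is the heart of the new write path: hash the working file into a blob, store the blob, then record an index entry derived from lstat data. A rough standalone equivalent using plain dulwich (hypothetical paths; the flags value and error handling are simplified):

    import os
    from dulwich.index import index_entry_from_stat
    from dulwich.objects import Blob
    from dulwich.repo import Repo

    repo = Repo("/tmp/example")                 # hypothetical checkout
    relpath = "hello.txt"                       # file assumed to exist there
    abspath = os.path.join("/tmp/example", relpath)

    blob = Blob.from_string(open(abspath, "rb").read())
    if blob.id not in repo.object_store:        # only store new content
        repo.object_store.add_object(blob)

    index = repo.open_index()
    index[relpath] = index_entry_from_stat(os.lstat(abspath), blob.id, 0)
    index.write()                               # the equivalent of flush()
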
+    def _ensure_versioned_dir(self, dirname):
+        if dirname in self._versioned_dirs:
+            return
+        if dirname != "":
+            self._ensure_versioned_dir(posixpath.dirname(dirname))
+        self._versioned_dirs.add(dirname)

+    def _load_dirs(self):
+        assert self._lock_mode is not None
+        self._versioned_dirs = set()
+        for p in self.index:
+            self._ensure_versioned_dir(posixpath.dirname(p))

+    def _unversion_path(self, path):
+        assert self._lock_mode is not None
+        encoded_path = path.encode("utf-8")
+        try:
+            del self.index[encoded_path]
+        except KeyError:
+            # A directory, perhaps?
+            for p in list(self.index):
+                if p.startswith(encoded_path+"/"):
+                    del self.index[p]
+        # FIXME: remove empty directories

+    @needs_tree_write_lock
+    def unversion(self, file_ids):
+        for file_id in file_ids:
+            path = self.id2path(file_id)
+            self._unversion_path(path)

+    def check_state(self):
+        """Check that the working state is/isn't valid."""

+    @needs_tree_write_lock
+    def remove(self, files, verbose=False, to_file=None, keep_files=True,
+        force=False):
+        """Remove nominated files from the working tree metadata.
+
+        :param files: File paths relative to the basedir.
+        :param keep_files: If true, the files will also be kept.
+        :param force: Delete files and directories, even if they are changed
+            and even if the directories are not empty.
+        """
+        all_files = set() # specified and nested files
+        if isinstance(files, basestring):
+            files = [files]
+        files = list(all_files)
+        if len(files) == 0:
+            return # nothing to do
+        # Sort needed to first handle directory content before the directory
+        files.sort(reverse=True)

+        def backup(file_to_backup):
+            abs_path = self.abspath(file_to_backup)
+            backup_name = self.bzrdir._available_backup_name(file_to_backup)
+            osutils.rename(abs_path, self.abspath(backup_name))
+            return "removed %s (but kept a copy: %s)" % (
+                file_to_backup, backup_name)

+        for f in files:
+            fid = self.path2id(f)
+            message = None
+            if not fid:
+                message = "%s is not versioned." % (f,)
+            else:
+                abs_path = self.abspath(f)
+                if verbose:
+                    # having removed it, it must be either ignored or unknown
+                    if self.is_ignored(f):
+                        new_status = 'I'
+                    else:
+                        new_status = '?'
+                    # XXX: Really should be a more abstract reporter interface
+                    kind_ch = osutils.kind_marker(self.kind(fid))
+                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
+                # FIXME: _unversion_path() is O(size-of-index) for directories
+                self._unversion_path(f)
+                message = "removed %s" % (f,)
+                if osutils.lexists(abs_path):
+                    if (osutils.isdir(abs_path) and
+                        len(os.listdir(abs_path)) > 0):
+                        if force:
+                            osutils.rmtree(abs_path)
+                            message = "deleted %s" % (f,)
+                        else:
+                            message = backup(f)
+                    else:
+                        if not keep_files:
+                            osutils.delete_any(abs_path)
+                            message = "deleted %s" % (f,)
+            # print only one message (if any) per file.
+            if message is not None:
+                note(message)

+    def _add(self, files, ids, kinds):
+        for (path, file_id, kind) in zip(files, ids, kinds):
+            if file_id is not None:
+                self._fileid_map.set_file_id(path.encode("utf-8"), file_id)
+            else:
+                file_id = self._fileid_map.lookup_file_id(path.encode("utf-8"))
+            self._index_add_entry(path, file_id, kind)

+    @needs_tree_write_lock
+    def smart_add(self, file_list, recurse=True, action=None, save=True):
+        added = []
+        ignored = {}
+        user_dirs = []
+        for filepath in osutils.canonical_relpaths(self.basedir, file_list):
+            abspath = self.abspath(filepath)
+            kind = osutils.file_kind(abspath)
+            if action is not None:
+                file_id = action(self, None, filepath, kind)
+            else:
+                file_id = None
+            if kind in ("file", "symlink"):
+                self._index_add_entry(filepath, file_id, kind)
+                added.append(filepath)
+            elif kind == "directory":
+                if recurse:
+                    user_dirs.append(filepath)
+            else:
+                raise errors.BadFileKindError(filename=abspath, kind=kind)
+        for user_dir in user_dirs:
+            abs_user_dir = self.abspath(user_dir)
+            for name in os.listdir(abs_user_dir):
+                subp = os.path.join(user_dir, name)
+                if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
+                    continue
+                ignore_glob = self.is_ignored(subp)
+                if ignore_glob is not None:
+                    ignored.setdefault(ignore_glob, []).append(subp)
+                    continue
+                abspath = self.abspath(subp)
+                kind = osutils.file_kind(abspath)
+                if kind == "directory":
+                    user_dirs.append(subp)
+                else:
+                    if action is not None:
+                        file_id = action(self, None, filepath, kind)
+                    else:
+                        file_id = None
+                    self._index_add_entry(subp, file_id, kind)
+        return added, ignored
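
smart_add() walks the user-supplied paths, adds files and symlinks straight to the index, and queues directories for recursion while skipping the .git control directory and ignored paths. A stripped-down, standalone version of that walk (illustrative only, no bzrlib involved; iter_addable_paths is a hypothetical helper name):

    import os

    def iter_addable_paths(basedir, is_ignored=lambda p: False):
        """Yield paths under basedir that a naive 'add' would pick up."""
        pending = [u""]
        while pending:
            subdir = pending.pop()
            absdir = os.path.join(basedir, subdir) if subdir else basedir
            for name in sorted(os.listdir(absdir)):
                if name == ".git":               # control directory, never added
                    continue
                relpath = os.path.join(subdir, name) if subdir else name
                if is_ignored(relpath):
                    continue
                if os.path.isdir(os.path.join(basedir, relpath)):
                    pending.append(relpath)      # recurse later, like user_dirs
                else:
                    yield relpath
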
+    def _set_root_id(self, file_id):
+        self._fileid_map.set_file_id("", file_id)

+    @needs_tree_write_lock
+    def move(self, from_paths, to_dir=None, after=False):
+        rename_tuples = []
+        to_abs = self.abspath(to_dir)
+        if not os.path.isdir(to_abs):
+            raise errors.BzrMoveFailedError('', to_dir,
+                errors.NotADirectory(to_abs))
+        for from_rel in from_paths:
+            from_tail = os.path.split(from_rel)[-1]
+            to_rel = os.path.join(to_dir, from_tail)
+            self.rename_one(from_rel, to_rel, after=after)
+            rename_tuples.append((from_rel, to_rel))
+        return rename_tuples

+    @needs_tree_write_lock
+    def rename_one(self, from_rel, to_rel, after=False):
+        from_path = from_rel.encode("utf-8")
+        to_path = to_rel.encode("utf-8")
+        if not self.has_filename(to_rel):
+            raise errors.BzrMoveFailedError(from_rel, to_rel,
+                errors.NoSuchFile(to_rel))
+        if not from_path in self.index:
+            raise errors.BzrMoveFailedError(from_rel, to_rel,
+                errors.NotVersionedError(path=from_rel))
+        os.rename(self.abspath(from_rel), self.abspath(to_rel))
+        self.index[to_path] = self.index[from_path]
+        del self.index[from_path]
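
rename_one() ultimately reduces to moving the file on disk and re-keying its index entry. The same operation with nothing but dulwich, assuming a hypothetical checkout that tracks old.txt:

    import os
    from dulwich.repo import Repo

    repo = Repo("/tmp/example")                 # hypothetical checkout
    index = repo.open_index()

    os.rename("/tmp/example/old.txt", "/tmp/example/new.txt")
    index["new.txt"] = index["old.txt"]         # reuse the existing entry
    del index["old.txt"]
    index.write()
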
+    def get_root_id(self):
+        return self.path2id("")

+    def _has_dir(self, path):
+        if self._versioned_dirs is None:
+            self._load_dirs()
+        return path in self._versioned_dirs

+    def path2id(self, path):
+        encoded_path = path.encode("utf-8")
+        if self._is_versioned(encoded_path):
+            return self._fileid_map.lookup_file_id(encoded_path)

+    def _iter_files_recursive(self, from_dir=None):
+        if from_dir is None:
+            from_dir = ""
+        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir)):
+            dir_relpath = dirpath[len(self.basedir):].strip("/")
+            if self.bzrdir.is_control_filename(dir_relpath):
+                continue
+            for filename in filenames:
+                if not self.mapping.is_special_file(filename):
+                    yield os.path.join(dir_relpath, filename)

     def extras(self):
         """Yield all unversioned files in this WorkingTree.
         """
-        for (dirpath, dirnames, filenames) in os.walk(self.basedir):
-            if self.bzrdir.is_control_filename(dirpath[len(self.basedir):].strip("/")):
-                continue
-            for filename in filenames:
-                relpath = os.path.join(dirpath[len(self.basedir):].strip("/"), filename)
-                if not relpath in self.index:
-                    yield relpath
-
-    def unlock(self):
-        # non-implementation specific cleanup
-        # reverse order of locking.
-        return self._control_files.unlock()
-
-    def is_control_filename(self, path):
-        return os.path.basename(path) == ".git"
-
-    def _rewrite_index(self):
-        for path, entry in self._inventory.iter_entries():
-            if entry.kind == "directory":
-                # Git indexes don't contain directories
-                continue
-            if entry.kind == "file":
-                try:
-                    file, stat_val = self.get_file_with_stat(entry.file_id, path)
-                except (errors.NoSuchFile, IOError):
-                    # TODO: Rather than come up with something here, use the old index
-                    from posix import stat_result
-                    stat_val = stat_result((stat.S_IFREG | 0644, 0, 0, 0, 0, 0, 0, 0, 0, 0))
-                blob.set_raw_string(file.read())
-            elif entry.kind == "symlink":
-                try:
-                    stat_val = os.lstat(self.abspath(path))
-                except (errors.NoSuchFile, OSError):
-                    # TODO: Rather than come up with something here, use the
-                    # old index
-                    from posix import stat_result
-                    stat_val = stat_result((stat.S_IFLNK, 0, 0, 0, 0, 0, 0, 0, 0, 0))
-                blob.set_raw_string(entry.symlink_target)
-            else:
-                raise AssertionError("unknown kind '%s'" % entry.kind)
-            # Add object to the repository if it didn't exist yet
-            if not blob.id in self.repository._git.object_store:
-                self.repository._git.object_store.add_object(blob)
-            # Add an entry to the index or update the existing entry
-            self.index[path.encode("utf-8")] = (stat_val.st_ctime, stat_val.st_mtime, stat_val.st_dev, stat_val.st_ino, stat_val.st_mode, stat_val.st_uid, stat_val.st_gid, stat_val.st_size, blob.id, flags)
+        return set(self._iter_files_recursive()) - set(self.index)
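
The new extras() is simply a set difference between what os.walk() finds on disk and what the index tracks. The same idea in isolation (hypothetical path, dulwich only):

    import os
    from dulwich.repo import Repo

    repo = Repo("/tmp/example")                 # hypothetical checkout
    tracked = set(repo.open_index())            # iterating an Index yields its paths

    on_disk = set()
    for dirpath, dirnames, filenames in os.walk("/tmp/example"):
        if ".git" in dirnames:
            dirnames.remove(".git")             # do not descend into the control dir
        for filename in filenames:
            relpath = os.path.relpath(os.path.join(dirpath, filename), "/tmp/example")
            on_disk.add(relpath)

    unversioned = on_disk - tracked
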
+    @needs_tree_write_lock
     def flush(self):
         # TODO: Maybe this should only write on dirty ?
-        if self._control_files._lock_mode != 'w':
+        if self._lock_mode != 'w':
             raise errors.NotWriteLocked(self)
-        self._rewrite_index()
         self.index.write()
-        self._inventory_is_modified = False

+    def __iter__(self):
+        for path in self.index:
+            yield self.path2id(path)
+        self._load_dirs()
+        for path in self._versioned_dirs:
+            yield self.path2id(path)

+    def has_or_had_id(self, file_id):
+        if self.has_id(file_id):
+            return True
+        if self.had_id(file_id):
+            return True
+        return False

+    def had_id(self, file_id):
+        path = self._basis_fileid_map.lookup_file_id(file_id)
+        try:
+            head = self.repository._git.head()
+        except KeyError:
+            # Assume no if basis is not accessible
+            return False
+        root_tree = self.store[head].tree
+        try:
+            tree_lookup_path(self.store.__getitem__, root_tree, path)
+        except KeyError:
+            return False
+        else:
+            return True

+    def has_id(self, file_id):
+        try:
+            self.id2path(file_id)
+        except errors.NoSuchId:
+            return False
+        else:
+            return True

+    def id2path(self, file_id):
+        file_id = osutils.safe_utf8(file_id)
+        path = self._fileid_map.lookup_path(file_id)
+        # FIXME: What about directories?
+        if self._is_versioned(path):
+            return path.decode("utf-8")
+        raise errors.NoSuchId(self, file_id)

+    def get_file_mtime(self, file_id, path=None):
+        """See Tree.get_file_mtime."""
+        if not path:
+            path = self.id2path(file_id)
+        return os.lstat(self.abspath(path)).st_mtime

     def get_ignore_list(self):
         ignoreset = getattr(self, '_ignoreset', None)

     def revision_tree(self, revid):
         return self.repository.revision_tree(revid)
+    def _is_versioned(self, path):
+        assert self._lock_mode is not None
+        return (path in self.index or self._has_dir(path))

+    def filter_unversioned_files(self, files):
+        return set([p for p in files if not self._is_versioned(p.encode("utf-8"))])

+    def _get_dir_ie(self, path, parent_id):
+        file_id = self.path2id(path)
+        return inventory.InventoryDirectory(file_id,
+            posixpath.basename(path).strip("/"), parent_id)

+    def _add_missing_parent_ids(self, path, dir_ids):
+        if path in dir_ids:
+            return []
+        parent = posixpath.dirname(path).strip("/")
+        ret = self._add_missing_parent_ids(parent, dir_ids)
+        parent_id = dir_ids[parent]
+        ie = self._get_dir_ie(path, parent_id)
+        dir_ids[path] = ie.file_id
+        ret.append((path, ie))
+        return ret

+    def _get_file_ie(self, name, path, value, parent_id):
+        assert isinstance(name, unicode)
+        assert isinstance(path, unicode)
+        assert isinstance(value, tuple) and len(value) == 10
+        (ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = value
+        file_id = self.path2id(path)
+        if type(file_id) != str:
+            raise AssertionError
+        kind = mode_kind(mode)
+        ie = inventory.entry_factory[kind](file_id, name, parent_id)
+        if kind == 'symlink':
+            ie.symlink_target = self.get_symlink_target(file_id)
+        else:
+            data = self.get_file_text(file_id, path)
+            ie.text_sha1 = osutils.sha_string(data)
+            ie.text_size = len(data)
+            ie.executable = self.is_executable(file_id, path)
+        return ie

+    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
+        mode = stat_result.st_mode
+        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

+    def stored_kind(self, file_id, path=None):
+        if path is None:
+            path = self.id2path(file_id)
+        try:
+            return mode_kind(self.index[path.encode("utf-8")][4])
+        except KeyError:
+            # Maybe it's a directory?
+            if self._has_dir(path):
+                return "directory"
+            raise errors.NoSuchId(self, file_id)

+    if not osutils.supports_executable():
+        def is_executable(self, file_id, path=None):
+            basis_tree = self.basis_tree()
+            if file_id in basis_tree:
+                return basis_tree.is_executable(file_id)
+            # Default to not executable
+            return False
+    else:
+        def is_executable(self, file_id, path=None):
+            if not path:
+                path = self.id2path(file_id)
+            mode = os.lstat(self.abspath(path)).st_mode
+            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

+        _is_executable_from_path_and_stat = \
+            _is_executable_from_path_and_stat_from_stat
+    def list_files(self, include_root=False, from_dir=None, recursive=True):
+        # FIXME: Yield non-versioned files
+        if from_dir is None:
+            from_dir = ""
+        dir_ids = {}
+        fk_entries = {'directory': workingtree.TreeDirectory,
+                      'file': workingtree.TreeFile,
+                      'symlink': workingtree.TreeLink}
+        root_ie = self._get_dir_ie(u"", None)
+        if include_root and not from_dir:
+            yield "", "V", root_ie.kind, root_ie.file_id, root_ie
+        dir_ids[u""] = root_ie.file_id
+        if recursive:
+            path_iterator = self._iter_files_recursive(from_dir)
+        else:
+            start = os.path.join(self.basedir, from_dir)
+            path_iterator = sorted([os.path.join(from_dir, name) for name in
+                os.listdir(start) if not self.bzrdir.is_control_filename(name)
+                and not self.mapping.is_special_file(name)])
+        for path in path_iterator:
+            try:
+                value = self.index[path]
+            except KeyError:
+                value = None
+            path = path.decode("utf-8")
+            parent, name = posixpath.split(path)
+            for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
+                yield dir_path, "V", dir_ie.kind, dir_ie.file_id, dir_ie
+            if value is not None:
+                ie = self._get_file_ie(name, path, value, dir_ids[parent])
+                yield path, "V", ie.kind, ie.file_id, ie
+            else:
+                kind = osutils.file_kind(self.abspath(path))
+                ie = fk_entries[kind]()
+                yield path, "?", kind, None, ie

+    def all_file_ids(self):
+        ids = {u"": self.path2id("")}
+        for path in self.index:
+            if self.mapping.is_special_file(path):
+                continue
+            path = path.decode("utf-8")
+            parent = posixpath.dirname(path).strip("/")
+            for e in self._add_missing_parent_ids(parent, ids):
+                pass
+            ids[path] = self.path2id(path)
+        return set(ids.values())
+    def _directory_is_tree_reference(self, path):
+        # FIXME: Check .gitsubmodules for path
+        return False

+    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
+        # FIXME: Is return order correct?
+        if yield_parents:
+            raise NotImplementedError(self.iter_entries_by_dir)
+        if specific_file_ids is not None:
+            specific_paths = [self.id2path(file_id) for file_id in specific_file_ids]
+            if specific_paths in ([u""], []):
+                specific_paths = None
+            else:
+                specific_paths = set(specific_paths)
+        else:
+            specific_paths = None
+        root_ie = self._get_dir_ie(u"", None)
+        if specific_paths is None:
+            yield u"", root_ie
+        dir_ids = {u"": root_ie.file_id}
+        for path, value in self.index.iteritems():
+            if self.mapping.is_special_file(path):
+                continue
+            path = path.decode("utf-8")
+            if specific_paths is not None and not path in specific_paths:
+                continue
+            (parent, name) = posixpath.split(path)
+            file_ie = self._get_file_ie(name, path, value, None)
+            for (dir_path, dir_ie) in self._add_missing_parent_ids(parent,
+                    dir_ids):
+                yield dir_path, dir_ie
+            file_ie.parent_id = self.path2id(parent)
+            yield path, file_ie

     def conflicts(self):
+        # FIXME:
+        return _mod_conflicts.ConflictList()

+    def update_basis_by_delta(self, new_revid, delta):
+        # The index just contains content, which won't have changed.
+        pass

+    def get_canonical_inventory_path(self, path):
+        for p in self.index:
+            if p.lower() == path.lower():
+                return p
+        return path

+    def _walkdirs(self, prefix=""):
+        if prefix != "":
+            prefix += "/"
+        per_dir = defaultdict(list)
+        for path, value in self.index.iteritems():
+            if self.mapping.is_special_file(path):
+                continue
+            if not path.startswith(prefix):
+                continue
+            (dirname, child_name) = posixpath.split(path)
+            dirname = dirname.decode("utf-8")
+            dir_file_id = self.path2id(dirname)
+            assert isinstance(value, tuple) and len(value) == 10
+            (ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = value
+            stat_result = os.stat_result((mode, ino,
+                dev, 1, uid, gid, size,
+                mtime, mtime, ctime))
+            per_dir[(dirname, dir_file_id)].append(
+                (path.decode("utf-8"), child_name.decode("utf-8"),
+                mode_kind(mode), stat_result,
+                self.path2id(path.decode("utf-8")),
+                mode_kind(mode)))
+        return per_dir.iteritems()
 class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

+    _tree_class = GitWorkingTree

+    supports_versioned_directories = False

     def _matchingbzrdir(self):
-        from bzrlib.plugins.git import LocalGitControlDirFormat
+        from bzrlib.plugins.git.dir import LocalGitControlDirFormat
         return LocalGitControlDirFormat()

     def get_format_description(self):
         return "Git Working Tree"

+    def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
+                   accelerator_tree=None, hardlink=False):
+        """See WorkingTreeFormat.initialize()."""
+        if not isinstance(a_bzrdir, LocalGitDir):
+            raise errors.IncompatibleFormat(self, a_bzrdir)
+        index = Index(a_bzrdir.root_transport.local_abspath(".git/index"))
+        return GitWorkingTree(a_bzrdir, a_bzrdir.open_repository(),
+            a_bzrdir.open_branch(), index)
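
initialize() boils down to wrapping the checkout's .git/index file in a dulwich Index. Outside bzrlib that is essentially a one-liner (hypothetical path; the file is parsed if it already exists, and write() serialises it back):

    from dulwich.index import Index

    index = Index("/tmp/example/.git/index")    # hypothetical checkout
    print len(list(index))                      # number of tracked paths
    index.write()
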
 class InterIndexGitTree(tree.InterTree):
     """InterTree that works between a Git revision tree and an index."""

     def __init__(self, source, target):
         super(InterIndexGitTree, self).__init__(source, target)
+        assert target.is_locked()
         self._index = target.index

     def compare(self, want_unchanged=False, specific_files=None,
                 extra_trees=None, require_versioned=False, include_root=False,
                 want_unversioned=False):
-        changes = self._index.changes_from_tree(
-            self.source._repository._git.object_store, self.source.tree,
-            want_unchanged=want_unchanged)
-        source_fileid_map = self.source.mapping.get_fileid_map(
-            self.source._repository._git.object_store.__getitem__,
-            self.source.tree)
-        if self.target.mapping.BZR_FILE_IDS_FILE is not None:
-            file_id = self.target.path2id(
-                self.target.mapping.BZR_FILE_IDS_FILE)
-            if file_id is None:
-                target_fileid_map = {}
-            else:
-                target_fileid_map = self.target.mapping.import_fileid_map(Blob.from_string(self.target.get_file_text(file_id)))
-        else:
-            target_fileid_map = {}
-        target_fileid_map = GitFileIdMap(target_fileid_map, self.target.mapping)
+        # FIXME: Handle include_root
+        changes = changes_between_git_tree_and_index(
+            self.source.store, self.source.tree,
+            self.target.basedir, self.target.index,
+            want_unchanged=want_unchanged,
+            want_unversioned=want_unversioned)
+        source_fileid_map = self.source._fileid_map
+        target_fileid_map = self.target._fileid_map
         ret = tree_delta_from_git_changes(changes, self.target.mapping,
             (source_fileid_map, target_fileid_map),
             specific_file=specific_files, require_versioned=require_versioned)
         if want_unversioned:
             for e in self.target.extras():
-                ret.unversioned.append((e, None, osutils.file_kind(self.target.abspath(e))))
+                ret.unversioned.append((e, None,
+                    osutils.file_kind(self.target.abspath(e))))
+        return ret

     def iter_changes(self, include_unchanged=False, specific_files=None,
-                     pb=None, extra_trees=[], require_versioned=True, want_unversioned=False):
-        changes = self._index.changes_from_tree(
-            self.source._repository._git.object_store, self.source.tree,
-            want_unchanged=include_unchanged)
-        # FIXME: Handle want_unversioned
-        return changes_from_git_changes(changes, self.target.mapping,
+                     pb=None, extra_trees=[], require_versioned=True,
+                     want_unversioned=False):
+        changes = changes_between_git_tree_and_index(
+            self.source.store, self.source.tree,
+            self.target.basedir, self.target.index,
+            want_unchanged=include_unchanged,
+            want_unversioned=want_unversioned)
+        return changes_from_git_changes(changes, self.target.mapping,
             specific_file=specific_files)


 tree.InterTree.register_optimiser(InterIndexGitTree)
+def changes_between_git_tree_and_index(object_store, tree, base_path, index,
+        want_unchanged=False, want_unversioned=False, update_index=False):
+    """Determine the changes between a git tree and a working tree with index.
+
+    """
+    names = index._byname.keys()
+    def lookup_entry(path):
+        entry = index[path]
+        index_mode = entry[-6]
+        index_sha = entry[-2]
+        disk_path = os.path.join(base_path, path)
+        disk_stat = os.lstat(disk_path)
+        mtime = disk_stat.st_mtime
+        mtime_delta = (entry[1][0] - mtime)
+        disk_mode = cleanup_mode(disk_stat.st_mode)
+        if (mtime_delta > 0 or
+            disk_mode != index_mode):
+            with open(disk_path, 'r') as f:
+                blob = Blob.from_string(f.read())
+            flags = 0
+            if update_index:
+                index[path] = index_entry_from_stat(disk_stat, blob.id, flags)
+            return (blob.id, disk_mode)
+        return (index_sha, index_mode)
+    for (name, mode, sha) in changes_from_tree(names, lookup_entry,
+            object_store, tree, want_unchanged=want_unchanged):
+        yield (name, mode, sha)
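
A hypothetical driver for the helper above, diffing HEAD's tree against the index and working tree of a local checkout (sketch only; assumes the function and its dulwich imports are available as defined above):

    from dulwich.repo import Repo

    repo = Repo("/tmp/example")                 # hypothetical checkout
    head_tree = repo[repo.head()].tree          # tree SHA of the HEAD commit
    for (name, mode, sha) in changes_between_git_tree_and_index(
            repo.object_store, head_tree, "/tmp/example", repo.open_index()):
        print name, mode, sha                   # each is an (old, new) pair per change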