        self.basedir = bzrdir.root_transport.local_abspath('.')
        self.bzrdir = bzrdir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = bzrdir.transport
        self._format = GitWorkingTreeFormat()
        self._versioned_dirs = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()
        self._fileid_map = self._basis_fileid_map.copy()
        self._lock_mode = None
        self._lock_count = 0
    def lock_read(self):
        """Lock the repository for read operations.

        :return: A bzrlib.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)
    def lock_tree_write(self):
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)
    def lock_write(self, token=None):
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1
        self.branch.lock_write()
        return lock.LogicalLockResult(self.unlock)

    def is_locked(self):
        return self._lock_count >= 1
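    # Rough usage sketch (not part of the original change): the lock methods
    # above are reference-counted, so nested acquisitions are cheap and only
    # the outermost unlock() releases the branch lock. Assuming `tree` is an
    # already-opened GitWorkingTree, a read-locked block would look roughly
    # like:
    #
    #   lock = tree.lock_read()
    #   try:
    #       pass  # inspect tree.index, tree.path2id(), etc.
    #   finally:
    #       lock.unlock()
    #
    # Here lock.unlock is the callable wrapped by the LogicalLockResult
    # returned above, which simply calls tree.unlock().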
    def get_physical_lock_status(self):
        return False

    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
            return
        self._lock_mode = None
    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False
    def merge_modified(self):
        return {}

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])
    def _index_add_entry(self, path, file_id, kind):
        assert self._lock_mode is not None
        assert isinstance(path, basestring)
        assert type(file_id) == str or file_id is None
        if kind == "directory":
            # Git indexes don't contain directories
            return
        if kind == "file":
            blob = Blob()
            try:
                file, stat_val = self.get_file_with_stat(file_id, path)
            except (errors.NoSuchFile, IOError):
                # TODO: Rather than come up with something here, use the old index
                stat_val = os.stat_result(
                    (stat.S_IFREG | 0644, 0, 0, 0, 0, 0, 0, 0, 0, 0))
            blob.set_raw_string(file.read())
        elif kind == "symlink":
            blob = Blob()
            try:
                stat_val = os.lstat(self.abspath(path))
            except (errors.NoSuchFile, OSError):
                # TODO: Rather than come up with something here, use the
                # old index
                stat_val = os.stat_result(
                    (stat.S_IFLNK, 0, 0, 0, 0, 0, 0, 0, 0, 0))
            blob.set_raw_string(
                self.get_symlink_target(file_id, path).encode("utf-8"))
        else:
            raise AssertionError("unknown kind '%s'" % kind)
        # Add object to the repository if it didn't exist yet
        if not blob.id in self.store:
            self.store.add_object(blob)
        # Add an entry to the index or update the existing entry
        flags = 0
        encoded_path = path.encode("utf-8")
        self.index[encoded_path] = index_entry_from_stat(
            stat_val, blob.id, flags)
        if self._versioned_dirs is not None:
            self._ensure_versioned_dir(encoded_path)
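    # Illustrative note (inferred from how the surrounding code unpacks index
    # values, not stated in the change itself): the entry written above by
    # index_entry_from_stat() is the usual dulwich 10-tuple of stat data plus
    # the blob sha and flags, roughly:
    #
    #   (ctime, mtime, dev, ino, mode, uid, gid, size, blob.id, flags)
    #
    # which is why _get_file_ie() and _walkdirs() below unpack index values
    # as 10-tuples.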
    def _ensure_versioned_dir(self, dirname):
        if dirname in self._versioned_dirs:
            return
        if dirname != "":
            self._ensure_versioned_dir(posixpath.dirname(dirname))
        self._versioned_dirs.add(dirname)
    def _load_dirs(self):
        assert self._lock_mode is not None
        self._versioned_dirs = set()
        for p in self.index:
            self._ensure_versioned_dir(posixpath.dirname(p))
    def _unversion_path(self, path):
        assert self._lock_mode is not None
        encoded_path = path.encode("utf-8")
        try:
            del self.index[encoded_path]
        except KeyError:
            # A directory, perhaps?
            for p in list(self.index):
                if p.startswith(encoded_path+"/"):
                    del self.index[p]
        # FIXME: remove empty directories
    @needs_tree_write_lock
    def unversion(self, file_ids):
        for file_id in file_ids:
            path = self.id2path(file_id)
            self._unversion_path(path)
    def check_state(self):
        """Check that the working state is/isn't valid."""
    @needs_tree_write_lock
    def remove(self, files, verbose=False, to_file=None, keep_files=True,
        force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        all_files = set() # specified and nested files
        if isinstance(files, basestring):
            files = [files]
        files = list(all_files)
        if len(files) == 0:
            return # nothing to do
        # Sort needed to first handle directory content before the directory
        files.sort(reverse=True)

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.bzrdir._available_backup_name(file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        for f in files:
            fid = self.path2id(f)
            message = None
            if not fid:
                message = "%s is not versioned." % (f,)
            else:
                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    # XXX: Really should be a more abstract reporter interface
                    kind_ch = osutils.kind_marker(self.kind(fid))
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                # FIXME: _unversion_path() is O(size-of-index) for directories
                self._unversion_path(f)
                message = "removed %s" % (f,)
                if osutils.lexists(abs_path):
                    if (osutils.isdir(abs_path) and
                        len(os.listdir(abs_path)) > 0):
                        if force:
                            osutils.rmtree(abs_path)
                            message = "deleted %s" % (f,)
                        else:
                            message = backup(f)
                    else:
                        osutils.delete_any(abs_path)
                        message = "deleted %s" % (f,)
            # print only one message (if any) per file.
            if message is not None:
                trace.note(message)
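    # Hedged usage sketch (not taken from the original source): with a
    # write-locked tree, removing a file from version control while keeping
    # it on disk would look roughly like:
    #
    #   tree.lock_tree_write()
    #   try:
    #       tree.remove(['foo.txt'], keep_files=True)
    #       tree.flush()
    #   finally:
    #       tree.unlock()
    #
    # remove() only touches the in-memory index here; flush() (further down)
    # persists the updated index to .git/index.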
    def _add(self, files, ids, kinds):
        for (path, file_id, kind) in zip(files, ids, kinds):
            if file_id is not None:
                self._fileid_map.set_file_id(path.encode("utf-8"), file_id)
            else:
                file_id = self._fileid_map.lookup_file_id(path.encode("utf-8"))
            self._index_add_entry(path, file_id, kind)
    @needs_tree_write_lock
    def smart_add(self, file_list, recurse=True, action=None, save=True):
        added = []
        ignored = {}
        user_dirs = []
        for filepath in osutils.canonical_relpaths(self.basedir, file_list):
            abspath = self.abspath(filepath)
            kind = osutils.file_kind(abspath)
            if action is not None:
                file_id = action(self, None, filepath, kind)
            else:
                file_id = None
            if kind in ("file", "symlink"):
                self._index_add_entry(filepath, file_id, kind)
                added.append(filepath)
            elif kind == "directory":
                if recurse:
                    user_dirs.append(filepath)
            else:
                raise errors.BadFileKindError(filename=abspath, kind=kind)
        for user_dir in user_dirs:
            abs_user_dir = self.abspath(user_dir)
            for name in os.listdir(abs_user_dir):
                subp = os.path.join(user_dir, name)
                if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
                    continue
                ignore_glob = self.is_ignored(subp)
                if ignore_glob is not None:
                    ignored.setdefault(ignore_glob, []).append(subp)
                    continue
                abspath = self.abspath(subp)
                kind = osutils.file_kind(abspath)
                if kind == "directory":
                    user_dirs.append(subp)
                else:
                    if action is not None:
                        file_id = action(self, None, subp, kind)
                    else:
                        file_id = None
                    self._index_add_entry(subp, file_id, kind)
                    added.append(subp)
        return added, ignored
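    # Rough example (assumed API use, mirroring the code above): smart_add()
    # takes filesystem paths, adds files and symlinks to the index, recurses
    # into directories, and reports ignored matches:
    #
    #   added, ignored = tree.smart_add([tree.abspath('src')])
    #   # added   -> list of paths written to the index
    #   # ignored -> dict mapping ignore glob -> list of skipped paths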
    def _set_root_id(self, file_id):
        self._fileid_map.set_file_id("", file_id)
    @needs_tree_write_lock
    def move(self, from_paths, to_dir=None, after=False):
        rename_tuples = []
        to_abs = self.abspath(to_dir)
        if not os.path.isdir(to_abs):
            raise errors.BzrMoveFailedError('', to_dir,
                errors.NotADirectory(to_abs))
        for from_rel in from_paths:
            from_tail = os.path.split(from_rel)[-1]
            to_rel = os.path.join(to_dir, from_tail)
            self.rename_one(from_rel, to_rel, after=after)
            rename_tuples.append((from_rel, to_rel))
        return rename_tuples
    @needs_tree_write_lock
    def rename_one(self, from_rel, to_rel, after=False):
        from_path = from_rel.encode("utf-8")
        to_path = to_rel.encode("utf-8")
        if not self.has_filename(to_rel):
            raise errors.BzrMoveFailedError(from_rel, to_rel,
                errors.NoSuchFile(to_rel))
        if not from_path in self.index:
            raise errors.BzrMoveFailedError(from_rel, to_rel,
                errors.NotVersionedError(path=from_rel))
        os.rename(self.abspath(from_rel), self.abspath(to_rel))
        self.index[to_path] = self.index[from_path]
        del self.index[from_path]
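    # Sketch of the index manipulation performed by rename_one()/move() above
    # (hypothetical paths): a rename simply re-keys the index entry, the blob
    # itself is unchanged:
    #
    #   tree.rename_one('old.txt', 'new.txt')
    #   # 'old.txt' is gone from tree.index; 'new.txt' now maps to the same
    #   # (stat, sha, flags) entry.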
    def get_root_id(self):
        return self.path2id("")

    def _has_dir(self, path):
        if self._versioned_dirs is None:
            self._load_dirs()
        return path in self._versioned_dirs

    def path2id(self, path):
        encoded_path = path.encode("utf-8")
        if self._is_versioned(encoded_path):
            return self._fileid_map.lookup_file_id(encoded_path)
    def _iter_files_recursive(self, from_dir=None):
        if from_dir is None:
            from_dir = ""
        for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir)):
            dir_relpath = dirpath[len(self.basedir):].strip("/")
            if self.bzrdir.is_control_filename(dir_relpath):
                continue
            for filename in filenames:
                if not self.mapping.is_special_file(filename):
                    yield os.path.join(dir_relpath, filename)
    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        return set(self._iter_files_recursive()) - set(self.index)
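    # Sketch of the intended semantics (an assumption that matches the set
    # arithmetic above): extras() is every path found on disk by
    # _iter_files_recursive() that has no entry in the git index, i.e. the
    # "unknown" files reported by status.
    #
    #   e.g. with index keys {'a', 'b'} and files on disk {'a', 'b', 'c'},
    #   extras() == set(['c'])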
    @needs_tree_write_lock
    def flush(self):
        # TODO: Maybe this should only write on dirty ?
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        self.index.write()
    def __iter__(self):
        for path in self.index:
            yield self.path2id(path)
        self._load_dirs()
        for path in self._versioned_dirs:
            yield self.path2id(path)

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_file_id(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        root_tree = self.store[head].tree
        try:
            tree_lookup_path(self.store.__getitem__, root_tree, path)
        except KeyError:
            return False
        else:
            return True

    def has_id(self, file_id):
        try:
            self.id2path(file_id)
        except errors.NoSuchId:
            return False
        else:
            return True

    def id2path(self, file_id):
        assert type(file_id) is str, "file id not a string: %r" % file_id
        file_id = osutils.safe_utf8(file_id)
        path = self._fileid_map.lookup_path(file_id)
        # FIXME: What about directories?
        if self._is_versioned(path):
            return path.decode("utf-8")
        raise errors.NoSuchId(self, file_id)

    def get_file_mtime(self, file_id, path=None):
        """See Tree.get_file_mtime."""
        if path is None:
            path = self.id2path(file_id)
        return os.lstat(self.abspath(path)).st_mtime
    def get_ignore_list(self):
        ignoreset = getattr(self, '_ignoreset', None)
    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)
    def _is_versioned(self, path):
        assert self._lock_mode is not None
        return (path in self.index or self._has_dir(path))

    def filter_unversioned_files(self, files):
        return set([p for p in files if not self._is_versioned(p.encode("utf-8"))])

    def _get_dir_ie(self, path, parent_id):
        file_id = self.path2id(path)
        return inventory.InventoryDirectory(file_id,
            posixpath.basename(path).strip("/"), parent_id)

    def _add_missing_parent_ids(self, path, dir_ids):
        if path in dir_ids:
            return []
        parent = posixpath.dirname(path).strip("/")
        ret = self._add_missing_parent_ids(parent, dir_ids)
        parent_id = dir_ids[parent]
        ie = self._get_dir_ie(path, parent_id)
        dir_ids[path] = ie.file_id
        ret.append((path, ie))
        return ret

    def _get_file_ie(self, name, path, value, parent_id):
        assert isinstance(name, unicode)
        assert isinstance(path, unicode)
        assert isinstance(value, tuple) and len(value) == 10
        (ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = value
        file_id = self.path2id(path)
        if type(file_id) != str:
            raise AssertionError("file id not a string: %r" % file_id)
        kind = mode_kind(mode)
        ie = inventory.entry_factory[kind](file_id, name, parent_id)
        if kind == 'symlink':
            ie.symlink_target = self.get_symlink_target(file_id)
        else:
            data = self.get_file_text(file_id, path)
            ie.text_sha1 = osutils.sha_string(data)
            ie.text_size = len(data)
            ie.executable = self.is_executable(file_id, path)
        return ie
    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def stored_kind(self, file_id, path=None):
        if path is None:
            path = self.id2path(file_id)
        try:
            return mode_kind(self.index[path.encode("utf-8")][4])
        except KeyError:
            # Maybe it's a directory?
            if self._has_dir(path):
                return "directory"
            raise errors.NoSuchId(self, file_id)

    if not osutils.supports_executable():
        def is_executable(self, file_id, path=None):
            basis_tree = self.basis_tree()
            if file_id in basis_tree:
                return basis_tree.is_executable(file_id)
            # Default to not executable
            return False
    else:
        def is_executable(self, file_id, path=None):
            if path is None:
                path = self.id2path(file_id)
            mode = os.lstat(self.abspath(path)).st_mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

        _is_executable_from_path_and_stat = \
            _is_executable_from_path_and_stat_from_stat
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        # FIXME: Yield non-versioned files
        if from_dir is None:
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': workingtree.TreeDirectory,
            'file': workingtree.TreeFile,
            'symlink': workingtree.TreeLink}
        root_ie = self._get_dir_ie(u"", None)
        if include_root and not from_dir:
            yield "", "V", root_ie.kind, root_ie.file_id, root_ie
        dir_ids[u""] = root_ie.file_id
        if recursive:
            path_iterator = self._iter_files_recursive(from_dir)
        else:
            start = os.path.join(self.basedir, from_dir)
            path_iterator = sorted([os.path.join(from_dir, name) for name in
                os.listdir(start) if not self.bzrdir.is_control_filename(name)
                and not self.mapping.is_special_file(name)])
        for path in path_iterator:
            try:
                value = self.index[path]
            except KeyError:
                value = None
            path = path.decode("utf-8")
            parent, name = posixpath.split(path)
            for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
                yield dir_path, "V", dir_ie.kind, dir_ie.file_id, dir_ie
            if value is not None:
                ie = self._get_file_ie(name, path, value, dir_ids[parent])
                yield path, "V", ie.kind, ie.file_id, ie
            else:
                kind = osutils.file_kind(self.abspath(path))
                ie = fk_entries[kind]()
                yield path, "?", kind, None, ie
    def all_file_ids(self):
        ids = {u"": self.path2id("")}
        for path in self.index:
            if self.mapping.is_special_file(path):
                continue
            path = path.decode("utf-8")
            parent = posixpath.dirname(path).strip("/")
            for e in self._add_missing_parent_ids(parent, ids):
                pass
            ids[path] = self.path2id(path)
        return set(ids.values())
    def _directory_is_tree_reference(self, path):
        # FIXME: Check .gitsubmodules for path
        return False
    def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
        # FIXME: Is return order correct?
        if yield_parents:
            raise NotImplementedError(self.iter_entries_by_dir)
        if specific_file_ids is not None:
            specific_paths = [self.id2path(file_id) for file_id in specific_file_ids]
            if specific_paths in ([u""], []):
                specific_paths = None
            else:
                specific_paths = set(specific_paths)
        else:
            specific_paths = None
        root_ie = self._get_dir_ie(u"", None)
        if specific_paths is None:
            yield u"", root_ie
        dir_ids = {u"": root_ie.file_id}
        for path, value in self.index.iteritems():
            if self.mapping.is_special_file(path):
                continue
            path = path.decode("utf-8")
            if specific_paths is not None and not path in specific_paths:
                continue
            (parent, name) = posixpath.split(path)
            file_ie = self._get_file_ie(name, path, value, None)
            for (dir_path, dir_ie) in self._add_missing_parent_ids(parent,
                    dir_ids):
                yield dir_path, dir_ie
            file_ie.parent_id = self.path2id(parent)
            yield path, file_ie
    def conflicts(self):
        return _mod_conflicts.ConflictList()
    def update_basis_by_delta(self, new_revid, delta):
        # The index just contains content, which won't have changed.
    def get_canonical_inventory_path(self, path):
        if p.lower() == path.lower():
    def _walkdirs(self, prefix=""):
        per_dir = defaultdict(list)
        for path, value in self.index.iteritems():
            if self.mapping.is_special_file(path):
                continue
            if not path.startswith(prefix):
                continue
            (dirname, child_name) = posixpath.split(path)
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            assert isinstance(value, tuple) and len(value) == 10
            (ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = value
            stat_result = os.stat_result((mode, ino,
                dev, 1, uid, gid, size,
                mtime, mtime, ctime))
            per_dir[(dirname, dir_file_id)].append(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                mode_kind(mode), stat_result,
                self.path2id(path.decode("utf-8")),
                mode_kind(mode)))
        return per_dir.iteritems()
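    # Illustrative shape of the value returned above (hypothetical paths):
    # _walkdirs() groups index entries per parent directory, so iterating the
    # result yields items such as:
    #
    #   ((u'lib', 'lib-file-id'),
    #    [(u'lib/foo.py', u'foo.py', 'file', <stat_result>, 'foo-file-id',
    #      'file')])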
class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    @property
    def _matchingbzrdir(self):
        from bzrlib.plugins.git.dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_bzrdir, revision_id=None, from_branch=None,
        accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_bzrdir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_bzrdir)
        index = Index(a_bzrdir.root_transport.local_abspath(".git/index"))
        return GitWorkingTree(a_bzrdir, a_bzrdir.open_repository(),
            a_bzrdir.open_branch(), index)
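    # Hedged usage sketch (not part of the change itself): given a local git
    # control directory `a_bzrdir`, the format builds the tree around the
    # .git/index file, so creating and reading a tree is roughly:
    #
    #   tree = GitWorkingTreeFormat().initialize(a_bzrdir)
    #   tree.lock_read()
    #   try:
    #       file_ids = list(tree)  # assuming the __iter__ defined above
    #   finally:
    #       tree.unlock()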
class InterIndexGitTree(tree.InterTree):
    """InterTree that works between a Git revision tree and an index."""

    def __init__(self, source, target):
        super(InterIndexGitTree, self).__init__(source, target)
        assert target.is_locked()
        self._index = target.index
    def compare(self, want_unchanged=False, specific_files=None,
        extra_trees=None, require_versioned=False, include_root=False,
        want_unversioned=False):
        # FIXME: Handle include_root
        changes = changes_between_git_tree_and_index(
            self.source.store, self.source.tree,
            self.target.basedir, self.target.index,
            want_unchanged=want_unchanged,
            want_unversioned=want_unversioned)
        source_fileid_map = self.source._fileid_map
        target_fileid_map = self.target._fileid_map
        ret = tree_delta_from_git_changes(changes, self.target.mapping,
            (source_fileid_map, target_fileid_map),
            specific_file=specific_files, require_versioned=require_versioned)
        if want_unversioned:
            for e in self.target.extras():
                ret.unversioned.append((e, None,
                    osutils.file_kind(self.target.abspath(e))))
        return ret
    def iter_changes(self, include_unchanged=False, specific_files=None,
        pb=None, extra_trees=[], require_versioned=True,
        want_unversioned=False):
        changes = changes_between_git_tree_and_index(
            self.source.store, self.source.tree,
            self.target.basedir, self.target.index,
            want_unchanged=include_unchanged,
            want_unversioned=want_unversioned)
        return changes_from_git_changes(changes, self.target.mapping,
            specific_file=specific_files)
tree.InterTree.register_optimiser(InterIndexGitTree)


def changes_between_git_tree_and_index(object_store, tree, base_path, index,
    want_unchanged=False, want_unversioned=False, update_index=False):
    """Determine the changes between a git tree and a working tree with index.
    """
    names = index._byname.keys()
    def lookup_entry(path):
        entry = index[path]
        index_mode = entry[-6]
        index_sha = entry[-2]
        disk_path = os.path.join(base_path, path)
        disk_stat = os.lstat(disk_path)
        disk_mtime = disk_stat.st_mtime
        if isinstance(entry[1], tuple):
            index_mtime = entry[1][0]
        else:
            index_mtime = int(entry[1])
        mtime_delta = (index_mtime - disk_mtime)
        disk_mode = cleanup_mode(disk_stat.st_mode)
        if (mtime_delta > 0 or
            disk_mode != index_mode):
            with open(disk_path, 'r') as f:
                blob = Blob.from_string(f.read())
            flags = 0
            if update_index:
                index[path] = index_entry_from_stat(disk_stat, blob.id, flags)
            return (blob.id, disk_mode)
        return (index_sha, index_mode)
    for (name, mode, sha) in changes_from_tree(names, lookup_entry,
        object_store, tree, want_unchanged=want_unchanged):
        yield (name, mode, sha)
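# Rough usage sketch (assumed, based only on the signature above): the helper
# is driven with a dulwich object store, the basis git tree sha, the working
# tree's base path and its index, and it yields (name, mode, sha) change
# tuples:
#
#   repo = tree.repository._git
#   for name, mode, sha in changes_between_git_tree_and_index(
#           repo.object_store, repo[repo.head()].tree,
#           tree.basedir, tree.index, want_unchanged=False):
#       print name, mode, sha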