Lines 120-162:

raise AssertionError('How is both old and new None?')
change = (file_id,
    (old_path, new_path),
    ...
change = (file_id,
    (old_path, new_path),
    ...
    (None, ie.parent_id),
    ...
    (None, ie.executable),
...
change = (file_id,
    (old_path, new_path),
    ...
    (old_ie.parent_id, None),
    ...
    (old_ie.executable, None),
...
- content_modified = (ie.text_sha1 != old_ie.text_sha1
-     or ie.text_size != old_ie.text_size)
+ content_modified = (ie.text_sha1 != old_ie.text_sha1 or
+     ie.text_size != old_ie.text_size)
# TODO: ie.kind != old_ie.kind
# TODO: symlinks changing targets, content_modified?
change = (file_id,
    (old_path, new_path),
    ...
    (old_ie.parent_id, ie.parent_id),
    (old_ie.name, ie.name),
    (old_ie.kind, ie.kind),
    (old_ie.executable, ie.executable),
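The tuples assembled above appear to follow the layout bzrlib's iter_changes uses: (file_id, (old_path, new_path), changed_content, versioned, parent, name, kind, executable), where each field after the paths is an (old, new) pair. A minimal sketch of one such tuple for a newly added file; the ids, paths and values are invented for illustration:

    change = (b'readme-id',       # file_id
        (None, 'README'),         # (old_path, new_path): no old path, the file is new
        True,                     # changed_content
        (False, True),            # versioned in (old, new)
        (None, b'tree-root-id'),  # parent directory id in (old, new)
        (None, 'README'),         # name in (old, new)
        (None, 'file'),           # kind in (old, new)
        (None, False),            # executable bit in (old, new)
        )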
Lines 212-218:

inv.id_to_entry = chk_map.CHKMap(chk_store, None, search_key_func)
inv.id_to_entry._root_node.set_maximum_size(maximum_size)
inv.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store,
    None, search_key_func)
inv.parent_id_basename_to_file_id._root_node.set_maximum_size(
    maximum_size)
inv.parent_id_basename_to_file_id._root_node._key_width = 2
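For context, a CHKInventory keeps two CHK maps: id_to_entry, keyed by file id, and parent_id_basename_to_file_id, keyed by (parent directory id, basename) pairs, which is why _key_width is set to 2 above. A plain-dict stand-in with invented values, just to show the key shape:

    parent_id_basename_to_file_id = {
        (b'tree-root-id', b'README'): b'readme-id',  # key: (parent_id, basename)
        (b'tree-root-id', b'src'): b'src-dir-id',    # value: the child's file id
    }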
Lines 251-256:

# new write group. We want one write group around a batch of imports
# where the default batch size is currently 10000. IGC 20090312
self._commit_builder = self.repo._commit_builder_class(self.repo,
    parents, config, timestamp=revision.timestamp,
    timezone=revision.timezone, committer=revision.committer,
    revprops=revision.properties, revision_id=revision.revision_id)
Lines 258-314:

def get_parents_and_revision_for_entry(self, ie):
    """Get the parents and revision for an inventory entry.
    ...
    """
    # Check for correct API usage
    if self._current_rev_id is None:
        raise AssertionError("start_new_revision() must be called"
            " before get_parents_and_revision_for_entry()")
    if ie.revision != self._current_rev_id:
        raise AssertionError("start_new_revision() registered a different"
            " revision (%s) to that in the inventory entry (%s)" %
            (self._current_rev_id, ie.revision))

    # Find the heads. This code is lifted from
    # repository.CommitBuilder.record_entry_contents().
    parent_candidate_entries = ie.parent_candidates(self._rev_parent_invs)
    head_set = self._commit_builder._heads(ie.file_id,
        list(parent_candidate_entries))
    ...
    for inv in self._rev_parent_invs:
        ...
    if len(heads) > 1:
        ...
-   elif (parent_entry.name != ie.name or parent_entry.kind != ie.kind or
-       parent_entry.parent_id != ie.parent_id):
+   elif (parent_entry.name != ie.name or parent_entry.kind != ie.kind
+       or parent_entry.parent_id != ie.parent_id):
        ...
    elif ie.kind == 'file':
-       if (parent_entry.text_sha1 != ie.text_sha1 or
-           parent_entry.executable != ie.executable):
+       if (parent_entry.text_sha1 != ie.text_sha1
+           or parent_entry.executable != ie.executable):
            ...
    elif ie.kind == 'symlink':
        if parent_entry.symlink_target != ie.symlink_target:
            ...
    ...
    return tuple(heads), rev_id
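Taken together, the comparisons above decide whether the entry has really changed relative to the chosen per-file head; if it has not, the parent entry's revision can be reused instead of recording a new one against the current revision. A compressed restatement as a hypothetical helper (not part of the file; the conditions are copied from the excerpt):

    def _entry_changed(ie, parent_entry, heads):
        # More than one per-file head always counts as a change.
        if len(heads) > 1:
            return True
        # Renames, kind changes and reparenting count as changes.
        if (parent_entry.name != ie.name or parent_entry.kind != ie.kind
                or parent_entry.parent_id != ie.parent_id):
            return True
        # For files, compare the content hash and the executable bit.
        if ie.kind == 'file':
            return (parent_entry.text_sha1 != ie.text_sha1
                    or parent_entry.executable != ie.executable)
        # For symlinks, compare the link target.
        if ie.kind == 'symlink':
            return parent_entry.symlink_target != ie.symlink_target
        return False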
Lines 316-390:

def load_using_delta(self, rev, basis_inv, inv_delta, signature,
    text_provider, parents_provider, inventories_provider=None):
    """Load a revision by applying a delta to a (CHK)Inventory.

    :param rev: the Revision
    ...
    """
    # TODO: set revision_id = rev.revision_id
    builder = self.repo._commit_builder_class(self.repo,
        parents=rev.parent_ids, config=None, timestamp=rev.timestamp,
        timezone=rev.timezone, committer=rev.committer,
        revprops=rev.properties, revision_id=rev.revision_id)
    if self._graph is None and self._use_known_graph:
-       if (getattr(_mod_graph, 'GraphThunkIdsToKeys', None) and
-           getattr(_mod_graph.GraphThunkIdsToKeys, "add_node", None) and
-           getattr(self.repo, "get_known_graph_ancestry", None)):
+       if (getattr(_mod_graph, 'GraphThunkIdsToKeys', None)
+           and getattr(_mod_graph.GraphThunkIdsToKeys, "add_node", None)
+           and getattr(self.repo, "get_known_graph_ancestry", None)):
            self._graph = self.repo.get_known_graph_ancestry(
                ...)
        else:
            self._use_known_graph = False
    if self._graph is not None:
        orig_heads = builder._heads

        def thunked_heads(file_id, revision_ids):
            # self._graph thinks in terms of keys, not ids, so translate
            ...
    ...
    rev.inv_sha1 = builder.inv_sha1
    config = builder._config_stack
    builder.repository.add_revision(builder._new_revision_id, rev,
        builder.revision_tree().root_inventory)
    if self._graph is not None:
        # TODO: Use StaticTuple and .intern() for these things
        self._graph.add_node(builder._new_revision_id, rev.parent_ids)
Lines 396-402:

def get_file_lines(self, revision_id, file_id):
    record = next(self.repo.texts.get_record_stream([(file_id, revision_id)],
        ...
    if record.storage_kind == 'absent':
        raise errors.RevisionNotPresent(record.key, self.repo)
    return osutils.split_lines(record.get_bytes_as('fulltext'))
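A short usage sketch for the accessor above; the receiver name and the ids are placeholders, not names from this code:

    # osutils.split_lines() keeps the newline terminators, so joining the
    # result reproduces the stored fulltext.
    lines = revision_store.get_file_lines(b'some-revision-id', b'some-file-id')
    text = b''.join(lines)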