@@ -104 +109 @@
     # the default CommitBuilder does not manage trees whose root is versioned.
     _versioned_root = False

-    def __init__(self, repository, parents, config, timestamp=None,
+    def __init__(self, repository, parents, config_stack, timestamp=None,
                  timezone=None, committer=None, revprops=None,
                  revision_id=None, lossy=False):
         super(VersionedFileCommitBuilder, self).__init__(repository,
-            parents, config, timestamp, timezone, committer, revprops,
+            parents, config_stack, timestamp, timezone, committer, revprops,
             revision_id, lossy)
         try:
             basis_id = self.parents[0]
@@ -195 +200 @@
                        revision_id=self._new_revision_id,
                        properties=self._revprops)
         rev.parent_ids = self.parents
-        self.repository.add_revision(self._new_revision_id, rev,
-            self.new_inventory, self._config)
+        if self._config_stack.get('create_signatures') == _mod_config.SIGN_ALWAYS:
+            testament = Testament(rev, self.revision_tree())
+            plaintext = testament.as_short_text()
+            self.repository.store_revision_signature(
+                gpg.GPGStrategy(self._config_stack), plaintext,
+                self._new_revision_id)
+        self.repository._add_revision(rev)
         self._ensure_fallback_inventories()
         self.repository.commit_write_group()
         return self._new_revision_id
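
Note: signature creation has moved out of Repository.add_revision() and into commit() itself, keyed off the 'create_signatures' policy resolved through the config stack. A minimal standalone sketch of the same decision, assuming the bzrlib APIs used in the hunk above (Testament, gpg.GPGStrategy, store_revision_signature); the helper name is hypothetical:

from bzrlib import config as _mod_config, gpg
from bzrlib.testament import Testament

def maybe_sign(repository, rev, revision_tree, config_stack):
    # Hypothetical helper mirroring the new commit() logic: only sign when
    # the stacked 'create_signatures' option resolves to SIGN_ALWAYS.
    if config_stack.get('create_signatures') != _mod_config.SIGN_ALWAYS:
        return
    plaintext = Testament(rev, revision_tree).as_short_text()
    repository.store_revision_signature(
        gpg.GPGStrategy(config_stack), plaintext, rev.revision_id)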
@@ -1028 +1038 @@
             self.inventories._access.flush()
@@ -1031 +1041 @@
-    def add_revision(self, revision_id, rev, inv=None, config=None):
+    def add_revision(self, revision_id, rev, inv=None):
         """Add rev to the revision store as revision_id.

         :param revision_id: the revision id to use.
         :param rev: The revision object.
         :param inv: The inventory for the revision. if None, it will be looked
                     up in the inventory storer
-        :param config: If None no digital signature will be created.
-                       If supplied its signature_needed method will be used
-                       to determine if a signature should be made.
         """
         # TODO: jam 20070210 Shouldn't we check rev.revision_id and
         #       rev.parent_ids?
         _mod_revision.check_not_reserved_id(revision_id)
-        if config is not None and config.signature_needed():
-            if inv is None:
-                inv = self.get_inventory(revision_id)
-            tree = InventoryRevisionTree(self, inv, revision_id)
-            testament = Testament(rev, tree)
-            plaintext = testament.as_short_text()
-            self.store_revision_signature(
-                gpg.GPGStrategy(config), plaintext, revision_id)
         # check inventory present
         if not self.inventories.get_parent_map([(revision_id,)]):
             if inv is None:
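
Note: add_revision() has lost its config parameter, and with it the implicit signing side effect. A sketch of an updated call site; the helper name is hypothetical, and any signing now happens separately (see the commit() hunk above):

def store_revision(repo, rev, inv):
    # Hypothetical caller of the new signature-free add_revision(); the
    # revision must be added inside a write group on a write-locked repo.
    repo.lock_write()
    try:
        repo.start_write_group()
        try:
            repo.add_revision(rev.revision_id, rev, inv=inv)
        except:
            repo.abort_write_group()
            raise
        repo.commit_write_group()
    finally:
        repo.unlock()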
@@ -1199 +1198 @@
         """Instantiate a VersionedFileRepository.

         :param _format: The format of the repository on disk.
-        :param a_bzrdir: The BzrDir of the repository.
+        :param controldir: The ControlDir of the repository.
         :param control_files: Control files to use for locking, etc.
         """
         # In the future we will have a single api for all stores for
@@ -1219 +1218 @@
         # rather copying them?
         self._safe_to_return_from_cache = False

+    def fetch(self, source, revision_id=None, find_ghosts=False,
+            fetch_spec=None):
+        """Fetch the content required to construct revision_id from source.
+
+        If revision_id is None and fetch_spec is None, then all content is
+        copied.
+
+        fetch() may not be used when the repository is in a write group -
+        either finish the current write group before using fetch, or use
+        fetch before starting the write group.
+
+        :param find_ghosts: Find and copy revisions in the source that are
+            ghosts in the target (and not reachable directly by walking out to
+            the first-present revision in target from revision_id).
+        :param revision_id: If specified, all the content needed for this
+            revision ID will be copied to the target. Fetch will determine for
+            itself which content needs to be copied.
+        :param fetch_spec: If specified, a SearchResult or
+            PendingAncestryResult that describes which revisions to copy. This
+            allows copying multiple heads at once. Mutually exclusive with
+            revision_id.
+        """
+        if fetch_spec is not None and revision_id is not None:
+            raise AssertionError(
+                "fetch_spec and revision_id are mutually exclusive.")
+        if self.is_in_write_group():
+            raise errors.InternalBzrError(
+                "May not fetch while in a write group.")
+        # fast path same-url fetch operations
+        # TODO: lift out to somewhere common with RemoteRepository
+        # <https://bugs.launchpad.net/bzr/+bug/401646>
+        if (self.has_same_location(source)
+            and fetch_spec is None
+            and self._has_same_fallbacks(source)):
+            # check that last_revision is in 'from' and then return a
+            # no-operation.
+            if (revision_id is not None and
+                not _mod_revision.is_null(revision_id)):
+                self.get_revision(revision_id)
+            return 0, []
+        inter = InterRepository.get(source, self)
+        if (fetch_spec is not None and
+            not getattr(inter, "supports_fetch_spec", False)):
+            raise errors.UnsupportedOperation(
+                "fetch_spec not supported for %r" % inter)
+        return inter.fetch(revision_id=revision_id,
+            find_ghosts=find_ghosts, fetch_spec=fetch_spec)
+
     @needs_read_lock
     def gather_stats(self, revid=None, committers=None):
         """See Repository.gather_stats()."""
@@ -1233 +1280 @@
         # result['size'] = t
@@ -1236 +1283 @@
-    def get_commit_builder(self, branch, parents, config, timestamp=None,
+    def get_commit_builder(self, branch, parents, config_stack, timestamp=None,
                            timezone=None, committer=None, revprops=None,
                            revision_id=None, lossy=False):
         """Obtain a CommitBuilder for this repository.

         :param branch: Branch to commit to.
         :param parents: Revision ids of the parents of the new revision.
-        :param config: Configuration to use.
+        :param config_stack: Configuration stack to use.
         :param timestamp: Optional timestamp recorded for commit.
         :param timezone: Optional timezone for timestamp.
         :param committer: Optional committer to set for commit.
@@ -1253 +1300 @@
             raise errors.BzrError("Cannot commit directly to a stacked branch"
                 " in pre-2a formats. See "
                 "https://bugs.launchpad.net/bzr/+bug/375013 for details.")
-        result = self._commit_builder_class(self, parents, config,
+        result = self._commit_builder_class(self, parents, config_stack,
             timestamp, timezone, committer, revprops, revision_id,
             lossy)
         self.start_write_group()
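
Note: get_commit_builder() now takes a config stack, and Branch.get_config_stack() is the natural way for callers to obtain one. A low-level sketch of the updated call, with the recording step elided (WorkingTree.commit() remains the normal entry point and does this wiring itself):

from bzrlib.branch import Branch

branch = Branch.open('.')
repo = branch.repository
repo.lock_write()
try:
    builder = repo.get_commit_builder(
        branch, parents=[branch.last_revision()],   # assumes a non-empty branch
        config_stack=branch.get_config_stack())
    # ... record tree contents on the builder here ...
    builder.abort()   # discard the example write group again
finally:
    repo.unlock()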
@@ -1515 +1562 @@
             text_keys[(file_id, revision_id)] = callable_data
         for record in self.texts.get_record_stream(text_keys, 'unordered', True):
             if record.storage_kind == 'absent':
-                raise errors.RevisionNotPresent(record.key, self)
+                raise errors.RevisionNotPresent(record.key[1], record.key[0])
             yield text_keys[record.key], record.get_bytes_as('chunked')

     def _generate_text_key_index(self, text_key_references=None,
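
Note: the RevisionNotPresent raised from iter_files_bytes() now receives (revision_id, file_id) from the record key instead of (key, repository), matching the exception's constructor and producing a sensible message. A hedged sketch of a caller that reports the missing text (desired_files and consume are illustrative):

from bzrlib import errors

def read_file_texts(repo, desired_files, consume):
    # desired_files is the usual iter_files_bytes() input of
    # (file_id, revision_id, identifier) tuples.
    try:
        for identifier, chunks in repo.iter_files_bytes(desired_files):
            consume(identifier, chunks)
    except errors.RevisionNotPresent as e:
        # With the change above, the exception now names the missing text
        # version and its file id rather than a raw key and the repository.
        raise AssertionError(
            'text %s of file %s is missing' % (e.revision_id, e.file_id))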
@@ -1699 +1746 @@
         if ((None in revision_ids)
             or (_mod_revision.NULL_REVISION in revision_ids)):
             raise ValueError('cannot get null revision inventory')
-        return self._iter_inventories(revision_ids, ordering)
+        for inv, revid in self._iter_inventories(revision_ids, ordering):
+            if inv is None:
+                raise errors.NoSuchRevision(self, revid)
+            yield inv

     def _iter_inventories(self, revision_ids, ordering):
         """single-document based inventory iteration."""
         inv_xmls = self._iter_inventory_xmls(revision_ids, ordering)
         for text, revision_id in inv_xmls:
-            yield self._deserialise_inventory(revision_id, text)
+            if text is None:
+                yield None, revision_id
+            else:
+                yield self._deserialise_inventory(revision_id, text), revision_id

     def _iter_inventory_xmls(self, revision_ids, ordering):
         if ordering is None:
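
Note: iter_inventories() now walks the (inventory, revision_id) pairs produced by the private _iter_inventories() and raises NoSuchRevision naming the specific missing revision, while the lower layers yield None placeholders instead of raising mid-stream. A sketch of a tolerant consumer built directly on the private pair-yielding iterator, shown only to illustrate the new contract:

def partition_inventories(repo, revision_ids):
    # Hypothetical helper: split requested ids into those whose inventories
    # are present and those that are absent, using the None placeholders
    # yielded by the (private) _iter_inventories() contract above.
    present, missing = {}, []
    for inv, revision_id in repo._iter_inventories(revision_ids, 'unordered'):
        if inv is None:
            missing.append(revision_id)
        else:
            present[revision_id] = inv
    return present, missing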
@@ -1730 +1783 @@
                     yield ''.join(chunks), record.key[-1]
                 else:
-                    raise errors.NoSuchRevision(self, record.key)
+                    yield None, record.key[-1]
             if order_as_requested:
                 # Yield as many results as we can while preserving order.
                 while next_key in text_chunks:
@@ -1764 +1817 @@
     def _get_inventory_xml(self, revision_id):
         """Get serialized inventory as a string."""
         texts = self._iter_inventory_xmls([revision_id], 'unordered')
-        try:
-            text, revision_id = texts.next()
-        except StopIteration:
-            raise errors.HistoryMissing(self, 'inventory', revision_id)
+        text, revision_id = texts.next()
+        if text is None:
+            raise errors.NoSuchRevision(self, revision_id)
         return text

     @needs_read_lock
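
Note: _get_inventory_xml() now reports a missing inventory as NoSuchRevision rather than HistoryMissing, consistent with the rest of the API, so callers catching the old exception should be updated. A small sketch against the private helper shown above; the helper name is hypothetical:

from bzrlib import errors

def inventory_xml_or_none(repo, revision_id):
    # Hypothetical helper: return the serialized inventory, or None when the
    # revision's inventory is absent (previously signalled by HistoryMissing).
    try:
        return repo._get_inventory_xml(revision_id)
    except errors.NoSuchRevision:
        return None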
@@ -1848 +1900 @@
         """Return the graph walker for text revisions."""
         return graph.Graph(self.texts)

+    def revision_ids_to_search_result(self, result_set):
+        """Convert a set of revision ids to a graph SearchResult."""
+        result_parents = set()
+        for parents in self.get_graph().get_parent_map(
+            result_set).itervalues():
+            result_parents.update(parents)
+        included_keys = result_set.intersection(result_parents)
+        start_keys = result_set.difference(included_keys)
+        exclude_keys = result_parents.difference(result_set)
+        result = vf_search.SearchResult(start_keys, exclude_keys,
+            len(result_set), result_set)
+        return result
+
     def _get_versioned_file_checker(self, text_key_references=None,
         ancestors=None):
         """Return an object suitable for checking versioned files.
@@ -2570 +2637 @@
             searcher.stop_searching_any(stop_revs)
             if searcher_exhausted:
                 break
-        return searcher.get_result()
+        (started_keys, excludes, included_keys) = searcher.get_state()
+        return vf_search.SearchResult(started_keys, excludes,
+            len(included_keys), included_keys)

     @needs_read_lock
     def search_missing_revision_ids(self,