# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    errors,
    knit as _mod_knit,
    lockable_files,
    lockdir,
    osutils,
    revision as _mod_revision,
    trace,
    transactions,
    versionedfile,
    xml5,
    )
""")
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.repository import (
    MetaDirRepository,
    MetaDirRepositoryFormat,
    RepositoryFormat,
    )


class _KnitsParentsProvider(object):

    def __init__(self, knit, prefix=()):
        """Create a parent provider for string keys mapped to tuple keys."""
        self._knit = knit
        self._prefix = prefix

    def __repr__(self):
        return 'KnitsParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = self._knit.get_parent_map(
            [self._prefix + (key,) for key in keys])
        result = {}
        for key, parents in parent_map.items():
            revid = key[-1]
            if len(parents) == 0:
                parents = (_mod_revision.NULL_REVISION,)
            else:
                parents = tuple(parent[-1] for parent in parents)
            result[revid] = parents
        for revision_id in keys:
            if revision_id == _mod_revision.NULL_REVISION:
                result[revision_id] = ()
        return result
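
# Example (illustrative): given a revisions knit whose key ('rev-2',) has the
# parent ('rev-1',), _KnitsParentsProvider(revisions).get_parent_map(['rev-2'])
# returns {'rev-2': ('rev-1',)}. A revision that is present but has no parents
# maps to (NULL_REVISION,) rather than (), and NULL_REVISION itself maps to (),
# matching the get_parent_map contract referenced in the docstring above.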


class KnitRepository(MetaDirRepository):
    """Knit format repository."""

    _commit_builder_class = None
    _serializer = None

    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
        _serializer):
        MetaDirRepository.__init__(self, _format, a_bzrdir, control_files)
        self._commit_builder_class = _commit_builder_class
        self._serializer = _serializer
        self._reconcile_fixes_text_parents = True

    def _warn_if_deprecated(self):
        # This class isn't deprecated
        pass

    def _inventory_add_lines(self, inv_vf, revid, parents, lines, check_content):
        return inv_vf.add_lines_with_ghosts(revid, parents, lines,
            check_content=check_content)[0]

    @needs_read_lock
    def _all_revision_ids(self):
        """See Repository.all_revision_ids()."""
        return [key[0] for key in self.revisions.keys()]

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self._transport
        t.copy('inventory.new.kndx', 'inventory.kndx')
        try:
            t.copy('inventory.new.knit', 'inventory.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            t.delete('inventory.knit')
        # delete the temp inventory
        t.delete('inventory.new.kndx')
        try:
            t.delete('inventory.new.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            pass
        # Force index reload (sanity check)
        self.inventories._index._reset_cache()
        self.inventories.keys()
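
    # Note: each knit in this format is stored as a pair of files, a '.kndx'
    # index and a '.knit' data file; an inventories knit that has never had
    # content written has only the index, which is why the '.knit' copy and
    # delete above tolerate errors.NoSuchFile ("empty inventories knit").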

    def _backup_inventory(self):
        t = self._transport
        t.copy('inventory.kndx', 'inventory.backup.kndx')
        t.copy('inventory.knit', 'inventory.backup.knit')

    def _move_file_id(self, from_id, to_id):
        t = self._transport.clone('knits')
        from_rel_url = self.texts._index._mapper.map((from_id, None))
        to_rel_url = self.texts._index._mapper.map((to_id, None))
        # We expect both files to always exist in this case.
        for suffix in ('.knit', '.kndx'):
            t.rename(from_rel_url + suffix, to_rel_url + suffix)

    def _remove_file_id(self, file_id):
        t = self._transport.clone('knits')
        rel_url = self.texts._index._mapper.map((file_id, None))
        for suffix in ('.kndx', '.knit'):
            try:
                t.delete(rel_url + suffix)
            except errors.NoSuchFile:
                pass

    def _temp_inventories(self):
        result = self._format._get_inventories(self._transport, self,
            'inventory.new')
        # Reconciling when the output has no revisions would result in no
        # writes - but we want to ensure there is an inventory for
        # compatibility with older clients that don't lazy-load.
        result.get_parent_map([('A',)])
        return result

    def fileid_involved_between_revs(self, from_revid, to_revid):
        """Find file_id(s) which are involved in the changes between revisions.

        This determines the set of revisions that are changed between the two
        revisions, and then finds all file ids affected by those revisions.
        """
        changed = set(self.get_ancestry(to_revid)).difference(
            set(self.get_ancestry(from_revid)))
        return self._fileid_involved_by_set(changed)

    @needs_read_lock
    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        revision_id = osutils.safe_revision_id(revision_id)
        return self.get_revision_reconcile(revision_id)

    def _refresh_data(self):
        if not self.is_locked():
            return
        # Create a new transaction to force all knits to see the scope change.
        # This is safe because we're outside a write group.
        self.control_files._finish_transaction()
        if self.is_write_locked():
            self.control_files._set_write_transaction()
        else:
            self.control_files._set_read_transaction()

    @needs_write_lock
    def reconcile(self, other=None, thorough=False):
        """Reconcile this repository."""
        from bzrlib.reconcile import KnitReconciler
        reconciler = KnitReconciler(self, thorough=thorough)
        reconciler.reconcile()
        return reconciler

    def _make_parents_provider(self):
        return _KnitsParentsProvider(self.revisions)

    def _find_inconsistent_revision_parents(self, revisions_iterator=None):
        """Find revisions with different parent lists in the revision object
        and in the index graph.

        :param revisions_iterator: None, or an iterator of (revid,
            Revision-or-None). This iterator controls the revisions checked.
        :returns: an iterator yielding tuples of (revision-id, parents-in-index,
            parents-in-revision).
        """
        if not self.is_locked():
            raise AssertionError()
        vf = self.revisions
        if revisions_iterator is None:
            revisions_iterator = self._iter_revisions(None)
        for revid, revision in revisions_iterator:
            parent_map = vf.get_parent_map([(revid,)])
            parents_according_to_index = tuple(parent[-1] for parent in
                parent_map[(revid,)])
            parents_according_to_revision = tuple(revision.parent_ids)
            if parents_according_to_index != parents_according_to_revision:
                yield (revid, parents_according_to_index,
                    parents_according_to_revision)

    def _check_for_inconsistent_revision_parents(self):
        inconsistencies = list(self._find_inconsistent_revision_parents())
        if inconsistencies:
            raise errors.BzrCheckError(
                "Revision knit has inconsistent parents.")
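
    # Example (illustrative): if the revision index records ('rev-2',) as
    # having the parent ('rev-1',) while the serialised Revision object for
    # 'rev-2' lists no parents, _find_inconsistent_revision_parents yields
    # ('rev-2', ('rev-1',), ()) and the check above raises BzrCheckError.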


class RepositoryFormatKnit(MetaDirRepositoryFormat):
    """Bzr repository knit format (generalized)."""

    # Set this attribute in derived classes to control the repository class
    # used by the repository format.
    repository_class = None
    _commit_builder_class = None
    # Set this attribute in derived classes to control the _serializer that the
    # repository objects will have passed to their constructor.
    @property
    def _serializer(self):
        return xml5.serializer_v5
    # Knit based repositories handle ghosts reasonably well.
    supports_ghosts = True
    # External lookups are not supported in this format.
    supports_external_lookups = False
    supports_chks = False
    _fetch_order = 'topological'
    _fetch_uses_deltas = True
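
    # Derived formats such as RepositoryFormatKnit1/3/4 below set
    # repository_class and _commit_builder_class, and may override the
    # _serializer property; the knit construction helpers that follow are
    # shared by all of them.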

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, annotated=False)

    def _get_revisions(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('revisions')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_signatures(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('signatures')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashEscapedPrefixMapper()
        base_transport = repo_transport.clone('knits')
        index = _mod_knit._KndxIndex(base_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(base_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=200,
            annotated=True)
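
    # Note on the helpers above: ConstantMapper keeps a whole knit under one
    # fixed name ('inventory', 'revisions', 'signatures'), while
    # HashEscapedPrefixMapper spreads the per-file-id text knits across hashed
    # subdirectories of 'knits/'. Revisions and signatures use
    # max_delta_chain=0 so every record is stored as a fulltext, whereas file
    # texts allow delta chains of up to 200 entries and keep annotations.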

    def initialize(self, a_bzrdir, shared=False):
        """Create a knit format 1 repository.

        :param a_bzrdir: bzrdir to contain the repository; must already
            be initialized.
        :param shared: If true the repository will be initialized as a shared
                       repository.
        """
        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['knits']
        files = []
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        transaction = transactions.WriteTransaction()
        result = self.open(a_bzrdir=a_bzrdir, _found=True)
        result.lock_write()
        # the revision id here is irrelevant: it will not be stored, and cannot
        # already exist, we do this to create files on disk for older clients.
        result.inventories.get_parent_map([('A',)])
        result.revisions.get_parent_map([('A',)])
        result.signatures.get_parent_map([('A',)])
        result.unlock()
        self._run_post_repo_init_hooks(result, a_bzrdir, shared)
        return result
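
    # Illustrative usage, assuming an existing bzrdir 'a_bzrdir':
    #   repo = RepositoryFormatKnit1().initialize(a_bzrdir, shared=False)
    # initialize() uploads the blank control files and then re-opens the
    # repository via open(), so the returned object already has its revisions,
    # inventories, signatures and texts knits attached.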

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                    repository at a slightly different url
                                    than normal. I.e. during 'upgrade'.
        """
        if not _found:
            format = RepositoryFormat.find_format(a_bzrdir)
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        repo = self.repository_class(_format=self,
                              a_bzrdir=a_bzrdir,
                              control_files=control_files,
                              _commit_builder_class=self._commit_builder_class,
                              _serializer=self._serializer)
        repo.revisions = self._get_revisions(repo_transport, repo)
        repo.signatures = self._get_signatures(repo_transport, repo)
        repo.inventories = self._get_inventories(repo_transport, repo)
        repo.texts = self._get_texts(repo_transport, repo)
        repo.chk_bytes = None
        repo._transport = repo_transport
        return repo


class RepositoryFormatKnit1(RepositoryFormatKnit):
    """Bzr repository knit format 1."""


class RepositoryFormatKnit3(RepositoryFormatKnit):
    """Bzr repository knit format 3."""

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    def check_conversion_target(self, target_format):
        if not target_format.rich_root_data:
            raise errors.BadConversionTarget(
                'Does not support rich root data.', target_format)
        if not getattr(target_format, 'supports_tree_reference', False):
            raise errors.BadConversionTarget(
                'Does not support nested trees', target_format)

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Bazaar Knit Repository Format 3 (bzr 0.15)\n"


class RepositoryFormatKnit4(RepositoryFormatKnit):
    """Bzr repository knit format 4."""

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 4"