-from cStringIO import StringIO
+from io import BytesIO
...
-from bzrlib.lazy_import import lazy_import
+from ...lazy_import import lazy_import
 lazy_import(globals(), """
...
     graph as _mod_graph,
...
+from breezy.bzr import (
...
+    revision as _mod_revision,
...
-from bzrlib.decorators import needs_read_lock, needs_write_lock
-from bzrlib.repository import (
-    MetaDirVersionedFileRepository,
-    MetaDirRepositoryFormat,
...
-from bzrlib.store.text import TextStore
-from bzrlib.trace import mutter
-from bzrlib.tuned_gzip import GzipFile, bytes_to_gzip
-from bzrlib.versionedfile import (
+from ...repository import (
...
+from ...bzr.repository import (
+    RepositoryFormatMetaDir,
...
+from .store.text import TextStore
+from ...bzr.versionedfile import (
     AbsentContentFactory,
     FulltextContentFactory,
...
+from ...bzr.vf_repository import (
+    InterSameDataRepository,
+    VersionedFileCommitBuilder,
+    VersionedFileRepository,
+    VersionedFileRepositoryFormat,
+    MetaDirVersionedFileRepository,
+    MetaDirVersionedFileRepositoryFormat,
...
+from . import bzrdir as weave_bzrdir
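Aside from the relative-import churn, the block above keeps the same lazy_import machinery bzrlib used. For readers unfamiliar with the helper, a minimal illustrative sketch follows; it is not part of the change, and the escape_path function plus the particular modules imported are invented for the example. Names listed in the triple-quoted block are bound to placeholders and only really imported on first attribute access.

from breezy.lazy_import import lazy_import
lazy_import(globals(), """
from breezy import (
    osutils,
    urlutils,
    )
""")

def escape_path(file_or_path):
    # The first attribute access on urlutils/osutils triggers the real import.
    return urlutils.escape(osutils.safe_unicode(file_or_path))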
-class AllInOneRepository(Repository):
+class AllInOneRepository(VersionedFileRepository):
     """Legacy support - the repository behaviour for all-in-one branches."""
...
         return xml5.serializer_v5

     def _escape(self, file_or_path):
-        if not isinstance(file_or_path, basestring):
+        if not isinstance(file_or_path, str):
             file_or_path = '/'.join(file_or_path)
         if file_or_path == '':
...
         return urlutils.escape(osutils.safe_unicode(file_or_path))

-    def __init__(self, _format, a_bzrdir):
+    def __init__(self, _format, a_controldir):
         # we reuse one control files instance.
-        dir_mode = a_bzrdir._get_dir_mode()
-        file_mode = a_bzrdir._get_file_mode()
+        dir_mode = a_controldir._get_dir_mode()
+        file_mode = a_controldir._get_file_mode()

         def get_store(name, compressed=True, prefixed=False):
             # FIXME: This approach of assuming stores are all entirely compressed
...
         # which allows access to this old info.
         self.inventory_store = get_store('inventory-store')
         self._text_store = get_store('text-store')
-        super(AllInOneRepository, self).__init__(_format, a_bzrdir, a_bzrdir._control_files)
+        super(AllInOneRepository, self).__init__(
+            _format, a_controldir, a_controldir._control_files)
     def _all_possible_ids(self):
         """Return all the possible revisions that we could find."""
         if 'evil' in debug.debug_flags:
-            mutter_callsite(3, "_all_possible_ids scales with size of history.")
-        return [key[-1] for key in self.inventories.keys()]
+            trace.mutter_callsite(
+                3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]

     def _all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.
...
                 self.inventories.keys()
...
     def _backup_inventory(self):
-        t = self.bzrdir._control_files._transport
+        t = self.controldir._control_files._transport
         t.copy('inventory.weave', 'inventory.backup.weave')

     def _temp_inventories(self):
-        t = self.bzrdir._control_files._transport
+        t = self.controldir._control_files._transport
         return self._format._get_inventories(t, self, 'inventory.new')

     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
+                           revision_id=None, lossy=False):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
-        result = CommitBuilder(self, parents, config, timestamp, timezone,
-                               committer, revprops, revision_id)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
         self.start_write_group()
...
-    def get_revisions(self, revision_ids):
-        revs = self._get_revisions(revision_ids)
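Both get_commit_builder implementations in this file (here and again in WeaveMetaDirRepository below) now build a VersionedFileCommitBuilder and open a write group before returning. A rough usage sketch, with repo, branch, config and parents as placeholders; builder.commit() and the write-group calls follow the general breezy commit protocol and are not specific to this patch:

with repo.lock_write():
    builder = repo.get_commit_builder(branch, parents, config,
                                      committer='Jane Doe <jane@example.com>',
                                      revprops={}, lossy=False)
    try:
        # ... record the tree contents via the builder here ...
        rev_id = builder.commit('example commit message')
        repo.commit_write_group()   # pairs with the start_write_group() opened above
    except Exception:
        repo.abort_write_group()
        raise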
     def _inventory_add_lines(self, revision_id, parents, lines,
...
         """Store lines in inv_vf and return the sha1 of the inventory."""
         present_parents = self.get_graph().get_parent_map(parents)
         final_parents = []
...
         :param new_value: True to restore the default, False to disable making
...
-        raise errors.RepositoryUpgradeRequired(self.bzrdir.root_transport.base)
+        raise errors.RepositoryUpgradeRequired(self.user_url)

     def make_working_trees(self):
         """Returns the policy for making working trees on new branches."""
...
-    def revision_graph_can_have_wrong_parents(self):
-        # XXX: This is an old format that we don't support full checking on, so
-        # just claim that checking for this inconsistency is not required.
...
 class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
     """A subclass of MetaDirRepository to set weave specific policy."""

-    def __init__(self, _format, a_bzrdir, control_files):
-        super(WeaveMetaDirRepository, self).__init__(_format, a_bzrdir, control_files)
+    def __init__(self, _format, a_controldir, control_files):
+        super(WeaveMetaDirRepository, self).__init__(
+            _format, a_controldir, control_files)
         self._serializer = _format._serializer

     def _all_possible_ids(self):
         """Return all the possible revisions that we could find."""
         if 'evil' in debug.debug_flags:
-            mutter_callsite(3, "_all_possible_ids scales with size of history.")
-        return [key[-1] for key in self.inventories.keys()]
+            trace.mutter_callsite(
+                3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]

     def _all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.
...
     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
+                           revision_id=None, lossy=False):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
-        result = CommitBuilder(self, parents, config, timestamp, timezone,
-                               committer, revprops, revision_id)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
         self.start_write_group()
...
     def get_revision(self, revision_id):
         """Return the Revision object for a named revision"""
-        r = self.get_revision_reconcile(revision_id)
+        with self.lock_read():
+            return self.get_revision_reconcile(revision_id)

     def _inventory_add_lines(self, revision_id, parents, lines,
...
         """Store lines in inv_vf and return the sha1 of the inventory."""
         present_parents = self.get_graph().get_parent_map(parents)
         final_parents = []
...
     supports_ghosts = False
     supports_external_lookups = False
     supports_chks = False
+    supports_nesting_repositories = True
     _fetch_order = 'topological'
     _fetch_reconcile = True
     fast_deltas = False
+    supports_leaving_lock = False
+    supports_overriding_transport = False
+    # XXX: This is an old format that we don't support full checking on, so
+    # just claim that checking for this inconsistency is not required.
+    revision_graph_can_have_wrong_parents = False
-    def initialize(self, a_bzrdir, shared=False, _internal=False):
+    def initialize(self, a_controldir, shared=False, _internal=False):
         """Create a weave repository."""
...
-            raise errors.IncompatibleFormat(self, a_bzrdir._format)
+            raise errors.IncompatibleFormat(self, a_controldir._format)

         if not _internal:
             # always initialized when the bzrdir is.
-            return self.open(a_bzrdir, _found=True)
+            return self.open(a_controldir, _found=True)

         # Create an empty weave
...
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()

-        mutter('creating repository in %s.', a_bzrdir.transport.base)
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)
...
         # FIXME: RBC 20060125 don't peek under the covers
         # NB: no need to escape relative paths that are url safe.
-        control_files = lockable_files.LockableFiles(a_bzrdir.transport,
-            'branch-lock', lockable_files.TransportLock)
+        control_files = lockable_files.LockableFiles(a_controldir.transport,
+                                                     'branch-lock', lockable_files.TransportLock)
         control_files.create_lock()
         control_files.lock_write()
-        transport = a_bzrdir.transport
+        transport = a_controldir.transport
...
-            transport.mkdir_multi(['revision-store', 'weaves'],
-                mode=a_bzrdir._get_dir_mode())
+            transport.mkdir('revision-store',
+                            mode=a_controldir._get_dir_mode())
+            transport.mkdir('weaves', mode=a_controldir._get_dir_mode())
             transport.put_bytes_non_atomic('inventory.weave', empty_weave,
-                mode=a_bzrdir._get_file_mode())
+                                           mode=a_controldir._get_file_mode())
...
             control_files.unlock()
-        return self.open(a_bzrdir, _found=True)
+        repository = self.open(a_controldir, _found=True)
+        self._run_post_repo_init_hooks(repository, a_controldir, shared)

-    def open(self, a_bzrdir, _found=False):
+    def open(self, a_controldir, _found=False):
         """See RepositoryFormat.open()."""
...
             # we are being called directly and must probe.
             raise NotImplementedError
...
-        repo_transport = a_bzrdir.get_repository_transport(None)
-        control_files = a_bzrdir._control_files
-        result = AllInOneRepository(_format=self, a_bzrdir=a_bzrdir)
+        repo_transport = a_controldir.get_repository_transport(None)
+        result = AllInOneRepository(_format=self, a_controldir=a_controldir)
         result.revisions = self._get_revisions(repo_transport, result)
         result.signatures = self._get_signatures(repo_transport, result)
         result.inventories = self._get_inventories(repo_transport, result)
     def _get_revisions(self, repo_transport, repo):
-        from bzrlib.xml4 import serializer_v4
+        from .xml4 import serializer_v4
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            serializer_v4, True, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 serializer_v4, True, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
...
     def network_name(self):
         """The network name for this format is the control dirs disk label."""
-        return self._matchingbzrdir.get_format_string()
+        return self._matchingcontroldir.get_format_string()

     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            xml5.serializer_v5, False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 xml5.serializer_v5, False, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.PrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)
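The only substantive difference between the format 4/5 accessors above and the format 6/7 accessors below is the key mapper handed to the stores: ConstantMapper pins every key to one file, PrefixMapper lays files out directly under the key prefix, and HashPrefixMapper adds a hash bucket in front. An illustrative sketch, where the key value and the exact HashPrefixMapper output are assumptions and only the relative behaviour matters:

from breezy.bzr import versionedfile

key = (b'file-id-1234',)
versionedfile.ConstantMapper('inventory').map(key)  # always 'inventory'
versionedfile.PrefixMapper().map(key)               # 'file-id-1234'
versionedfile.HashPrefixMapper().map(key)           # e.g. 'c8/file-id-1234'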
 class RepositoryFormat6(PreSplitOutRepositoryFormat):
...
     def network_name(self):
         """The network name for this format is the control dirs disk label."""
-        return self._matchingbzrdir.get_format_string()
+        return self._matchingcontroldir.get_format_string()

     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            False, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  False, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.HashPrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

-class RepositoryFormat7(MetaDirRepositoryFormat):
+class RepositoryFormat7(MetaDirVersionedFileRepositoryFormat):
     """Bzr repository 7.
...
     This repository format has:
     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            True, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  True, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.HashPrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

-    def initialize(self, a_bzrdir, shared=False):
+    def initialize(self, a_controldir, shared=False):
         """Create a weave repository.
...
         :param shared: If true the repository will be initialized as a shared
...
         # Create an empty weave
...
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()

-        mutter('creating repository in %s.', a_bzrdir.transport.base)
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)
         dirs = ['revision-store', 'weaves']
-        files = [('inventory.weave', StringIO(empty_weave)),
+        files = [('inventory.weave', BytesIO(empty_weave)),
...
         utf8_files = [('format', self.get_format_string())]
...
-        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
-        return self.open(a_bzrdir=a_bzrdir, _found=True)
+        self._upload_blank_content(
+            a_controldir, dirs, files, utf8_files, shared)
+        return self.open(a_controldir=a_controldir, _found=True)
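For context, initialize() and the open() defined just below are normally reached through a control directory rather than called directly. A hypothetical sketch; ControlDir.create and the local path are assumptions made for illustration, and only the initialize/open calls mirror the code above:

from breezy.controldir import ControlDir

a_controldir = ControlDir.create('/tmp/example-branch')
repo = RepositoryFormat7().initialize(a_controldir, shared=False)
# Re-opening goes through the same format object:
same_repo = RepositoryFormat7().open(a_controldir, _found=True)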
-    def open(self, a_bzrdir, _found=False, _override_transport=None):
+    def open(self, a_controldir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().
...
         :param _override_transport: INTERNAL USE ONLY. Allows opening the
...
             than normal. I.e. during 'upgrade'.
...
-            format = RepositoryFormat.find_format(a_bzrdir)
+            format = RepositoryFormatMetaDir.find_format(a_controldir)
         if _override_transport is not None:
             repo_transport = _override_transport
...
-            repo_transport = a_bzrdir.get_repository_transport(None)
+            repo_transport = a_controldir.get_repository_transport(None)
         control_files = lockable_files.LockableFiles(repo_transport,
-            'lock', lockdir.LockDir)
-        result = WeaveMetaDirRepository(_format=self, a_bzrdir=a_bzrdir,
-            control_files=control_files)
+                                                     'lock', lockdir.LockDir)
+        result = WeaveMetaDirRepository(_format=self, a_controldir=a_controldir,
+                                        control_files=control_files)
         result.revisions = self._get_revisions(repo_transport, result)
         result.signatures = self._get_signatures(repo_transport, result)
         result.inventories = self._get_inventories(repo_transport, result)
             if record.storage_kind == 'absent':
                 raise errors.RevisionNotPresent([record.key[0]], self)
             # adapt to non-tuple interface
-            if record.storage_kind == 'fulltext':
+            if record.storage_kind in ('fulltext', 'chunks', 'lines'):
                 self.add_lines(record.key, None,
-                    osutils.split_lines(record.get_bytes_as('fulltext')))
+                               record.get_bytes_as('lines'))
...
-                adapter_key = record.storage_kind, 'fulltext'
+                adapter_key = record.storage_kind, 'lines'
...
                     adapter = adapters[adapter_key]
...
                     adapter_factory = adapter_registry.get(adapter_key)
                     adapter = adapter_factory(self)
                     adapters[adapter_key] = adapter
-                lines = osutils.split_lines(adapter.get_bytes(
-                    record, record.get_bytes_as(record.storage_kind)))
+                lines = adapter.get_bytes(
+                    record, record.get_bytes_as(record.storage_kind))
...
                     self.add_lines(record.key, None, lines)
-                except RevisionAlreadyPresent:
+                except errors.RevisionAlreadyPresent:
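The loop above consumes a record stream and falls back to the adapter registry for storage kinds it cannot handle directly. The producing side of such a stream is assembled from the content factories imported at the top of the file; a hedged sketch follows, where the keys, parents and text are made up and passing sha1=None assumes the receiver recomputes the digest:

from breezy.bzr.versionedfile import AbsentContentFactory, FulltextContentFactory

def example_stream():
    # One present text and one key the producer knows nothing about.
    yield FulltextContentFactory(
        (b'rev-1',), (), None, b'fulltext of rev-1\n')
    yield AbsentContentFactory((b'rev-2',))

# A store such as the RevisionTextStore above would reject the absent record:
# store.insert_record_stream(example_stream())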
     def _load_text(self, key):
...
                 relpaths.add(relpath[:-4])
         paths = list(relpaths)
-        return set([self._mapper.unmap(path) for path in paths])
+        return {self._mapper.unmap(path) for path in paths}
...
-_legacy_formats = [RepositoryFormat4(),
+class InterWeaveRepo(InterSameDataRepository):
+    """Optimised code paths between Weave based repositories.
...
+    def _get_repo_format_to_test(self):
+        return RepositoryFormat7()
...
+    def is_compatible(source, target):
+        """Be compatible with known Weave formats.
...
+        We don't test for the stores being of specific types because that
+        could lead to confusing results, and there is no need to be
...
+            return (isinstance(source._format, (RepositoryFormat5,
...
+                    and isinstance(target._format, (RepositoryFormat5,
...
+        except AttributeError:
...
+    def copy_content(self, revision_id=None):
+        """See InterRepository.copy_content()."""
+        with self.lock_write():
+            # weave specific optimised path:
...
+                self.target.set_make_working_trees(
+                    self.source.make_working_trees())
+            except (errors.RepositoryUpgradeRequired, NotImplementedError):
...
+            if self.source._transport.listable():
+                with ui.ui_factory.nested_progress_bar() as pb:
+                    self.target.texts.insert_record_stream(
+                        self.source.texts.get_record_stream(
+                            self.source.texts.keys(), 'topological', False))
+                    pb.update('Copying inventory', 0, 1)
+                    self.target.inventories.insert_record_stream(
+                        self.source.inventories.get_record_stream(
+                            self.source.inventories.keys(), 'topological', False))
+                    self.target.signatures.insert_record_stream(
+                        self.source.signatures.get_record_stream(
+                            self.source.signatures.keys(),
...
+                    self.target.revisions.insert_record_stream(
+                        self.source.revisions.get_record_stream(
+                            self.source.revisions.keys(),
+                            'topological', True))
...
+                self.target.fetch(self.source, revision_id=revision_id)
...
+    def search_missing_revision_ids(self, find_ghosts=True, revision_ids=None,
+                                    if_present_ids=None, limit=None):
+        """See InterRepository.search_missing_revision_ids()."""
+        with self.lock_read():
+            # we want all revisions to satisfy revision_id in source.
+            # but we don't want to stat every file here and there.
+            # we want then, all revisions other needs to satisfy revision_id
+            # checked, but not those that we have locally.
+            # so the first thing is to get a subset of the revisions to
+            # satisfy revision_id in source, and then eliminate those that
+            # we do already have.
+            # this is slow on high latency connection to self, but as this
+            # disk format scales terribly for push anyway due to rewriting
+            # inventory.weave, this is considered acceptable.
...
+            source_ids_set = self._present_source_revisions_for(
+                revision_ids, if_present_ids)
+            # source_ids is the worst possible case we may need to pull.
+            # now we want to filter source_ids against what we actually
+            # have in target, but don't try to check for existence where we
+            # know we do not have a revision as that would be pointless.
+            target_ids = set(self.target._all_possible_ids())
+            possibly_present_revisions = target_ids.intersection(
...
+            actually_present_revisions = set(
+                self.target._eliminate_revisions_not_present(
+                    possibly_present_revisions))
+            required_revisions = source_ids_set.difference(
+                actually_present_revisions)
+            if revision_ids is not None:
+                # we used get_ancestry to determine source_ids then we are
+                # assured all revisions referenced are present as they are
+                # installed in topological order. and the tip revision was
+                # validated by get_ancestry.
+                result_set = required_revisions
...
+                # if we just grabbed the possibly available ids, then
+                # we only have an estimate of whats available and need to
+                # validate that against the revision records.
...
+                    self.source._eliminate_revisions_not_present(
...
+            if limit is not None:
+                topo_ordered = self.source.get_graph().iter_topo_order(result_set)
+                result_set = set(itertools.islice(topo_ordered, limit))
+            return self.source.revision_ids_to_search_result(result_set)
...
+InterRepository.register_optimiser(InterWeaveRepo)
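Once registered, this optimiser is selected automatically whenever an InterRepository is built between two weave-backed repositories. A hedged usage sketch; repo_a and repo_b are placeholders, and search_missing_revision_ids plus get_keys come from the general InterRepository/SearchResult API rather than from this patch:

inter = InterRepository.get(repo_a, repo_b)   # resolves to InterWeaveRepo here
missing = inter.search_missing_revision_ids(revision_ids=None, limit=100)
for revision_id in missing.get_keys():
    print(revision_id)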
+def get_extra_interrepo_test_combinations():
+    from ...bzr import knitrepo
+    return [(InterRepository, RepositoryFormat5(),
+             knitrepo.RepositoryFormatKnit3())]