/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/repofmt/weaverepo.py

  • Committer: Robert Collins
  • Date: 2010-05-06 23:41:35 UTC
  • mto: This revision was merged to the branch mainline in revision 5223.
  • Revision ID: robertc@robertcollins.net-20100506234135-yivbzczw1sejxnxc
Lock methods on ``Tree``, ``Branch`` and ``Repository`` are now
expected to return an object which can be used to unlock them. This reduces
duplicate code when using cleanups. The previous tokens returned by
``Branch.lock_write`` and ``Repository.lock_write`` are now attributes
on the result of ``lock_write``. ``repository.RepositoryWriteLockResult``
and ``branch.BranchWriteLockResult`` document this. (Robert Collins)

``log._get_info_for_log_files`` now takes an add_cleanup callable.
(Robert Collins)
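
For illustration, a minimal sketch of the calling pattern this enables (``branch``
is any already-opened ``Branch``; ``add_cleanup`` stands in for whatever cleanup
registration the caller uses)::

    # Before: lock_write() returned a token, and unlocking required a separate
    # branch.unlock() call paired with it by hand.
    branch.lock_write()
    try:
        pass  # work on the locked branch
    finally:
        branch.unlock()

    # After: the object returned by lock_write() can itself be used to unlock,
    # so the pairing collapses into a single cleanup registration; the old
    # token is still available as an attribute of that result.
    add_cleanup(branch.lock_write().unlock)
    # work on the locked branch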

In the diff below, lines prefixed with '+' appear only in the new text, lines
prefixed with '-' appear only in the old text, and lines prefixed with a space
are unchanged context. Each hunk header gives the starting line number in the
old and new file.

--- bzrlib/repofmt/weaverepo.py (old)
+++ bzrlib/repofmt/weaverepo.py (new)
@@ old 1, new 1 @@
+# Copyright (C) 2007-2011 Canonical Ltd
-# Copyright (C) 2007-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ old 20, new 20 @@
 ghosts.
 """
 
+from __future__ import absolute_import
+
+import gzip
 import os
-from cStringIO import StringIO
-import urllib
 
+from ...lazy_import import lazy_import
-from bzrlib.lazy_import import lazy_import
 lazy_import(globals(), """
+import itertools
+
+from breezy import (
-from bzrlib import (
-    xml5,
     graph as _mod_graph,
+    ui,
+    )
+from breezy.bzr import (
+    xml5,
     )
 """)
+from ... import (
-from bzrlib import (
-    bzrdir,
     debug,
     errors,
     lockable_files,
     lockdir,
     osutils,
-    revision as _mod_revision,
     trace,
+    tuned_gzip,
     urlutils,
+    )
+from ...bzr import (
     versionedfile,
     weave,
     weavefile,
     )
+from ...repository import (
+    InterRepository,
+    )
+from ...bzr.repository import (
+    RepositoryFormatMetaDir,
+    )
+from ...sixish import (
+    BytesIO,
+    text_type,
+    )
+from .store.text import TextStore
+from ...bzr.versionedfile import (
-from bzrlib.decorators import needs_read_lock, needs_write_lock
-from bzrlib.repository import (
-    CommitBuilder,
-    MetaDirVersionedFileRepository,
-    MetaDirRepositoryFormat,
-    Repository,
-    RepositoryFormat,
-    )
-from bzrlib.store.text import TextStore
-from bzrlib.tuned_gzip import GzipFile, bytes_to_gzip
-from bzrlib.versionedfile import (
     AbsentContentFactory,
     FulltextContentFactory,
     VersionedFiles,
     )
+from ...bzr.vf_repository import (
+    InterSameDataRepository,
+    VersionedFileCommitBuilder,
+    VersionedFileRepository,
+    VersionedFileRepositoryFormat,
+    MetaDirVersionedFileRepository,
+    MetaDirVersionedFileRepositoryFormat,
+    )
+
+from . import bzrdir as weave_bzrdir
+
+
+class AllInOneRepository(VersionedFileRepository):
-
-
-class AllInOneRepository(Repository):
     """Legacy support - the repository behaviour for all-in-one branches."""
 
     @property
@@ old 70, new 88 @@
         return xml5.serializer_v5
 
     def _escape(self, file_or_path):
+        if not isinstance(file_or_path, (str, text_type)):
-        if not isinstance(file_or_path, basestring):
            file_or_path = '/'.join(file_or_path)
         if file_or_path == '':
             return u''
         return urlutils.escape(osutils.safe_unicode(file_or_path))
 
+    def __init__(self, _format, a_controldir):
-    def __init__(self, _format, a_bzrdir):
         # we reuse one control files instance.
+        dir_mode = a_controldir._get_dir_mode()
+        file_mode = a_controldir._get_file_mode()
-        dir_mode = a_bzrdir._get_dir_mode()
-        file_mode = a_bzrdir._get_file_mode()
 
         def get_store(name, compressed=True, prefixed=False):
             # FIXME: This approach of assuming stores are all entirely compressed
@@ old 87, new 105 @@
             # some existing branches where there's a mixture; we probably
             # still want the option to look for both.
             relpath = self._escape(name)
+            store = TextStore(a_controldir.transport.clone(relpath),
-            store = TextStore(a_bzrdir.transport.clone(relpath),
                               prefixed=prefixed, compressed=compressed,
                               dir_mode=dir_mode,
                               file_mode=file_mode)
@@ old 100, new 118 @@
             # which allows access to this old info.
             self.inventory_store = get_store('inventory-store')
             self._text_store = get_store('text-store')
+        super(AllInOneRepository, self).__init__(
+            _format, a_controldir, a_controldir._control_files)
-        super(AllInOneRepository, self).__init__(_format, a_bzrdir, a_bzrdir._control_files)
 
-    @needs_read_lock
     def _all_possible_ids(self):
         """Return all the possible revisions that we could find."""
         if 'evil' in debug.debug_flags:
             trace.mutter_callsite(
                 3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]
-        return [key[-1] for key in self.inventories.keys()]
 
-    @needs_read_lock
     def _all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.
 
@@ old 118, new 136 @@
         present: for weaves ghosts may lead to a lack of correctness until
         the reweave updates the parents list.
         """
+        with self.lock_read():
+            return [key[-1] for key in self.revisions.keys()]
-        return [key[-1] for key in self.revisions.keys()]
 
     def _activate_new_inventory(self):
         """Put a replacement inventory.new into use as inventories."""
         # Copy the content across
+        t = self.controldir._control_files._transport
-        t = self.bzrdir._control_files._transport
         t.copy('inventory.new.weave', 'inventory.weave')
         # delete the temp inventory
         t.delete('inventory.new.weave')
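
The change above, dropping ``@needs_read_lock`` in favour of an explicit
``with self.lock_read():`` block, is the conversion repeated throughout the
rest of the file; a minimal sketch of the two forms, assuming a repository-like
object that provides ``lock_read()``::

    # old form: the decorator acquires and releases the read lock
    @needs_read_lock
    def _all_revision_ids(self):
        return [key[-1] for key in self.revisions.keys()]

    # new form: the object returned by lock_read() scopes the locked region
    def _all_revision_ids(self):
        with self.lock_read():
            return [key[-1] for key in self.revisions.keys()]
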
@@ old 131, new 150 @@
         self.inventories.keys()
 
     def _backup_inventory(self):
+        t = self.controldir._control_files._transport
-        t = self.bzrdir._control_files._transport
         t.copy('inventory.weave', 'inventory.backup.weave')
 
     def _temp_inventories(self):
+        t = self.controldir._control_files._transport
-        t = self.bzrdir._control_files._transport
         return self._format._get_inventories(t, self, 'inventory.new')
 
     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
+                           revision_id=None, lossy=False):
-                           revision_id=None):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
-        result = CommitBuilder(self, parents, config, timestamp, timezone,
-                              committer, revprops, revision_id)
         self.start_write_group()
         return result
 
-    @needs_read_lock
-    def get_revisions(self, revision_ids):
-        revs = self._get_revisions(revision_ids)
-        return revs
-
     def _inventory_add_lines(self, revision_id, parents, lines,
+                             check_content=True):
-        check_content=True):
         """Store lines in inv_vf and return the sha1 of the inventory."""
         present_parents = self.get_graph().get_parent_map(parents)
         final_parents = []
@@ old 161, new 175 @@
             if parent in present_parents:
                 final_parents.append((parent,))
         return self.inventories.add_lines((revision_id,), final_parents, lines,
+                                          check_content=check_content)[0]
-            check_content=check_content)[0]
 
     def is_shared(self):
         """AllInOne repositories cannot be shared."""
         return False
 
-    @needs_write_lock
     def set_make_working_trees(self, new_value):
         """Set the policy flag for making working trees when creating branches.
 
@@ old 183, new 196 @@
         """Returns the policy for making working trees on new branches."""
         return True
 
-    def revision_graph_can_have_wrong_parents(self):
-        # XXX: This is an old format that we don't support full checking on, so
-        # just claim that checking for this inconsistency is not required.
-        return False
-
 
 class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
     """A subclass of MetaDirRepository to set weave specific policy."""
 
+    def __init__(self, _format, a_controldir, control_files):
+        super(WeaveMetaDirRepository, self).__init__(
+            _format, a_controldir, control_files)
-    def __init__(self, _format, a_bzrdir, control_files):
-        super(WeaveMetaDirRepository, self).__init__(_format, a_bzrdir, control_files)
         self._serializer = _format._serializer
 
-    @needs_read_lock
     def _all_possible_ids(self):
         """Return all the possible revisions that we could find."""
         if 'evil' in debug.debug_flags:
             trace.mutter_callsite(
                 3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]
-        return [key[-1] for key in self.inventories.keys()]
 
-    @needs_read_lock
     def _all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.
 
@@ old 212, new 220 @@
         present: for weaves ghosts may lead to a lack of correctness until
         the reweave updates the parents list.
         """
+        with self.lock_read():
+            return [key[-1] for key in self.revisions.keys()]
-        return [key[-1] for key in self.revisions.keys()]
 
     def _activate_new_inventory(self):
         """Put a replacement inventory.new into use as inventories."""
@@ old 234, new 243 @@
 
     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
+                           revision_id=None, lossy=False):
-                           revision_id=None):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
-        result = CommitBuilder(self, parents, config, timestamp, timezone,
-                              committer, revprops, revision_id)
         self.start_write_group()
         return result
 
-    @needs_read_lock
     def get_revision(self, revision_id):
         """Return the Revision object for a named revision"""
+        with self.lock_read():
+            return self.get_revision_reconcile(revision_id)
-        r = self.get_revision_reconcile(revision_id)
-        return r
 
     def _inventory_add_lines(self, revision_id, parents, lines,
+                             check_content=True):
-        check_content=True):
         """Store lines in inv_vf and return the sha1 of the inventory."""
         present_parents = self.get_graph().get_parent_map(parents)
         final_parents = []
@@ old 256, new 264 @@
             if parent in present_parents:
                 final_parents.append((parent,))
         return self.inventories.add_lines((revision_id,), final_parents, lines,
+                                          check_content=check_content)[0]
+
+
+class PreSplitOutRepositoryFormat(VersionedFileRepositoryFormat):
-            check_content=check_content)[0]
-
-    def revision_graph_can_have_wrong_parents(self):
-        return False
-
-
-class PreSplitOutRepositoryFormat(RepositoryFormat):
     """Base class for the pre split out repository formats."""
 
     rich_root_data = False
@@ old 270, new 275 @@
     supports_ghosts = False
     supports_external_lookups = False
     supports_chks = False
+    supports_nesting_repositories = True
     _fetch_order = 'topological'
     _fetch_reconcile = True
     fast_deltas = False
+    supports_leaving_lock = False
+    supports_overriding_transport = False
+    # XXX: This is an old format that we don't support full checking on, so
+    # just claim that checking for this inconsistency is not required.
+    revision_graph_can_have_wrong_parents = False
 
+    def initialize(self, a_controldir, shared=False, _internal=False):
-    def initialize(self, a_bzrdir, shared=False, _internal=False):
         """Create a weave repository."""
         if shared:
+            raise errors.IncompatibleFormat(self, a_controldir._format)
-            raise errors.IncompatibleFormat(self, a_bzrdir._format)
 
         if not _internal:
             # always initialized when the bzrdir is.
+            return self.open(a_controldir, _found=True)
-            return self.open(a_bzrdir, _found=True)
 
         # Create an empty weave
+        sio = BytesIO()
-        sio = StringIO()
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()
 
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)
-        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
 
         # FIXME: RBC 20060125 don't peek under the covers
         # NB: no need to escape relative paths that are url safe.
+        control_files = lockable_files.LockableFiles(a_controldir.transport,
+                                                     'branch-lock', lockable_files.TransportLock)
-        control_files = lockable_files.LockableFiles(a_bzrdir.transport,
-            'branch-lock', lockable_files.TransportLock)
         control_files.create_lock()
         control_files.lock_write()
+        transport = a_controldir.transport
-        transport = a_bzrdir.transport
         try:
+            transport.mkdir('revision-store',
+                            mode=a_controldir._get_dir_mode())
+            transport.mkdir('weaves', mode=a_controldir._get_dir_mode())
-            transport.mkdir_multi(['revision-store', 'weaves'],
-                mode=a_bzrdir._get_dir_mode())
             transport.put_bytes_non_atomic('inventory.weave', empty_weave,
+                                           mode=a_controldir._get_file_mode())
-                mode=a_bzrdir._get_file_mode())
         finally:
             control_files.unlock()
+        repository = self.open(a_controldir, _found=True)
+        self._run_post_repo_init_hooks(repository, a_controldir, shared)
-        repository = self.open(a_bzrdir, _found=True)
-        self._run_post_repo_init_hooks(repository, a_bzrdir, shared)
         return repository
 
+    def open(self, a_controldir, _found=False):
-    def open(self, a_bzrdir, _found=False):
         """See RepositoryFormat.open()."""
         if not _found:
             # we are being called directly and must probe.
             raise NotImplementedError
 
+        repo_transport = a_controldir.get_repository_transport(None)
+        result = AllInOneRepository(_format=self, a_controldir=a_controldir)
-        repo_transport = a_bzrdir.get_repository_transport(None)
-        control_files = a_bzrdir._control_files
-        result = AllInOneRepository(_format=self, a_bzrdir=a_bzrdir)
         result.revisions = self._get_revisions(repo_transport, result)
         result.signatures = self._get_signatures(repo_transport, result)
         result.inventories = self._get_inventories(repo_transport, result)
@@ old 324, new 335 @@
         result.chk_bytes = None
         return result
 
+    def is_deprecated(self):
+        return True
+
 
 class RepositoryFormat4(PreSplitOutRepositoryFormat):
     """Bzr repository format 4.
@@ old 337, new 351 @@
     has been removed.
     """
 
+    supports_funky_characters = False
+
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat4()
-    _matchingbzrdir = bzrdir.BzrDirFormat4()
 
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
@@ old 361, new 377 @@
         return None
 
     def _get_revisions(self, repo_transport, repo):
+        from .xml4 import serializer_v4
-        from bzrlib.xml4 import serializer_v4
         return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 serializer_v4, True, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
-            serializer_v4, True, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
-            False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_texts(self, repo_transport, repo):
         return None
@@ old 385, new 401 @@
     """
 
     _versionedfile_class = weave.WeaveFile
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat5()
+    supports_funky_characters = False
+
-    _matchingbzrdir = bzrdir.BzrDirFormat5()
     @property
     def _serializer(self):
         return xml5.serializer_v5
@@ old 396, new 414 @@
 
     def network_name(self):
         """The network name for this format is the control dirs disk label."""
+        return self._matchingcontroldir.get_format_string()
-        return self._matchingbzrdir.get_format_string()
 
     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
-            weave.WeaveFile, mapper, repo.is_locked)
 
     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 xml5.serializer_v5, False, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
-            xml5.serializer_v5, False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
-            False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.PrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
-            weave.WeaveFile, mapper, repo.is_locked)
 
 
 class RepositoryFormat6(PreSplitOutRepositoryFormat):
@@ old 430, new 448 @@
     """
 
     _versionedfile_class = weave.WeaveFile
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat6()
+    supports_funky_characters = False
+
-    _matchingbzrdir = bzrdir.BzrDirFormat6()
     @property
     def _serializer(self):
         return xml5.serializer_v5
@@ old 441, new 461 @@
 
     def network_name(self):
         """The network name for this format is the control dirs disk label."""
+        return self._matchingcontroldir.get_format_string()
-        return self._matchingbzrdir.get_format_string()
 
     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
-            weave.WeaveFile, mapper, repo.is_locked)
 
     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
-            xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  False, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
-            False, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.HashPrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
+
+
+class RepositoryFormat7(MetaDirVersionedFileRepositoryFormat):
-            weave.WeaveFile, mapper, repo.is_locked)
-
-
-class RepositoryFormat7(MetaDirRepositoryFormat):
     """Bzr repository 7.
 
     This repository format has:
@@ old 480, new 500 @@
     _versionedfile_class = weave.WeaveFile
     supports_ghosts = False
     supports_chks = False
+    supports_funky_characters = False
+    revision_graph_can_have_wrong_parents = False
 
     _fetch_order = 'topological'
     _fetch_reconcile = True
     fast_deltas = False
+
     @property
     def _serializer(self):
         return xml5.serializer_v5
 
+    @classmethod
+    def get_format_string(cls):
-    def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
+        return b"Bazaar-NG Repository format 7"
-        return "Bazaar-NG Repository format 7"
 
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
@@ old 499, new 523 @@
     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
-            weave.WeaveFile, mapper, repo.is_locked)
 
     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
-            xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  True, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
-            True, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
 
     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.HashPrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
-            weave.WeaveFile, mapper, repo.is_locked)
 
+    def initialize(self, a_controldir, shared=False):
-    def initialize(self, a_bzrdir, shared=False):
         """Create a weave repository.
 
         :param shared: If true the repository will be initialized as a shared
                        repository.
         """
         # Create an empty weave
+        sio = BytesIO()
-        sio = StringIO()
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()
 
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)
-        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
         dirs = ['revision-store', 'weaves']
+        files = [('inventory.weave', BytesIO(empty_weave)),
-        files = [('inventory.weave', StringIO(empty_weave)),
                  ]
         utf8_files = [('format', self.get_format_string())]
 
+        self._upload_blank_content(
+            a_controldir, dirs, files, utf8_files, shared)
+        return self.open(a_controldir=a_controldir, _found=True)
-        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
-        return self.open(a_bzrdir=a_bzrdir, _found=True)
 
+    def open(self, a_controldir, _found=False, _override_transport=None):
-    def open(self, a_bzrdir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().
 
         :param _override_transport: INTERNAL USE ONLY. Allows opening the
@@ old 545, new 570 @@
                                     than normal. I.e. during 'upgrade'.
         """
         if not _found:
+            format = RepositoryFormatMetaDir.find_format(a_controldir)
-            format = RepositoryFormat.find_format(a_bzrdir)
         if _override_transport is not None:
             repo_transport = _override_transport
         else:
+            repo_transport = a_controldir.get_repository_transport(None)
-            repo_transport = a_bzrdir.get_repository_transport(None)
         control_files = lockable_files.LockableFiles(repo_transport,
+                                                     'lock', lockdir.LockDir)
+        result = WeaveMetaDirRepository(_format=self, a_controldir=a_controldir,
+                                        control_files=control_files)
-                                'lock', lockdir.LockDir)
-        result = WeaveMetaDirRepository(_format=self, a_bzrdir=a_bzrdir,
-            control_files=control_files)
         result.revisions = self._get_revisions(repo_transport, result)
         result.signatures = self._get_signatures(repo_transport, result)
         result.inventories = self._get_inventories(repo_transport, result)
@@ old 562, new 587 @@
         result._transport = repo_transport
         return result
 
+    def is_deprecated(self):
+        return True
+
 
 class TextVersionedFiles(VersionedFiles):
     """Just-a-bunch-of-files based VersionedFile stores."""
@@ old 583, new 611 @@
             raise errors.ObjectNotLocked(self)
         if not self._can_write():
             raise errors.ReadOnlyError(self)
+        if b'/' in key[-1]:
-        if '/' in key[-1]:
             raise ValueError('bad idea to put / in %r' % (key,))
+        chunks = lines
-        text = ''.join(lines)
         if self._compressed:
+            chunks = tuned_gzip.chunks_to_gzip(chunks)
-            text = bytes_to_gzip(text)
         path = self._map(key)
+        self._transport.put_file_non_atomic(
+            path, BytesIO(b''.join(chunks)),
+            create_parent_dir=True)
-        self._transport.put_bytes_non_atomic(path, text, create_parent_dir=True)
 
     def insert_record_stream(self, stream):
         adapters = {}
@@ old 598, new 628 @@
             if record.storage_kind == 'absent':
                 raise errors.RevisionNotPresent([record.key[0]], self)
             # adapt to non-tuple interface
+            if record.storage_kind in ('fulltext', 'chunks', 'lines'):
-            if record.storage_kind == 'fulltext':
                 self.add_lines(record.key, None,
+                               record.get_bytes_as('lines'))
-                    osutils.split_lines(record.get_bytes_as('fulltext')))
             else:
+                adapter_key = record.storage_kind, 'lines'
-                adapter_key = record.storage_kind, 'fulltext'
                 try:
                     adapter = adapters[adapter_key]
                 except KeyError:
                     adapter_factory = adapter_registry.get(adapter_key)
                     adapter = adapter_factory(self)
                     adapters[adapter_key] = adapter
+                lines = adapter.get_bytes(
+                    record, record.get_bytes_as(record.storage_kind))
-                lines = osutils.split_lines(adapter.get_bytes(
-                    record, record.get_bytes_as(record.storage_kind)))
                 try:
                     self.add_lines(record.key, None, lines)
+                except errors.RevisionAlreadyPresent:
-                except RevisionAlreadyPresent:
                     pass
 
     def _load_text(self, key):
@@ old 635, new 665 @@
             else:
                 return None
         if compressed:
+            text = gzip.GzipFile(mode='rb', fileobj=BytesIO(text)).read()
-            text = GzipFile(mode='rb', fileobj=StringIO(text)).read()
         return text
 
     def _map(self, key):
@@ old 646, new 676 @@
     """Legacy thunk for format 4 repositories."""
 
     def __init__(self, transport, serializer, compressed, mapper, is_locked,
+                 can_write):
-        can_write):
         """Create a RevisionTextStore at transport with serializer."""
         TextVersionedFiles.__init__(self, transport, compressed, mapper,
+                                    is_locked, can_write)
-            is_locked, can_write)
         self._serializer = serializer
 
     def _load_text_parents(self, key):
@@ old 688, new 718 @@
             raise errors.ObjectNotLocked(self)
         relpaths = set()
         for quoted_relpath in self._transport.iter_files_recursive():
+            relpath = urlutils.unquote(quoted_relpath)
-            relpath = urllib.unquote(quoted_relpath)
             path, ext = os.path.splitext(relpath)
             if ext == '.gz':
                 relpath = path
             if not relpath.endswith('.sig'):
                 relpaths.add(relpath)
         paths = list(relpaths)
+        return {self._mapper.unmap(path) for path in paths}
-        return set([self._mapper.unmap(path) for path in paths])
 
 
 class SignatureTextStore(TextVersionedFiles):
@@ old 703, new 733 @@
 
     def __init__(self, transport, compressed, mapper, is_locked, can_write):
         TextVersionedFiles.__init__(self, transport, compressed, mapper,
+                                    is_locked, can_write)
-            is_locked, can_write)
         self._ext = '.sig' + self._ext
 
     def get_parent_map(self, keys):
@@ old 728, new 758 @@
             raise errors.ObjectNotLocked(self)
         relpaths = set()
         for quoted_relpath in self._transport.iter_files_recursive():
+            relpath = urlutils.unquote(quoted_relpath)
-            relpath = urllib.unquote(quoted_relpath)
             path, ext = os.path.splitext(relpath)
             if ext == '.gz':
                 relpath = path
@@ old 736, new 766 @@
                 continue
             relpaths.add(relpath[:-4])
         paths = list(relpaths)
+        return {self._mapper.unmap(path) for path in paths}
+
+
+class InterWeaveRepo(InterSameDataRepository):
+    """Optimised code paths between Weave based repositories.
+    """
+
+    @classmethod
+    def _get_repo_format_to_test(self):
+        return RepositoryFormat7()
+
+    @staticmethod
+    def is_compatible(source, target):
+        """Be compatible with known Weave formats.
+
+        We don't test for the stores being of specific types because that
+        could lead to confusing results, and there is no need to be
+        overly general.
+        """
+        try:
+            return (isinstance(source._format, (RepositoryFormat5,
+                                                RepositoryFormat6,
+                                                RepositoryFormat7))
+                    and isinstance(target._format, (RepositoryFormat5,
+                                                    RepositoryFormat6,
+                                                    RepositoryFormat7)))
+        except AttributeError:
+            return False
+
+    def copy_content(self, revision_id=None):
+        """See InterRepository.copy_content()."""
+        with self.lock_write():
+            # weave specific optimised path:
+            try:
+                self.target.set_make_working_trees(
+                    self.source.make_working_trees())
+            except (errors.RepositoryUpgradeRequired, NotImplementedError):
+                pass
+            # FIXME do not peek!
+            if self.source._transport.listable():
+                with ui.ui_factory.nested_progress_bar() as pb:
+                    self.target.texts.insert_record_stream(
+                        self.source.texts.get_record_stream(
+                            self.source.texts.keys(), 'topological', False))
+                    pb.update('Copying inventory', 0, 1)
+                    self.target.inventories.insert_record_stream(
+                        self.source.inventories.get_record_stream(
+                            self.source.inventories.keys(), 'topological', False))
+                    self.target.signatures.insert_record_stream(
+                        self.source.signatures.get_record_stream(
+                            self.source.signatures.keys(),
+                            'unordered', True))
+                    self.target.revisions.insert_record_stream(
+                        self.source.revisions.get_record_stream(
+                            self.source.revisions.keys(),
+                            'topological', True))
+            else:
+                self.target.fetch(self.source, revision_id=revision_id)
+
+    def search_missing_revision_ids(self, find_ghosts=True, revision_ids=None,
+                                    if_present_ids=None, limit=None):
+        """See InterRepository.search_missing_revision_ids()."""
+        with self.lock_read():
+            # we want all revisions to satisfy revision_id in source.
+            # but we don't want to stat every file here and there.
+            # we want then, all revisions other needs to satisfy revision_id
+            # checked, but not those that we have locally.
+            # so the first thing is to get a subset of the revisions to
+            # satisfy revision_id in source, and then eliminate those that
+            # we do already have.
+            # this is slow on high latency connection to self, but as this
+            # disk format scales terribly for push anyway due to rewriting
+            # inventory.weave, this is considered acceptable.
+            # - RBC 20060209
+            source_ids_set = self._present_source_revisions_for(
+                revision_ids, if_present_ids)
+            # source_ids is the worst possible case we may need to pull.
+            # now we want to filter source_ids against what we actually
+            # have in target, but don't try to check for existence where we
+            # know we do not have a revision as that would be pointless.
+            target_ids = set(self.target._all_possible_ids())
+            possibly_present_revisions = target_ids.intersection(
+                source_ids_set)
+            actually_present_revisions = set(
+                self.target._eliminate_revisions_not_present(
+                    possibly_present_revisions))
+            required_revisions = source_ids_set.difference(
+                actually_present_revisions)
+            if revision_ids is not None:
+                # we used get_ancestry to determine source_ids then we are
+                # assured all revisions referenced are present as they are
+                # installed in topological order. and the tip revision was
+                # validated by get_ancestry.
+                result_set = required_revisions
+            else:
+                # if we just grabbed the possibly available ids, then
+                # we only have an estimate of whats available and need to
+                # validate that against the revision records.
+                result_set = set(
+                    self.source._eliminate_revisions_not_present(
+                        required_revisions))
+            if limit is not None:
+                topo_ordered = self.get_graph().iter_topo_order(result_set)
+                result_set = set(itertools.islice(topo_ordered, limit))
+            return self.source.revision_ids_to_search_result(result_set)
+
+
+InterRepository.register_optimiser(InterWeaveRepo)
+
+
+def get_extra_interrepo_test_combinations():
+    from ...bzr import knitrepo
+    return [(InterRepository, RepositoryFormat5(),
+             knitrepo.RepositoryFormatKnit3())]
-        return set([self._mapper.unmap(path) for path in paths])
-
-_legacy_formats = [RepositoryFormat4(),
-                   RepositoryFormat5(),
-                   RepositoryFormat6()]