/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to bzrlib/repofmt/weaverepo.py

  • Committer: Robert Collins
  • Date: 2007-04-19 02:27:44 UTC
  • mto: This revision was merged to the branch mainline in revision 2426.
  • Revision ID: robertc@robertcollins.net-20070419022744-pfdqz42kp1wizh43
``make docs`` now creates a man page at ``man1/bzr.1`` fixing bug 107388.
(Robert Collins)

+# Copyright (C) 2007-2011 Canonical Ltd
-# Copyright (C) 2005, 2006, 2007 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Deprecated weave-based repository formats.
+
+Weave based formats scaled linearly with history size and could not represent
+ghosts.
+"""
+
+import gzip
+from io import BytesIO
+import os
+
+from ...lazy_import import lazy_import
+lazy_import(globals(), """
+import itertools
+
+from breezy import (
+    graph as _mod_graph,
+    ui,
+    )
+from breezy.bzr import (
+    xml5,
+    )
+""")
+from ... import (
+    debug,
+    errors,
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-
-
-"""Old weave-based repository formats"""
-
-from StringIO import StringIO
-
-from bzrlib import (
-    bzrdir,
     lockable_files,
     lockdir,
+    osutils,
+    trace,
+    tuned_gzip,
+    urlutils,
+    )
+from ...bzr import (
+    versionedfile,
     weave,
     weavefile,
+    )
+from ...repository import (
+    InterRepository,
+    )
+from ...bzr.repository import (
+    RepositoryFormatMetaDir,
+    )
+from .store.text import TextStore
+from ...bzr.versionedfile import (
+    AbsentContentFactory,
+    FulltextContentFactory,
+    VersionedFiles,
+    )
+from ...bzr.vf_repository import (
+    InterSameDataRepository,
+    VersionedFileCommitBuilder,
+    VersionedFileRepository,
+    VersionedFileRepositoryFormat,
+    MetaDirVersionedFileRepository,
+    MetaDirVersionedFileRepositoryFormat,
+    )
+
+from . import bzrdir as weave_bzrdir
+
+
+class AllInOneRepository(VersionedFileRepository):
-    xml5,
-    )
-from bzrlib.decorators import needs_read_lock, needs_write_lock
-from bzrlib.repository import (
-    MetaDirRepository,
-    MetaDirRepositoryFormat,
-    Repository,
-    RepositoryFormat,
-    )
-from bzrlib.store.text import TextStore
-from bzrlib.trace import mutter
-
-
-class AllInOneRepository(Repository):
     """Legacy support - the repository behaviour for all-in-one branches."""
 
+    @property
+    def _serializer(self):
+        return xml5.serializer_v5
+
+    def _escape(self, file_or_path):
+        if not isinstance(file_or_path, str):
+            file_or_path = '/'.join(file_or_path)
+        if file_or_path == '':
+            return u''
+        return urlutils.escape(osutils.safe_unicode(file_or_path))
+
+    def __init__(self, _format, a_controldir):
-    _serializer = xml5.serializer_v5
-
-    def __init__(self, _format, a_bzrdir, _revision_store, control_store, text_store):
         # we reuse one control files instance.
+        dir_mode = a_controldir._get_dir_mode()
+        file_mode = a_controldir._get_file_mode()
-        dir_mode = a_bzrdir._control_files._dir_mode
-        file_mode = a_bzrdir._control_files._file_mode
 
         def get_store(name, compressed=True, prefixed=False):
             # FIXME: This approach of assuming stores are all entirely compressed
+            # or entirely uncompressed is tidy, but breaks upgrade from
+            # some existing branches where there's a mixture; we probably
-            # or entirely uncompressed is tidy, but breaks upgrade from 
-            # some existing branches where there's a mixture; we probably 
             # still want the option to look for both.
+            relpath = self._escape(name)
+            store = TextStore(a_controldir.transport.clone(relpath),
-            relpath = a_bzrdir._control_files._escape(name)
-            store = TextStore(a_bzrdir._control_files._transport.clone(relpath),
                               prefixed=prefixed, compressed=compressed,
                               dir_mode=dir_mode,
                               file_mode=file_mode)
-            #if self._transport.should_cache():
-            #    cache_path = os.path.join(self.cache_root, name)
-            #    os.mkdir(cache_path)
-            #    store = bzrlib.store.CachedStore(store, cache_path)
             return store
 
         # not broken out yet because the controlweaves|inventory_store
+        # and texts bits are still different.
-        # and text_store | weave_store bits are still different.
         if isinstance(_format, RepositoryFormat4):
+            # cannot remove these - there is still no consistent api
-            # cannot remove these - there is still no consistent api 
             # which allows access to this old info.
             self.inventory_store = get_store('inventory-store')
+            self._text_store = get_store('text-store')
+        super(AllInOneRepository, self).__init__(
+            _format, a_controldir, a_controldir._control_files)
+
+    def _all_possible_ids(self):
+        """Return all the possible revisions that we could find."""
+        if 'evil' in debug.debug_flags:
+            trace.mutter_callsite(
+                3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]
+
+    def _all_revision_ids(self):
+        """Returns a list of all the revision ids in the repository.
+
+        These are in as much topological order as the underlying store can
+        present: for weaves ghosts may lead to a lack of correctness until
+        the reweave updates the parents list.
+        """
+        with self.lock_read():
+            return [key[-1] for key in self.revisions.keys()]
+
+    def _activate_new_inventory(self):
+        """Put a replacement inventory.new into use as inventories."""
+        # Copy the content across
+        t = self.controldir._control_files._transport
+        t.copy('inventory.new.weave', 'inventory.weave')
+        # delete the temp inventory
+        t.delete('inventory.new.weave')
+        # Check we can parse the new weave properly as a sanity check
+        self.inventories.keys()
+
+    def _backup_inventory(self):
+        t = self.controldir._control_files._transport
+        t.copy('inventory.weave', 'inventory.backup.weave')
+
+    def _temp_inventories(self):
+        t = self.controldir._control_files._transport
+        return self._format._get_inventories(t, self, 'inventory.new')
-            text_store = get_store('text-store')
-        super(AllInOneRepository, self).__init__(_format, a_bzrdir, a_bzrdir._control_files, _revision_store, control_store, text_store)
 
     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
+                           revision_id=None, lossy=False):
-                           revision_id=None):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
+        self.start_write_group()
+        return result
+
+    def _inventory_add_lines(self, revision_id, parents, lines,
+                             check_content=True):
+        """Store lines in inv_vf and return the sha1 of the inventory."""
+        present_parents = self.get_graph().get_parent_map(parents)
+        final_parents = []
+        for parent in parents:
+            if parent in present_parents:
+                final_parents.append((parent,))
+        return self.inventories.add_lines((revision_id,), final_parents, lines,
+                                          check_content=check_content)[0]
+
-        return Repository.get_commit_builder(self, branch, parents, config,
-            timestamp, timezone, committer, revprops, revision_id)
-
-    @needs_read_lock
     def is_shared(self):
         """AllInOne repositories cannot be shared."""
         return False
 
-    @needs_write_lock
     def set_make_working_trees(self, new_value):
         """Set the policy flag for making working trees when creating branches.
 
         :param new_value: True to restore the default, False to disable making
                           working trees.
         """
+        raise errors.RepositoryUpgradeRequired(self.user_url)
+
-        raise NotImplementedError(self.set_make_working_trees)
-    
     def make_working_trees(self):
         """Returns the policy for making working trees on new branches."""
         return True
 
 
+class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
-class WeaveMetaDirRepository(MetaDirRepository):
     """A subclass of MetaDirRepository to set weave specific policy."""
 
+    def __init__(self, _format, a_controldir, control_files):
+        super(WeaveMetaDirRepository, self).__init__(
+            _format, a_controldir, control_files)
+        self._serializer = _format._serializer
+
+    def _all_possible_ids(self):
+        """Return all the possible revisions that we could find."""
+        if 'evil' in debug.debug_flags:
+            trace.mutter_callsite(
+                3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]
+
+    def _all_revision_ids(self):
+        """Returns a list of all the revision ids in the repository.
+
+        These are in as much topological order as the underlying store can
+        present: for weaves ghosts may lead to a lack of correctness until
+        the reweave updates the parents list.
+        """
+        with self.lock_read():
+            return [key[-1] for key in self.revisions.keys()]
+
+    def _activate_new_inventory(self):
+        """Put a replacement inventory.new into use as inventories."""
+        # Copy the content across
+        t = self._transport
+        t.copy('inventory.new.weave', 'inventory.weave')
+        # delete the temp inventory
+        t.delete('inventory.new.weave')
+        # Check we can parse the new weave properly as a sanity check
+        self.inventories.keys()
+
+    def _backup_inventory(self):
+        t = self._transport
+        t.copy('inventory.weave', 'inventory.backup.weave')
+
+    def _temp_inventories(self):
+        t = self._transport
+        return self._format._get_inventories(t, self, 'inventory.new')
-    _serializer = xml5.serializer_v5
 
     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
+                           revision_id=None, lossy=False):
-                           revision_id=None):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
+        self.start_write_group()
+        return result
+
+    def get_revision(self, revision_id):
+        """Return the Revision object for a named revision"""
+        with self.lock_read():
+            return self.get_revision_reconcile(revision_id)
+
+    def _inventory_add_lines(self, revision_id, parents, lines,
+                             check_content=True):
+        """Store lines in inv_vf and return the sha1 of the inventory."""
+        present_parents = self.get_graph().get_parent_map(parents)
+        final_parents = []
+        for parent in parents:
+            if parent in present_parents:
+                final_parents.append((parent,))
+        return self.inventories.add_lines((revision_id,), final_parents, lines,
+                                          check_content=check_content)[0]
+
+
+class PreSplitOutRepositoryFormat(VersionedFileRepositoryFormat):
-        return MetaDirRepository.get_commit_builder(self, branch, parents,
-            config, timestamp, timezone, committer, revprops, revision_id)
-
-
-class PreSplitOutRepositoryFormat(RepositoryFormat):
     """Base class for the pre split out repository formats."""
 
     rich_root_data = False
     supports_tree_reference = False
+    supports_ghosts = False
+    supports_external_lookups = False
+    supports_chks = False
+    supports_nesting_repositories = True
+    _fetch_order = 'topological'
+    _fetch_reconcile = True
+    fast_deltas = False
+    supports_leaving_lock = False
+    supports_overriding_transport = False
+    # XXX: This is an old format that we don't support full checking on, so
+    # just claim that checking for this inconsistency is not required.
+    revision_graph_can_have_wrong_parents = False
 
+    def initialize(self, a_controldir, shared=False, _internal=False):
+        """Create a weave repository."""
-    def initialize(self, a_bzrdir, shared=False, _internal=False):
-        """Create a weave repository.
-        
-        TODO: when creating split out bzr branch formats, move this to a common
-        base for Format5, Format6. or something like that.
-        """
         if shared:
+            raise errors.IncompatibleFormat(self, a_controldir._format)
-            raise errors.IncompatibleFormat(self, a_bzrdir._format)
 
         if not _internal:
             # always initialized when the bzrdir is.
+            return self.open(a_controldir, _found=True)
+
-            return self.open(a_bzrdir, _found=True)
-        
         # Create an empty weave
+        sio = BytesIO()
-        sio = StringIO()
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()
 
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)
+
-        mutter('creating repository in %s.', a_bzrdir.transport.base)
-        dirs = ['revision-store', 'weaves']
-        files = [('inventory.weave', StringIO(empty_weave)),
-                 ]
-        
         # FIXME: RBC 20060125 don't peek under the covers
         # NB: no need to escape relative paths that are url safe.
+        control_files = lockable_files.LockableFiles(a_controldir.transport,
+                                                     'branch-lock', lockable_files.TransportLock)
-        control_files = lockable_files.LockableFiles(a_bzrdir.transport,
-                                'branch-lock', lockable_files.TransportLock)
         control_files.create_lock()
         control_files.lock_write()
+        transport = a_controldir.transport
-        control_files._transport.mkdir_multi(dirs,
-                mode=control_files._dir_mode)
         try:
+            transport.mkdir('revision-store',
+                            mode=a_controldir._get_dir_mode())
+            transport.mkdir('weaves', mode=a_controldir._get_dir_mode())
+            transport.put_bytes_non_atomic('inventory.weave', empty_weave,
+                                           mode=a_controldir._get_file_mode())
-            for file, content in files:
-                control_files.put(file, content)
         finally:
             control_files.unlock()
+        repository = self.open(a_controldir, _found=True)
+        self._run_post_repo_init_hooks(repository, a_controldir, shared)
+        return repository
+
+    def open(self, a_controldir, _found=False):
-        return self.open(a_bzrdir, _found=True)
-
-    def _get_control_store(self, repo_transport, control_files):
-        """Return the control store for this repository."""
-        return self._get_versioned_file_store('',
-                                              repo_transport,
-                                              control_files,
-                                              prefixed=False)
-
-    def _get_text_store(self, transport, control_files):
-        """Get a store for file texts for this format."""
-        raise NotImplementedError(self._get_text_store)
-
-    def open(self, a_bzrdir, _found=False):
         """See RepositoryFormat.open()."""
         if not _found:
             # we are being called directly and must probe.
             raise NotImplementedError
 
+        repo_transport = a_controldir.get_repository_transport(None)
+        result = AllInOneRepository(_format=self, a_controldir=a_controldir)
+        result.revisions = self._get_revisions(repo_transport, result)
+        result.signatures = self._get_signatures(repo_transport, result)
+        result.inventories = self._get_inventories(repo_transport, result)
+        result.texts = self._get_texts(repo_transport, result)
+        result.chk_bytes = None
+        return result
-        repo_transport = a_bzrdir.get_repository_transport(None)
-        control_files = a_bzrdir._control_files
-        text_store = self._get_text_store(repo_transport, control_files)
-        control_store = self._get_control_store(repo_transport, control_files)
-        _revision_store = self._get_revision_store(repo_transport, control_files)
-        return AllInOneRepository(_format=self,
-                                  a_bzrdir=a_bzrdir,
-                                  _revision_store=_revision_store,
-                                  control_store=control_store,
-                                  text_store=text_store)
 
+    def is_deprecated(self):
+        return True
-    def check_conversion_target(self, target_format):
-        pass
 
 
 class RepositoryFormat4(PreSplitOutRepositoryFormat):
     has been removed.
     """
 
+    supports_funky_characters = False
-    _matchingbzrdir = bzrdir.BzrDirFormat4()
 
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat4()
-    def __init__(self):
-        super(RepositoryFormat4, self).__init__()
 
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         """Format 4 is not supported.
 
         It is not supported because the model changed from 4 to 5 and the
+        conversion logic is expensive - so doing it on the fly was not
-        conversion logic is expensive - so doing it on the fly was not 
         feasible.
         """
         return False
 
+    def _get_inventories(self, repo_transport, repo, name='inventory'):
+        # No inventories store written so far.
+        return None
+
+    def _get_revisions(self, repo_transport, repo):
+        from .xml4 import serializer_v4
+        return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 serializer_v4, True, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
+
+    def _get_signatures(self, repo_transport, repo):
+        return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
+
+    def _get_texts(self, repo_transport, repo):
+        return None
-    def _get_control_store(self, repo_transport, control_files):
-        """Format 4 repositories have no formal control store at this point.
-        
-        This will cause any control-file-needing apis to fail - this is desired.
-        """
-        return None
-    
-    def _get_revision_store(self, repo_transport, control_files):
-        """See RepositoryFormat._get_revision_store()."""
-        from bzrlib.xml4 import serializer_v4
-        return self._get_text_rev_store(repo_transport,
-                                        control_files,
-                                        'revision-store',
-                                        serializer=serializer_v4)
-
-    def _get_text_store(self, transport, control_files):
-        """See RepositoryFormat._get_text_store()."""
 
 
 class RepositoryFormat5(PreSplitOutRepositoryFormat):
     """
 
     _versionedfile_class = weave.WeaveFile
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat5()
+    supports_funky_characters = False
-    _matchingbzrdir = bzrdir.BzrDirFormat5()
 
+    @property
+    def _serializer(self):
+        return xml5.serializer_v5
-    def __init__(self):
-        super(RepositoryFormat5, self).__init__()
 
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         return "Weave repository format 5"
 
+    def network_name(self):
+        """The network name for this format is the control dirs disk label."""
+        return self._matchingcontroldir.get_format_string()
+
+    def _get_inventories(self, repo_transport, repo, name='inventory'):
+        mapper = versionedfile.ConstantMapper(name)
+        return versionedfile.ThunkedVersionedFiles(repo_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
+
+    def _get_revisions(self, repo_transport, repo):
+        return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 xml5.serializer_v5, False, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
+
+    def _get_signatures(self, repo_transport, repo):
+        return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
+
+    def _get_texts(self, repo_transport, repo):
+        mapper = versionedfile.PrefixMapper()
+        base_transport = repo_transport.clone('weaves')
+        return versionedfile.ThunkedVersionedFiles(base_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
-    def _get_revision_store(self, repo_transport, control_files):
-        """See RepositoryFormat._get_revision_store()."""
-        """Return the revision store object for this a_bzrdir."""
-        return self._get_text_rev_store(repo_transport,
-                                        control_files,
-                                        'revision-store',
-                                        compressed=False)
-
-    def _get_text_store(self, transport, control_files):
-        """See RepositoryFormat._get_text_store()."""
-        return self._get_versioned_file_store('weaves', transport, control_files, prefixed=False)
 
 
 class RepositoryFormat6(PreSplitOutRepositoryFormat):
     """
 
     _versionedfile_class = weave.WeaveFile
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat6()
+    supports_funky_characters = False
-    _matchingbzrdir = bzrdir.BzrDirFormat6()
 
+    @property
+    def _serializer(self):
+        return xml5.serializer_v5
-    def __init__(self):
-        super(RepositoryFormat6, self).__init__()
 
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         return "Weave repository format 6"
 
+    def network_name(self):
+        """The network name for this format is the control dirs disk label."""
+        return self._matchingcontroldir.get_format_string()
+
+    def _get_inventories(self, repo_transport, repo, name='inventory'):
+        mapper = versionedfile.ConstantMapper(name)
+        return versionedfile.ThunkedVersionedFiles(repo_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
+
+    def _get_revisions(self, repo_transport, repo):
+        return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
+
+    def _get_signatures(self, repo_transport, repo):
+        return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  False, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
+
+    def _get_texts(self, repo_transport, repo):
+        mapper = versionedfile.HashPrefixMapper()
+        base_transport = repo_transport.clone('weaves')
+        return versionedfile.ThunkedVersionedFiles(base_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
+
+
+class RepositoryFormat7(MetaDirVersionedFileRepositoryFormat):
-    def _get_revision_store(self, repo_transport, control_files):
-        """See RepositoryFormat._get_revision_store()."""
-        return self._get_text_rev_store(repo_transport,
-                                        control_files,
-                                        'revision-store',
-                                        compressed=False,
-                                        prefixed=True)
-
-    def _get_text_store(self, transport, control_files):
-        """See RepositoryFormat._get_text_store()."""
-        return self._get_versioned_file_store('weaves', transport, control_files)
-
-
-class RepositoryFormat7(MetaDirRepositoryFormat):
     """Bzr repository 7.
 
     This repository format has:
     """
 
     _versionedfile_class = weave.WeaveFile
+    supports_ghosts = False
+    supports_chks = False
+    supports_funky_characters = False
+    revision_graph_can_have_wrong_parents = False
+
+    _fetch_order = 'topological'
+    _fetch_reconcile = True
+    fast_deltas = False
+
+    @property
+    def _serializer(self):
+        return xml5.serializer_v5
+
+    @classmethod
+    def get_format_string(cls):
-
-    def _get_control_store(self, repo_transport, control_files):
-        """Return the control store for this repository."""
-        return self._get_versioned_file_store('',
-                                              repo_transport,
-                                              control_files,
-                                              prefixed=False)
-
-    def get_format_string(self):
         """See RepositoryFormat.get_format_string()."""
+        return b"Bazaar-NG Repository format 7"
-        return "Bazaar-NG Repository format 7"
 
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         return "Weave repository format 7"
 
+    def _get_inventories(self, repo_transport, repo, name='inventory'):
+        mapper = versionedfile.ConstantMapper(name)
+        return versionedfile.ThunkedVersionedFiles(repo_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
+
+    def _get_revisions(self, repo_transport, repo):
+        return RevisionTextStore(repo_transport.clone('revision-store'),
+                                 xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)
+
+    def _get_signatures(self, repo_transport, repo):
+        return SignatureTextStore(repo_transport.clone('revision-store'),
+                                  True, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)
+
+    def _get_texts(self, repo_transport, repo):
+        mapper = versionedfile.HashPrefixMapper()
+        base_transport = repo_transport.clone('weaves')
+        return versionedfile.ThunkedVersionedFiles(base_transport,
+                                                   weave.WeaveFile, mapper, repo.is_locked)
+
+    def initialize(self, a_controldir, shared=False):
-    def check_conversion_target(self, target_format):
-        pass
-
-    def _get_revision_store(self, repo_transport, control_files):
-        """See RepositoryFormat._get_revision_store()."""
-        return self._get_text_rev_store(repo_transport,
-                                        control_files,
-                                        'revision-store',
-                                        compressed=False,
-                                        prefixed=True,
-                                        )
-
-    def _get_text_store(self, transport, control_files):
-        """See RepositoryFormat._get_text_store()."""
-        return self._get_versioned_file_store('weaves',
-                                              transport,
-                                              control_files)
-
-    def initialize(self, a_bzrdir, shared=False):
         """Create a weave repository.
 
         :param shared: If true the repository will be initialized as a shared
                        repository.
         """
         # Create an empty weave
+        sio = BytesIO()
-        sio = StringIO()
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()
 
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)
-        mutter('creating repository in %s.', a_bzrdir.transport.base)
         dirs = ['revision-store', 'weaves']
+        files = [('inventory.weave', BytesIO(empty_weave)),
-        files = [('inventory.weave', StringIO(empty_weave)), 
                  ]
         utf8_files = [('format', self.get_format_string())]
+
+        self._upload_blank_content(
+            a_controldir, dirs, files, utf8_files, shared)
+        return self.open(a_controldir=a_controldir, _found=True)
+
+    def open(self, a_controldir, _found=False, _override_transport=None):
-
-        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
-        return self.open(a_bzrdir=a_bzrdir, _found=True)
-
-    def open(self, a_bzrdir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().
+
-        
         :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                     repository at a slightly different url
                                     than normal. I.e. during 'upgrade'.
         """
         if not _found:
+            format = RepositoryFormatMetaDir.find_format(a_controldir)
-            format = RepositoryFormat.find_format(a_bzrdir)
-            assert format.__class__ ==  self.__class__
         if _override_transport is not None:
            repo_transport = _override_transport
         else:
+            repo_transport = a_controldir.get_repository_transport(None)
-            repo_transport = a_bzrdir.get_repository_transport(None)
         control_files = lockable_files.LockableFiles(repo_transport,
+                                                     'lock', lockdir.LockDir)
+        result = WeaveMetaDirRepository(_format=self, a_controldir=a_controldir,
+                                        control_files=control_files)
+        result.revisions = self._get_revisions(repo_transport, result)
+        result.signatures = self._get_signatures(repo_transport, result)
+        result.inventories = self._get_inventories(repo_transport, result)
+        result.texts = self._get_texts(repo_transport, result)
+        result.chk_bytes = None
+        result._transport = repo_transport
+        return result
+
+    def is_deprecated(self):
+        return True
+
+
+class TextVersionedFiles(VersionedFiles):
+    """Just-a-bunch-of-files based VersionedFile stores."""
+
+    def __init__(self, transport, compressed, mapper, is_locked, can_write):
+        self._compressed = compressed
+        self._transport = transport
+        self._mapper = mapper
+        if self._compressed:
+            self._ext = '.gz'
+        else:
+            self._ext = ''
+        self._is_locked = is_locked
+        self._can_write = can_write
+
+    def add_lines(self, key, parents, lines):
+        """Add a revision to the store."""
+        if not self._is_locked():
+            raise errors.ObjectNotLocked(self)
+        if not self._can_write():
+            raise errors.ReadOnlyError(self)
+        if b'/' in key[-1]:
+            raise ValueError('bad idea to put / in %r' % (key,))
+        chunks = lines
+        if self._compressed:
+            chunks = tuned_gzip.chunks_to_gzip(chunks)
+        path = self._map(key)
+        self._transport.put_file_non_atomic(
+            path, BytesIO(b''.join(chunks)),
+            create_parent_dir=True)
+
+    def insert_record_stream(self, stream):
+        adapters = {}
+        for record in stream:
+            # Raise an error when a record is missing.
+            if record.storage_kind == 'absent':
+                raise errors.RevisionNotPresent([record.key[0]], self)
+            # adapt to non-tuple interface
+            if record.storage_kind in ('fulltext', 'chunks', 'lines'):
+                self.add_lines(record.key, None,
+                               record.get_bytes_as('lines'))
+            else:
+                adapter_key = record.storage_kind, 'lines'
+                try:
+                    adapter = adapters[adapter_key]
+                except KeyError:
+                    adapter_factory = adapter_registry.get(adapter_key)
+                    adapter = adapter_factory(self)
+                    adapters[adapter_key] = adapter
+                lines = adapter.get_bytes(
+                    record, record.get_bytes_as(record.storage_kind))
+                try:
+                    self.add_lines(record.key, None, lines)
+                except errors.RevisionAlreadyPresent:
+                    pass
+
+    def _load_text(self, key):
+        if not self._is_locked():
+            raise errors.ObjectNotLocked(self)
+        path = self._map(key)
+        try:
+            text = self._transport.get_bytes(path)
+            compressed = self._compressed
+        except errors.NoSuchFile:
+            if self._compressed:
+                # try without the .gz
+                path = path[:-3]
+                try:
+                    text = self._transport.get_bytes(path)
+                    compressed = False
+                except errors.NoSuchFile:
+                    return None
+            else:
+                return None
+        if compressed:
+            text = gzip.GzipFile(mode='rb', fileobj=BytesIO(text)).read()
+        return text
+
+    def _map(self, key):
+        return self._mapper.map(key) + self._ext
+
+
+class RevisionTextStore(TextVersionedFiles):
+    """Legacy thunk for format 4 repositories."""
+
+    def __init__(self, transport, serializer, compressed, mapper, is_locked,
+                 can_write):
+        """Create a RevisionTextStore at transport with serializer."""
+        TextVersionedFiles.__init__(self, transport, compressed, mapper,
+                                    is_locked, can_write)
+        self._serializer = serializer
+
+    def _load_text_parents(self, key):
+        text = self._load_text(key)
+        if text is None:
+            return None, None
+        parents = self._serializer.read_revision_from_string(text).parent_ids
+        return text, tuple((parent,) for parent in parents)
+
+    def get_parent_map(self, keys):
+        result = {}
+        for key in keys:
+            parents = self._load_text_parents(key)[1]
+            if parents is None:
+                continue
+            result[key] = parents
+        return result
+
+    def get_known_graph_ancestry(self, keys):
+        """Get a KnownGraph instance with the ancestry of keys."""
+        keys = self.keys()
+        parent_map = self.get_parent_map(keys)
+        kg = _mod_graph.KnownGraph(parent_map)
+        return kg
+
+    def get_record_stream(self, keys, sort_order, include_delta_closure):
+        for key in keys:
+            text, parents = self._load_text_parents(key)
+            if text is None:
+                yield AbsentContentFactory(key)
+            else:
+                yield FulltextContentFactory(key, parents, None, text)
+
+    def keys(self):
+        if not self._is_locked():
+            raise errors.ObjectNotLocked(self)
+        relpaths = set()
+        for quoted_relpath in self._transport.iter_files_recursive():
+            relpath = urlutils.unquote(quoted_relpath)
+            path, ext = os.path.splitext(relpath)
+            if ext == '.gz':
+                relpath = path
+            if not relpath.endswith('.sig'):
+                relpaths.add(relpath)
+        paths = list(relpaths)
+        return {self._mapper.unmap(path) for path in paths}
+
+
+class SignatureTextStore(TextVersionedFiles):
+    """Legacy thunk for format 4-7 repositories."""
+
+    def __init__(self, transport, compressed, mapper, is_locked, can_write):
+        TextVersionedFiles.__init__(self, transport, compressed, mapper,
+                                    is_locked, can_write)
+        self._ext = '.sig' + self._ext
+
+    def get_parent_map(self, keys):
+        result = {}
+        for key in keys:
+            text = self._load_text(key)
+            if text is None:
+                continue
+            result[key] = None
+        return result
+
+    def get_record_stream(self, keys, sort_order, include_delta_closure):
+        for key in keys:
+            text = self._load_text(key)
+            if text is None:
+                yield AbsentContentFactory(key)
+            else:
+                yield FulltextContentFactory(key, None, None, text)
+
+    def keys(self):
+        if not self._is_locked():
+            raise errors.ObjectNotLocked(self)
+        relpaths = set()
+        for quoted_relpath in self._transport.iter_files_recursive():
+            relpath = urlutils.unquote(quoted_relpath)
+            path, ext = os.path.splitext(relpath)
+            if ext == '.gz':
+                relpath = path
+            if not relpath.endswith('.sig'):
+                continue
+            relpaths.add(relpath[:-4])
+        paths = list(relpaths)
+        return {self._mapper.unmap(path) for path in paths}
+
+
+class InterWeaveRepo(InterSameDataRepository):
+    """Optimised code paths between Weave based repositories.
+    """
+
+    @classmethod
+    def _get_repo_format_to_test(self):
+        return RepositoryFormat7()
+
+    @staticmethod
+    def is_compatible(source, target):
+        """Be compatible with known Weave formats.
+
+        We don't test for the stores being of specific types because that
+        could lead to confusing results, and there is no need to be
+        overly general.
+        """
+        try:
+            return (isinstance(source._format, (RepositoryFormat5,
+                                                RepositoryFormat6,
+                                                RepositoryFormat7))
+                    and isinstance(target._format, (RepositoryFormat5,
+                                                    RepositoryFormat6,
+                                                    RepositoryFormat7)))
+        except AttributeError:
+            return False
+
+    def copy_content(self, revision_id=None):
+        """See InterRepository.copy_content()."""
+        with self.lock_write():
+            # weave specific optimised path:
+            try:
+                self.target.set_make_working_trees(
+                    self.source.make_working_trees())
+            except (errors.RepositoryUpgradeRequired, NotImplementedError):
+                pass
+            # FIXME do not peek!
+            if self.source._transport.listable():
+                with ui.ui_factory.nested_progress_bar() as pb:
+                    self.target.texts.insert_record_stream(
+                        self.source.texts.get_record_stream(
+                            self.source.texts.keys(), 'topological', False))
+                    pb.update('Copying inventory', 0, 1)
+                    self.target.inventories.insert_record_stream(
+                        self.source.inventories.get_record_stream(
+                            self.source.inventories.keys(), 'topological', False))
+                    self.target.signatures.insert_record_stream(
+                        self.source.signatures.get_record_stream(
+                            self.source.signatures.keys(),
+                            'unordered', True))
+                    self.target.revisions.insert_record_stream(
+                        self.source.revisions.get_record_stream(
+                            self.source.revisions.keys(),
+                            'topological', True))
+            else:
+                self.target.fetch(self.source, revision_id=revision_id)
+
+    def search_missing_revision_ids(self, find_ghosts=True, revision_ids=None,
+                                    if_present_ids=None, limit=None):
+        """See InterRepository.search_missing_revision_ids()."""
+        with self.lock_read():
+            # we want all revisions to satisfy revision_id in source.
+            # but we don't want to stat every file here and there.
+            # we want then, all revisions other needs to satisfy revision_id
+            # checked, but not those that we have locally.
+            # so the first thing is to get a subset of the revisions to
+            # satisfy revision_id in source, and then eliminate those that
+            # we do already have.
+            # this is slow on high latency connection to self, but as this
+            # disk format scales terribly for push anyway due to rewriting
+            # inventory.weave, this is considered acceptable.
+            # - RBC 20060209
+            source_ids_set = self._present_source_revisions_for(
+                revision_ids, if_present_ids)
+            # source_ids is the worst possible case we may need to pull.
+            # now we want to filter source_ids against what we actually
+            # have in target, but don't try to check for existence where we
+            # know we do not have a revision as that would be pointless.
+            target_ids = set(self.target._all_possible_ids())
+            possibly_present_revisions = target_ids.intersection(
+                source_ids_set)
+            actually_present_revisions = set(
+                self.target._eliminate_revisions_not_present(
+                    possibly_present_revisions))
+            required_revisions = source_ids_set.difference(
+                actually_present_revisions)
+            if revision_ids is not None:
+                # we used get_ancestry to determine source_ids then we are
+                # assured all revisions referenced are present as they are
+                # installed in topological order. and the tip revision was
+                # validated by get_ancestry.
+                result_set = required_revisions
+            else:
+                # if we just grabbed the possibly available ids, then
+                # we only have an estimate of whats available and need to
+                # validate that against the revision records.
+                result_set = set(
+                    self.source._eliminate_revisions_not_present(
+                        required_revisions))
+            if limit is not None:
+                topo_ordered = self.source.get_graph().iter_topo_order(result_set)
+                result_set = set(itertools.islice(topo_ordered, limit))
+            return self.source.revision_ids_to_search_result(result_set)
+
+
+InterRepository.register_optimiser(InterWeaveRepo)
+
+
+def get_extra_interrepo_test_combinations():
+    from ...bzr import knitrepo
+    return [(InterRepository, RepositoryFormat5(),
+             knitrepo.RepositoryFormatKnit3())]
-                                'lock', lockdir.LockDir)
-        text_store = self._get_text_store(repo_transport, control_files)
-        control_store = self._get_control_store(repo_transport, control_files)
-        _revision_store = self._get_revision_store(repo_transport, control_files)
-        return WeaveMetaDirRepository(_format=self,
-            a_bzrdir=a_bzrdir,
-            control_files=control_files,
-            _revision_store=_revision_store,
-            control_store=control_store,
-            text_store=text_store)
-
-
-_legacy_formats = [RepositoryFormat4(),
-                   RepositoryFormat5(),
-                   RepositoryFormat6()]
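
As a usage illustration, the snippet below is a minimal sketch (not part of the diff above) that exercises the weave format classes this file defines. It assumes the module is importable as breezy.plugins.weave_fmt.repository, which is an assumed post-rename location for bzrlib/repofmt/weaverepo.py; only get_format_description(), defined on each format class in the new code above, is called.

# Minimal sketch; the import path is an assumption, not confirmed by the diff.
from breezy.plugins.weave_fmt.repository import (
    RepositoryFormat5,
    RepositoryFormat6,
    RepositoryFormat7,
    )

for fmt in (RepositoryFormat5(), RepositoryFormat6(), RepositoryFormat7()):
    # Each format class in the diff defines get_format_description(),
    # e.g. "Weave repository format 5" for RepositoryFormat5.
    print(fmt.get_format_description())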