/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
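
If you use the Breezy client (brz) rather than the old bzr client, the equivalent command should be:
brz branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar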

Viewing changes to bzrlib/repofmt/weaverepo.py

  • Committer: John Ferlito
  • Date: 2009-09-02 04:31:45 UTC
  • mto: (4665.7.1 serve-init)
  • mto: This revision was merged to the branch mainline in revision 4913.
  • Revision ID: johnf@inodes.org-20090902043145-gxdsfw03ilcwbyn5
Add a debian init script for bzr --serve

@@ -1 +1 @@
-# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2007-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -20 +20 @@
 ghosts.
 """

+import gzip
+from io import BytesIO
 import os
-from cStringIO import StringIO
-import urllib

-from bzrlib.lazy_import import lazy_import
+from ...lazy_import import lazy_import
 lazy_import(globals(), """
-from bzrlib import (
-    xml5,
+import itertools
+
+from breezy import (
     graph as _mod_graph,
+    ui,
+    )
+from breezy.bzr import (
+    xml5,
     )
 """)
-from bzrlib import (
-    bzrdir,
+from ... import (
     debug,
     errors,
     lockable_files,
     lockdir,
     osutils,
-    revision as _mod_revision,
+    trace,
+    tuned_gzip,
     urlutils,
+    )
+from ...bzr import (
     versionedfile,
     weave,
     weavefile,
     )
-from bzrlib.decorators import needs_read_lock, needs_write_lock
-from bzrlib.repository import (
-    CommitBuilder,
-    MetaDirVersionedFileRepository,
-    MetaDirRepositoryFormat,
-    Repository,
-    RepositoryFormat,
-    )
-from bzrlib.store.text import TextStore
-from bzrlib.trace import mutter
-from bzrlib.tuned_gzip import GzipFile, bytes_to_gzip
-from bzrlib.versionedfile import (
+from ...repository import (
+    InterRepository,
+    )
+from ...bzr.repository import (
+    RepositoryFormatMetaDir,
+    )
+from .store.text import TextStore
+from ...bzr.versionedfile import (
     AbsentContentFactory,
     FulltextContentFactory,
     VersionedFiles,
     )
-
-
-class AllInOneRepository(Repository):
+from ...bzr.vf_repository import (
+    InterSameDataRepository,
+    VersionedFileCommitBuilder,
+    VersionedFileRepository,
+    VersionedFileRepositoryFormat,
+    MetaDirVersionedFileRepository,
+    MetaDirVersionedFileRepositoryFormat,
+    )
+
+from . import bzrdir as weave_bzrdir
+
+
+class AllInOneRepository(VersionedFileRepository):
     """Legacy support - the repository behaviour for all-in-one branches."""

     @property
@@ -70 +83 @@
         return xml5.serializer_v5

     def _escape(self, file_or_path):
-        if not isinstance(file_or_path, basestring):
+        if not isinstance(file_or_path, str):
             file_or_path = '/'.join(file_or_path)
         if file_or_path == '':
             return u''
         return urlutils.escape(osutils.safe_unicode(file_or_path))

-    def __init__(self, _format, a_bzrdir):
+    def __init__(self, _format, a_controldir):
         # we reuse one control files instance.
-        dir_mode = a_bzrdir._get_dir_mode()
-        file_mode = a_bzrdir._get_file_mode()
+        dir_mode = a_controldir._get_dir_mode()
+        file_mode = a_controldir._get_file_mode()

         def get_store(name, compressed=True, prefixed=False):
             # FIXME: This approach of assuming stores are all entirely compressed
@@ -87 +100 @@
             # some existing branches where there's a mixture; we probably
             # still want the option to look for both.
             relpath = self._escape(name)
-            store = TextStore(a_bzrdir.transport.clone(relpath),
+            store = TextStore(a_controldir.transport.clone(relpath),
                               prefixed=prefixed, compressed=compressed,
                               dir_mode=dir_mode,
                               file_mode=file_mode)
@@ -100 +113 @@
             # which allows access to this old info.
             self.inventory_store = get_store('inventory-store')
             self._text_store = get_store('text-store')
-        super(AllInOneRepository, self).__init__(_format, a_bzrdir, a_bzrdir._control_files)
+        super(AllInOneRepository, self).__init__(
+            _format, a_controldir, a_controldir._control_files)

-    @needs_read_lock
     def _all_possible_ids(self):
         """Return all the possible revisions that we could find."""
         if 'evil' in debug.debug_flags:
-            mutter_callsite(3, "_all_possible_ids scales with size of history.")
-        return [key[-1] for key in self.inventories.keys()]
+            trace.mutter_callsite(
+                3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]

-    @needs_read_lock
     def _all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.

@@ -117 +131 @@
         present: for weaves ghosts may lead to a lack of correctness until
         the reweave updates the parents list.
         """
-        return [key[-1] for key in self.revisions.keys()]
+        with self.lock_read():
+            return [key[-1] for key in self.revisions.keys()]

     def _activate_new_inventory(self):
         """Put a replacement inventory.new into use as inventories."""
         # Copy the content across
-        t = self.bzrdir._control_files._transport
+        t = self.controldir._control_files._transport
         t.copy('inventory.new.weave', 'inventory.weave')
         # delete the temp inventory
         t.delete('inventory.new.weave')
@@ -130 +145 @@
         self.inventories.keys()

     def _backup_inventory(self):
-        t = self.bzrdir._control_files._transport
+        t = self.controldir._control_files._transport
         t.copy('inventory.weave', 'inventory.backup.weave')

     def _temp_inventories(self):
-        t = self.bzrdir._control_files._transport
+        t = self.controldir._control_files._transport
         return self._format._get_inventories(t, self, 'inventory.new')

     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
-                           revision_id=None):
+                           revision_id=None, lossy=False):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
-        result = CommitBuilder(self, parents, config, timestamp, timezone,
-                              committer, revprops, revision_id)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
         self.start_write_group()
         return result

-    @needs_read_lock
-    def get_revisions(self, revision_ids):
-        revs = self._get_revisions(revision_ids)
-        return revs
-
     def _inventory_add_lines(self, revision_id, parents, lines,
-        check_content=True):
+                             check_content=True):
         """Store lines in inv_vf and return the sha1 of the inventory."""
         present_parents = self.get_graph().get_parent_map(parents)
         final_parents = []
@@ -160 +170 @@
             if parent in present_parents:
                 final_parents.append((parent,))
         return self.inventories.add_lines((revision_id,), final_parents, lines,
-            check_content=check_content)[0]
+                                          check_content=check_content)[0]

     def is_shared(self):
         """AllInOne repositories cannot be shared."""
         return False

-    @needs_write_lock
     def set_make_working_trees(self, new_value):
         """Set the policy flag for making working trees when creating branches.

@@ -176 +185 @@
         :param new_value: True to restore the default, False to disable making
                           working trees.
         """
-        raise errors.RepositoryUpgradeRequired(self.bzrdir.root_transport.base)
+        raise errors.RepositoryUpgradeRequired(self.user_url)

     def make_working_trees(self):
         """Returns the policy for making working trees on new branches."""
         return True

-    def revision_graph_can_have_wrong_parents(self):
-        # XXX: This is an old format that we don't support full checking on, so
-        # just claim that checking for this inconsistency is not required.
-        return False
-

 class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
     """A subclass of MetaDirRepository to set weave specific policy."""

-    def __init__(self, _format, a_bzrdir, control_files):
-        super(WeaveMetaDirRepository, self).__init__(_format, a_bzrdir, control_files)
+    def __init__(self, _format, a_controldir, control_files):
+        super(WeaveMetaDirRepository, self).__init__(
+            _format, a_controldir, control_files)
         self._serializer = _format._serializer

-    @needs_read_lock
     def _all_possible_ids(self):
         """Return all the possible revisions that we could find."""
         if 'evil' in debug.debug_flags:
-            mutter_callsite(3, "_all_possible_ids scales with size of history.")
-        return [key[-1] for key in self.inventories.keys()]
+            trace.mutter_callsite(
+                3, "_all_possible_ids scales with size of history.")
+        with self.lock_read():
+            return [key[-1] for key in self.inventories.keys()]

-    @needs_read_lock
     def _all_revision_ids(self):
         """Returns a list of all the revision ids in the repository.

@@ -210 +215 @@
         present: for weaves ghosts may lead to a lack of correctness until
         the reweave updates the parents list.
         """
-        return [key[-1] for key in self.revisions.keys()]
+        with self.lock_read():
+            return [key[-1] for key in self.revisions.keys()]

     def _activate_new_inventory(self):
         """Put a replacement inventory.new into use as inventories."""
@@ -232 +238 @@

     def get_commit_builder(self, branch, parents, config, timestamp=None,
                            timezone=None, committer=None, revprops=None,
-                           revision_id=None):
+                           revision_id=None, lossy=False):
         self._check_ascii_revisionid(revision_id, self.get_commit_builder)
-        result = CommitBuilder(self, parents, config, timestamp, timezone,
-                              committer, revprops, revision_id)
+        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
+                                            timezone, committer, revprops, revision_id, lossy=lossy)
         self.start_write_group()
         return result

-    @needs_read_lock
     def get_revision(self, revision_id):
         """Return the Revision object for a named revision"""
-        r = self.get_revision_reconcile(revision_id)
-        return r
+        with self.lock_read():
+            return self.get_revision_reconcile(revision_id)

     def _inventory_add_lines(self, revision_id, parents, lines,
-        check_content=True):
+                             check_content=True):
         """Store lines in inv_vf and return the sha1 of the inventory."""
         present_parents = self.get_graph().get_parent_map(parents)
         final_parents = []
@@ -254 +259 @@
             if parent in present_parents:
                 final_parents.append((parent,))
         return self.inventories.add_lines((revision_id,), final_parents, lines,
-            check_content=check_content)[0]
-
-    def revision_graph_can_have_wrong_parents(self):
-        return False
-
-
-class PreSplitOutRepositoryFormat(RepositoryFormat):
+                                          check_content=check_content)[0]
+
+
+class PreSplitOutRepositoryFormat(VersionedFileRepositoryFormat):
     """Base class for the pre split out repository formats."""

     rich_root_data = False
@@ -268 +270 @@
     supports_ghosts = False
     supports_external_lookups = False
     supports_chks = False
+    supports_nesting_repositories = True
     _fetch_order = 'topological'
     _fetch_reconcile = True
     fast_deltas = False
+    supports_leaving_lock = False
+    supports_overriding_transport = False
+    # XXX: This is an old format that we don't support full checking on, so
+    # just claim that checking for this inconsistency is not required.
+    revision_graph_can_have_wrong_parents = False

-    def initialize(self, a_bzrdir, shared=False, _internal=False):
+    def initialize(self, a_controldir, shared=False, _internal=False):
         """Create a weave repository."""
         if shared:
-            raise errors.IncompatibleFormat(self, a_bzrdir._format)
+            raise errors.IncompatibleFormat(self, a_controldir._format)

         if not _internal:
             # always initialized when the bzrdir is.
-            return self.open(a_bzrdir, _found=True)
+            return self.open(a_controldir, _found=True)

         # Create an empty weave
-        sio = StringIO()
+        sio = BytesIO()
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()

-        mutter('creating repository in %s.', a_bzrdir.transport.base)
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)

         # FIXME: RBC 20060125 don't peek under the covers
         # NB: no need to escape relative paths that are url safe.
-        control_files = lockable_files.LockableFiles(a_bzrdir.transport,
-            'branch-lock', lockable_files.TransportLock)
+        control_files = lockable_files.LockableFiles(a_controldir.transport,
+                                                     'branch-lock', lockable_files.TransportLock)
         control_files.create_lock()
         control_files.lock_write()
-        transport = a_bzrdir.transport
+        transport = a_controldir.transport
         try:
-            transport.mkdir_multi(['revision-store', 'weaves'],
-                mode=a_bzrdir._get_dir_mode())
+            transport.mkdir('revision-store',
+                            mode=a_controldir._get_dir_mode())
+            transport.mkdir('weaves', mode=a_controldir._get_dir_mode())
             transport.put_bytes_non_atomic('inventory.weave', empty_weave,
-                mode=a_bzrdir._get_file_mode())
+                                           mode=a_controldir._get_file_mode())
         finally:
             control_files.unlock()
-        return self.open(a_bzrdir, _found=True)
+        repository = self.open(a_controldir, _found=True)
+        self._run_post_repo_init_hooks(repository, a_controldir, shared)
+        return repository

-    def open(self, a_bzrdir, _found=False):
+    def open(self, a_controldir, _found=False):
         """See RepositoryFormat.open()."""
         if not _found:
             # we are being called directly and must probe.
             raise NotImplementedError

-        repo_transport = a_bzrdir.get_repository_transport(None)
-        control_files = a_bzrdir._control_files
-        result = AllInOneRepository(_format=self, a_bzrdir=a_bzrdir)
+        repo_transport = a_controldir.get_repository_transport(None)
+        result = AllInOneRepository(_format=self, a_controldir=a_controldir)
         result.revisions = self._get_revisions(repo_transport, result)
         result.signatures = self._get_signatures(repo_transport, result)
         result.inventories = self._get_inventories(repo_transport, result)
@@ -320 +330 @@
         result.chk_bytes = None
         return result

+    def is_deprecated(self):
+        return True
+

 class RepositoryFormat4(PreSplitOutRepositoryFormat):
     """Bzr repository format 4.
@@ -333 +346 @@
     has been removed.
     """

-    _matchingbzrdir = bzrdir.BzrDirFormat4()
+    supports_funky_characters = False
+
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat4()

     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
@@ -357 +372 @@
         return None

     def _get_revisions(self, repo_transport, repo):
-        from bzrlib.xml4 import serializer_v4
+        from .xml4 import serializer_v4
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            serializer_v4, True, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 serializer_v4, True, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
         return None
@@ -381 +396 @@
     """

     _versionedfile_class = weave.WeaveFile
-    _matchingbzrdir = bzrdir.BzrDirFormat5()
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat5()
+    supports_funky_characters = False
+
     @property
     def _serializer(self):
         return xml5.serializer_v5
@@ -392 +409 @@

     def network_name(self):
         """The network name for this format is the control dirs disk label."""
-        return self._matchingbzrdir.get_format_string()
+        return self._matchingcontroldir.get_format_string()

     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            xml5.serializer_v5, False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 xml5.serializer_v5, False, versionedfile.PrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            False, versionedfile.PrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  False, versionedfile.PrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.PrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)


 class RepositoryFormat6(PreSplitOutRepositoryFormat):
@@ -426 +443 @@
     """

     _versionedfile_class = weave.WeaveFile
-    _matchingbzrdir = bzrdir.BzrDirFormat6()
+    _matchingcontroldir = weave_bzrdir.BzrDirFormat6()
+    supports_funky_characters = False
+
     @property
     def _serializer(self):
         return xml5.serializer_v5
@@ -437 +456 @@

     def network_name(self):
         """The network name for this format is the control dirs disk label."""
-        return self._matchingbzrdir.get_format_string()
+        return self._matchingcontroldir.get_format_string()

     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            False, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  False, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.HashPrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
-
-
-class RepositoryFormat7(MetaDirRepositoryFormat):
+                                                   weave.WeaveFile, mapper, repo.is_locked)
+
+
+class RepositoryFormat7(MetaDirVersionedFileRepositoryFormat):
     """Bzr repository 7.

     This repository format has:
@@ -476 +495 @@
     _versionedfile_class = weave.WeaveFile
     supports_ghosts = False
     supports_chks = False
+    supports_funky_characters = False
+    revision_graph_can_have_wrong_parents = False

     _fetch_order = 'topological'
     _fetch_reconcile = True
     fast_deltas = False
+
     @property
     def _serializer(self):
         return xml5.serializer_v5

-    def get_format_string(self):
+    @classmethod
+    def get_format_string(cls):
         """See RepositoryFormat.get_format_string()."""
-        return "Bazaar-NG Repository format 7"
+        return b"Bazaar-NG Repository format 7"

     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
@@ -495 +518 @@
     def _get_inventories(self, repo_transport, repo, name='inventory'):
         mapper = versionedfile.ConstantMapper(name)
         return versionedfile.ThunkedVersionedFiles(repo_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

     def _get_revisions(self, repo_transport, repo):
         return RevisionTextStore(repo_transport.clone('revision-store'),
-            xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                 xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
+                                 repo.is_locked, repo.is_write_locked)

     def _get_signatures(self, repo_transport, repo):
         return SignatureTextStore(repo_transport.clone('revision-store'),
-            True, versionedfile.HashPrefixMapper(),
-            repo.is_locked, repo.is_write_locked)
+                                  True, versionedfile.HashPrefixMapper(),
+                                  repo.is_locked, repo.is_write_locked)

     def _get_texts(self, repo_transport, repo):
         mapper = versionedfile.HashPrefixMapper()
         base_transport = repo_transport.clone('weaves')
         return versionedfile.ThunkedVersionedFiles(base_transport,
-            weave.WeaveFile, mapper, repo.is_locked)
+                                                   weave.WeaveFile, mapper, repo.is_locked)

-    def initialize(self, a_bzrdir, shared=False):
+    def initialize(self, a_controldir, shared=False):
         """Create a weave repository.

         :param shared: If true the repository will be initialized as a shared
                        repository.
         """
         # Create an empty weave
-        sio = StringIO()
+        sio = BytesIO()
         weavefile.write_weave_v5(weave.Weave(), sio)
         empty_weave = sio.getvalue()

-        mutter('creating repository in %s.', a_bzrdir.transport.base)
+        trace.mutter('creating repository in %s.', a_controldir.transport.base)
         dirs = ['revision-store', 'weaves']
-        files = [('inventory.weave', StringIO(empty_weave)),
+        files = [('inventory.weave', BytesIO(empty_weave)),
                  ]
         utf8_files = [('format', self.get_format_string())]

-        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
-        return self.open(a_bzrdir=a_bzrdir, _found=True)
+        self._upload_blank_content(
+            a_controldir, dirs, files, utf8_files, shared)
+        return self.open(a_controldir=a_controldir, _found=True)

-    def open(self, a_bzrdir, _found=False, _override_transport=None):
+    def open(self, a_controldir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().

         :param _override_transport: INTERNAL USE ONLY. Allows opening the
@@ -541 +565 @@
                                     than normal. I.e. during 'upgrade'.
         """
         if not _found:
-            format = RepositoryFormat.find_format(a_bzrdir)
+            format = RepositoryFormatMetaDir.find_format(a_controldir)
         if _override_transport is not None:
             repo_transport = _override_transport
         else:
-            repo_transport = a_bzrdir.get_repository_transport(None)
+            repo_transport = a_controldir.get_repository_transport(None)
         control_files = lockable_files.LockableFiles(repo_transport,
-                                'lock', lockdir.LockDir)
-        result = WeaveMetaDirRepository(_format=self, a_bzrdir=a_bzrdir,
-            control_files=control_files)
+                                                     'lock', lockdir.LockDir)
+        result = WeaveMetaDirRepository(_format=self, a_controldir=a_controldir,
+                                        control_files=control_files)
         result.revisions = self._get_revisions(repo_transport, result)
         result.signatures = self._get_signatures(repo_transport, result)
         result.inventories = self._get_inventories(repo_transport, result)
@@ -558 +582 @@
         result._transport = repo_transport
         return result

+    def is_deprecated(self):
+        return True
+

 class TextVersionedFiles(VersionedFiles):
     """Just-a-bunch-of-files based VersionedFile stores."""
@@ -579 +606 @@
             raise errors.ObjectNotLocked(self)
         if not self._can_write():
             raise errors.ReadOnlyError(self)
-        if '/' in key[-1]:
+        if b'/' in key[-1]:
             raise ValueError('bad idea to put / in %r' % (key,))
-        text = ''.join(lines)
+        chunks = lines
         if self._compressed:
-            text = bytes_to_gzip(text)
+            chunks = tuned_gzip.chunks_to_gzip(chunks)
         path = self._map(key)
-        self._transport.put_bytes_non_atomic(path, text, create_parent_dir=True)
+        self._transport.put_file_non_atomic(
+            path, BytesIO(b''.join(chunks)),
+            create_parent_dir=True)

     def insert_record_stream(self, stream):
         adapters = {}
@@ -594 +623 @@
             if record.storage_kind == 'absent':
                 raise errors.RevisionNotPresent([record.key[0]], self)
             # adapt to non-tuple interface
-            if record.storage_kind == 'fulltext':
+            if record.storage_kind in ('fulltext', 'chunks', 'lines'):
                 self.add_lines(record.key, None,
-                    osutils.split_lines(record.get_bytes_as('fulltext')))
+                               record.get_bytes_as('lines'))
             else:
-                adapter_key = record.storage_kind, 'fulltext'
+                adapter_key = record.storage_kind, 'lines'
                 try:
                     adapter = adapters[adapter_key]
                 except KeyError:
                     adapter_factory = adapter_registry.get(adapter_key)
                     adapter = adapter_factory(self)
                     adapters[adapter_key] = adapter
-                lines = osutils.split_lines(adapter.get_bytes(
-                    record, record.get_bytes_as(record.storage_kind)))
+                lines = adapter.get_bytes(
+                    record, record.get_bytes_as(record.storage_kind))
                 try:
                     self.add_lines(record.key, None, lines)
-                except RevisionAlreadyPresent:
+                except errors.RevisionAlreadyPresent:
                     pass

     def _load_text(self, key):
@@ -631 +660 @@
             else:
                 return None
         if compressed:
-            text = GzipFile(mode='rb', fileobj=StringIO(text)).read()
+            text = gzip.GzipFile(mode='rb', fileobj=BytesIO(text)).read()
         return text

     def _map(self, key):
@@ -642 +671 @@
     """Legacy thunk for format 4 repositories."""

     def __init__(self, transport, serializer, compressed, mapper, is_locked,
-        can_write):
+                 can_write):
         """Create a RevisionTextStore at transport with serializer."""
         TextVersionedFiles.__init__(self, transport, compressed, mapper,
-            is_locked, can_write)
+                                    is_locked, can_write)
         self._serializer = serializer

     def _load_text_parents(self, key):
@@ -684 +713 @@
             raise errors.ObjectNotLocked(self)
         relpaths = set()
         for quoted_relpath in self._transport.iter_files_recursive():
-            relpath = urllib.unquote(quoted_relpath)
+            relpath = urlutils.unquote(quoted_relpath)
             path, ext = os.path.splitext(relpath)
             if ext == '.gz':
                 relpath = path
-            if '.sig' not in relpath:
+            if not relpath.endswith('.sig'):
                 relpaths.add(relpath)
         paths = list(relpaths)
-        return set([self._mapper.unmap(path) for path in paths])
+        return {self._mapper.unmap(path) for path in paths}


 class SignatureTextStore(TextVersionedFiles):
@@ -699 +728 @@

     def __init__(self, transport, compressed, mapper, is_locked, can_write):
         TextVersionedFiles.__init__(self, transport, compressed, mapper,
-            is_locked, can_write)
+                                    is_locked, can_write)
         self._ext = '.sig' + self._ext

     def get_parent_map(self, keys):
@@ -724 +753 @@
             raise errors.ObjectNotLocked(self)
         relpaths = set()
         for quoted_relpath in self._transport.iter_files_recursive():
-            relpath = urllib.unquote(quoted_relpath)
+            relpath = urlutils.unquote(quoted_relpath)
             path, ext = os.path.splitext(relpath)
             if ext == '.gz':
                 relpath = path
@@ -732 +761 @@
                 continue
             relpaths.add(relpath[:-4])
         paths = list(relpaths)
-        return set([self._mapper.unmap(path) for path in paths])
-
-_legacy_formats = [RepositoryFormat4(),
-                   RepositoryFormat5(),
-                   RepositoryFormat6()]
+        return {self._mapper.unmap(path) for path in paths}
+
+
+class InterWeaveRepo(InterSameDataRepository):
+    """Optimised code paths between Weave based repositories.
+    """
+
+    @classmethod
+    def _get_repo_format_to_test(self):
+        return RepositoryFormat7()
+
+    @staticmethod
+    def is_compatible(source, target):
+        """Be compatible with known Weave formats.
+
+        We don't test for the stores being of specific types because that
+        could lead to confusing results, and there is no need to be
+        overly general.
+        """
+        try:
+            return (isinstance(source._format, (RepositoryFormat5,
+                                                RepositoryFormat6,
+                                                RepositoryFormat7))
+                    and isinstance(target._format, (RepositoryFormat5,
+                                                    RepositoryFormat6,
+                                                    RepositoryFormat7)))
+        except AttributeError:
+            return False
+
+    def copy_content(self, revision_id=None):
+        """See InterRepository.copy_content()."""
+        with self.lock_write():
+            # weave specific optimised path:
+            try:
+                self.target.set_make_working_trees(
+                    self.source.make_working_trees())
+            except (errors.RepositoryUpgradeRequired, NotImplementedError):
+                pass
+            # FIXME do not peek!
+            if self.source._transport.listable():
+                with ui.ui_factory.nested_progress_bar() as pb:
+                    self.target.texts.insert_record_stream(
+                        self.source.texts.get_record_stream(
+                            self.source.texts.keys(), 'topological', False))
+                    pb.update('Copying inventory', 0, 1)
+                    self.target.inventories.insert_record_stream(
+                        self.source.inventories.get_record_stream(
+                            self.source.inventories.keys(), 'topological', False))
+                    self.target.signatures.insert_record_stream(
+                        self.source.signatures.get_record_stream(
+                            self.source.signatures.keys(),
+                            'unordered', True))
+                    self.target.revisions.insert_record_stream(
+                        self.source.revisions.get_record_stream(
+                            self.source.revisions.keys(),
+                            'topological', True))
+            else:
+                self.target.fetch(self.source, revision_id=revision_id)
+
+    def search_missing_revision_ids(self, find_ghosts=True, revision_ids=None,
+                                    if_present_ids=None, limit=None):
+        """See InterRepository.search_missing_revision_ids()."""
+        with self.lock_read():
+            # we want all revisions to satisfy revision_id in source.
+            # but we don't want to stat every file here and there.
+            # we want then, all revisions other needs to satisfy revision_id
+            # checked, but not those that we have locally.
+            # so the first thing is to get a subset of the revisions to
+            # satisfy revision_id in source, and then eliminate those that
+            # we do already have.
+            # this is slow on high latency connection to self, but as this
+            # disk format scales terribly for push anyway due to rewriting
+            # inventory.weave, this is considered acceptable.
+            # - RBC 20060209
+            source_ids_set = self._present_source_revisions_for(
+                revision_ids, if_present_ids)
+            # source_ids is the worst possible case we may need to pull.
+            # now we want to filter source_ids against what we actually
+            # have in target, but don't try to check for existence where we
+            # know we do not have a revision as that would be pointless.
+            target_ids = set(self.target._all_possible_ids())
+            possibly_present_revisions = target_ids.intersection(
+                source_ids_set)
+            actually_present_revisions = set(
+                self.target._eliminate_revisions_not_present(
+                    possibly_present_revisions))
+            required_revisions = source_ids_set.difference(
+                actually_present_revisions)
+            if revision_ids is not None:
+                # we used get_ancestry to determine source_ids then we are
+                # assured all revisions referenced are present as they are
+                # installed in topological order. and the tip revision was
+                # validated by get_ancestry.
+                result_set = required_revisions
+            else:
+                # if we just grabbed the possibly available ids, then
+                # we only have an estimate of whats available and need to
+                # validate that against the revision records.
+                result_set = set(
+                    self.source._eliminate_revisions_not_present(
+                        required_revisions))
+            if limit is not None:
+                topo_ordered = self.source.get_graph().iter_topo_order(result_set)
+                result_set = set(itertools.islice(topo_ordered, limit))
+            return self.source.revision_ids_to_search_result(result_set)
+
+
+InterRepository.register_optimiser(InterWeaveRepo)
+
+
+def get_extra_interrepo_test_combinations():
+    from ...bzr import knitrepo
+    return [(InterRepository, RepositoryFormat5(),
+             knitrepo.RepositoryFormatKnit3())]
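
Most of the mechanical churn in this file follows a handful of Python 3 porting patterns: byte-oriented in-memory files (cStringIO.StringIO becomes io.BytesIO), basestring becomes str, urllib.unquote becomes urlutils.unquote, set([...]) becomes a set comprehension, and the @needs_read_lock decorators give way to explicit "with self.lock_read():" blocks. The short standalone sketch below is illustrative only (it is not code from this branch, and the helper names are made up); it shows why the BytesIO change is forced: gzip works on bytes, so any in-memory buffer that feeds it must be a bytes buffer on Python 3.

# Illustrative sketch, not part of the remove-bazaar branch: the gzip
# round-trip the weave stores rely on, written against io.BytesIO the
# way the new code does, instead of cStringIO.StringIO.
import gzip
from io import BytesIO


def compress_chunks(chunks):
    """Gzip an iterable of byte strings into a single bytes object."""
    buf = BytesIO()
    with gzip.GzipFile(mode='wb', fileobj=buf) as gz:
        for chunk in chunks:
            gz.write(chunk)
    return buf.getvalue()


def decompress(data):
    """Read a gzip byte string back into the original bytes."""
    return gzip.GzipFile(mode='rb', fileobj=BytesIO(data)).read()


if __name__ == '__main__':
    lines = [b'line one\n', b'line two\n']
    packed = compress_chunks(lines)
    assert decompress(packed) == b''.join(lines)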