/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Martin Pool
  • Date: 2007-10-03 08:06:44 UTC
  • mto: This revision was merged to the branch mainline in revision 2901.
  • Revision ID: mbp@sourcefrog.net-20071003080644-oivy0gkg98sex0ed
Avoid internal error tracebacks on failure to lock on readonly transport (#129701).

Add a new LockFailed error, which doesn't imply that we failed to get the lock
because of contention.  Raise this if we fail to create the pending or lock
directories because of Transport errors.
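
As a rough sketch of the distinction (simplified stand-ins, not the actual
bzrlib code from this revision; TransportError and create_pending_dir here are
invented placeholders for illustration):

    class TransportError(Exception):
        """Placeholder for a transport-level failure (e.g. a readonly server)."""

    class LockError(Exception):
        pass

    class LockContention(LockError):
        """Someone else already holds the lock."""

    class LockFailed(LockError):
        """Taking the lock failed for a reason other than contention."""

        def __init__(self, lock, why):
            LockError.__init__(self, 'Cannot lock %s: %s' % (lock, why))
            self.lock = lock
            self.why = why

    def create_pending_dir(mkdir, path):
        # Failing to create the pending or lock directory on the transport is
        # reported as LockFailed, not as an internal error traceback.
        try:
            mkdir(path)
        except TransportError, e:
            raise LockFailed(path, e)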

UnlockableTransport is not an internal error.

ReadOnlyLockError has a message which didn't match its name or usage; it's now
deprecated, and callers are updated to use LockFailed, which is more appropriate.

Add zero_ninetytwo deprecation symbol.

Unify assertMatchesRe with TestCase.assertContainsRe.
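
For example, callers can use the shared helper directly (illustrative test,
not taken from this diff):

    from bzrlib.tests import TestCase

    class TestErrorText(TestCase):

        def test_error_mentions_incompatibility(self):
            err = ValueError('repositories are not compatible')
            # assertContainsRe(haystack, needle_re) gives a readable failure
            # message when the regular expression does not match.
            self.assertContainsRe(str(err), r'not compatible')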

When the constructor is deprecated, just say that the class is deprecated, not
the __init__ method - this works better with applyDeprecated in tests.
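
The point is the wording of the warning: when a deprecated class emits its
deprecation from __init__, the message should name the class itself.  A generic
sketch of the idea using the standard warnings module (not bzrlib's
symbol_versioning helpers):

    import warnings

    class ReadOnlyLockError(Exception):
        """Deprecated; kept only for compatibility."""

        def __init__(self, msg):
            # Warn about the class, not about ReadOnlyLockError.__init__, so
            # a test helper such as applyDeprecated can match the class name.
            warnings.warn(
                'ReadOnlyLockError was deprecated; use LockFailed instead.',
                DeprecationWarning, stacklevel=2)
            Exception.__init__(self, msg)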

Unified diff (lines beginning with '-' were removed, lines beginning with '+' were added):
@@ -1,4 +1,4 @@
-# Copyright (C) 2005, 2007, 2010 Canonical Ltd
+# Copyright (C) 2005, 2007 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,7 +12,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
 import os
 import re
@@ -22,22 +22,24 @@
 from bzrlib import (
     bzrdir,
     errors,
-    osutils,
     merge,
     repository,
-    versionedfile,
     )
 from bzrlib.branch import Branch
 from bzrlib.bzrdir import BzrDir
 from bzrlib.repofmt import knitrepo
+from bzrlib.symbol_versioning import (
+    zero_ninetyone,
+    )
 from bzrlib.tests import TestCaseWithTransport
+from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
 from bzrlib.tests.test_revision import make_branches
 from bzrlib.trace import mutter
 from bzrlib.upgrade import Convert
 from bzrlib.workingtree import WorkingTree
 
 # These tests are a bit old; please instead add new tests into
-# per_interrepository/ so they'll run on all relevant
+# interrepository_implementations/ so they'll run on all relevant
 # combinations.
 
 
@@ -46,19 +48,19 @@
 
 def fetch_steps(self, br_a, br_b, writable_a):
     """A foreign test method for testing fetch locally and remotely."""
-
+     
     # TODO RBC 20060201 make this a repository test.
     repo_b = br_b.repository
     self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
     self.assertTrue(repo_b.has_revision(br_a.revision_history()[2]))
     self.assertEquals(len(br_b.revision_history()), 7)
-    br_b.fetch(br_a, br_a.revision_history()[2])
+    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[2])[0], 0)
     # branch.fetch is not supposed to alter the revision history
     self.assertEquals(len(br_b.revision_history()), 7)
     self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
 
     # fetching the next revision up in sample data copies one revision
-    br_b.fetch(br_a, br_a.revision_history()[3])
+    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[3])[0], 1)
     self.assertTrue(repo_b.has_revision(br_a.revision_history()[3]))
     self.assertFalse(has_revision(br_a, br_b.revision_history()[6]))
     self.assertTrue(br_a.repository.has_revision(br_b.revision_history()[5]))
@@ -66,37 +68,40 @@
     # When a non-branch ancestor is missing, it should be unlisted...
     # as its not reference from the inventory weave.
     br_b4 = self.make_branch('br_4')
-    br_b4.fetch(br_b)
+    count, failures = br_b4.fetch(br_b)
+    self.assertEqual(count, 7)
+    self.assertEqual(failures, [])
 
-    writable_a.fetch(br_b)
+    self.assertEqual(writable_a.fetch(br_b)[0], 1)
     self.assertTrue(has_revision(br_a, br_b.revision_history()[3]))
     self.assertTrue(has_revision(br_a, br_b.revision_history()[4]))
-
+        
     br_b2 = self.make_branch('br_b2')
-    br_b2.fetch(br_b)
+    self.assertEquals(br_b2.fetch(br_b)[0], 7)
     self.assertTrue(has_revision(br_b2, br_b.revision_history()[4]))
     self.assertTrue(has_revision(br_b2, br_a.revision_history()[2]))
     self.assertFalse(has_revision(br_b2, br_a.revision_history()[3]))
 
     br_a2 = self.make_branch('br_a2')
-    br_a2.fetch(br_a)
+    self.assertEquals(br_a2.fetch(br_a)[0], 9)
     self.assertTrue(has_revision(br_a2, br_b.revision_history()[4]))
     self.assertTrue(has_revision(br_a2, br_a.revision_history()[3]))
    self.assertTrue(has_revision(br_a2, br_a.revision_history()[2]))
 
     br_a3 = self.make_branch('br_a3')
-    # pulling a branch with no revisions grabs nothing, regardless of
+    # pulling a branch with no revisions grabs nothing, regardless of 
     # whats in the inventory.
-    br_a3.fetch(br_a2)
+    self.assertEquals(br_a3.fetch(br_a2)[0], 0)
     for revno in range(4):
         self.assertFalse(
             br_a3.repository.has_revision(br_a.revision_history()[revno]))
-    br_a3.fetch(br_a2, br_a.revision_history()[2])
+    self.assertEqual(br_a3.fetch(br_a2, br_a.revision_history()[2])[0], 3)
     # pull the 3 revisions introduced by a@u-0-3
-    br_a3.fetch(br_a2, br_a.revision_history()[3])
-    # NoSuchRevision should be raised if the branch is missing the revision
+    fetched = br_a3.fetch(br_a2, br_a.revision_history()[3])[0]
+    self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
+    # InstallFailed should be raised if the branch is missing the revision
     # that was requested.
-    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')
+    self.assertRaises(errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
 
     # TODO: Test trying to fetch from a branch that points to a revision not
     # actually present in its repository.  Not every branch format allows you
@@ -105,7 +110,7 @@
     # every branch supports that.  -- mbp 20070814
 
     #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
-    # Note that this means - updating the weave when ghosts are filled in to
+    # Note that this means - updating the weave when ghosts are filled in to 
     # add the right parents.
 
 
@@ -118,11 +123,11 @@
 
     def test_fetch_self(self):
         wt = self.make_branch_and_tree('br')
-        wt.branch.fetch(wt.branch)
+        self.assertEqual(wt.branch.fetch(wt.branch), (0, []))
 
     def test_fetch_root_knit(self):
         """Ensure that knit2.fetch() updates the root knit
-
+        
         This tests the case where the root has a new revision, but there are no
         corresponding filename, parent, contents or other changes.
         """
@@ -144,35 +149,25 @@
         branch = self.make_branch('branch', format=knit2_format)
         branch.pull(tree.branch, stop_revision='rev1')
         repo = branch.repository
-        repo.lock_read()
-        try:
-            # Make sure fetch retrieved only what we requested
-            self.assertEqual({('tree-root', 'rev1'):()},
-                repo.texts.get_parent_map(
-                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
-        finally:
-            repo.unlock()
+        root_knit = repo.weave_store.get_weave('tree-root',
+                                                repo.get_transaction())
+        # Make sure fetch retrieved only what we requested
+        self.assertTrue('rev1' in root_knit)
+        self.assertTrue('rev2' not in root_knit)
         branch.pull(tree.branch)
+        root_knit = repo.weave_store.get_weave('tree-root',
+                                                repo.get_transaction())
         # Make sure that the next revision in the root knit was retrieved,
         # even though the text, name, parent_id, etc., were unchanged.
-        repo.lock_read()
-        try:
-            # Make sure fetch retrieved only what we requested
-            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
-                repo.texts.get_parent_map([('tree-root', 'rev2')]))
-        finally:
-            repo.unlock()
+        self.assertTrue('rev2' in root_knit)
 
     def test_fetch_incompatible(self):
         knit_tree = self.make_branch_and_tree('knit', format='knit')
         knit3_tree = self.make_branch_and_tree('knit3',
             format='dirstate-with-subtree')
         knit3_tree.commit('blah')
-        e = self.assertRaises(errors.IncompatibleRepositories,
-                              knit_tree.branch.fetch, knit3_tree.branch)
-        self.assertContainsRe(str(e),
-            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
-            r"different rich-root support")
+        self.assertRaises(errors.IncompatibleRepositories,
+                          knit_tree.branch.fetch, knit3_tree.branch)
 
 
 class TestMergeFetch(TestCaseWithTransport):
@@ -236,8 +231,6 @@
         br2 = Branch.open('br2')
         br1 = Branch.open('br1')
         wt2 = WorkingTree.open('br2').merge_from_branch(br1)
-        br2.lock_read()
-        self.addCleanup(br2.unlock)
         for rev_id, text in [('1-2', 'original from 1\n'),
                              ('1-3', 'agreement\n'),
                              ('2-1', 'contents in 2\n'),
@@ -247,272 +240,71 @@
                     rev_id).get_file_text('this-file-id'), text)
 
 
-class TestKnitToPackFetch(TestCaseWithTransport):
-
-    def find_get_record_stream(self, calls, expected_count=1):
-        """In a list of calls, find the last 'get_record_stream'.
-
-        :param expected_count: The number of calls we should exepect to find.
-            If a different number is found, an assertion is raised.
-        """
-        get_record_call = None
-        call_count = 0
-        for call in calls:
-            if call[0] == 'get_record_stream':
-                call_count += 1
-                get_record_call = call
-        self.assertEqual(expected_count, call_count)
-        return get_record_call
-
-    def test_fetch_with_deltas_no_delta_closure(self):
-        tree = self.make_branch_and_tree('source', format='dirstate')
-        target = self.make_repository('target', format='pack-0.92')
-        self.build_tree(['source/file'])
-        tree.set_root_id('root-id')
-        tree.add('file', 'file-id')
-        tree.commit('one', rev_id='rev-one')
-        source = tree.branch.repository
-        source.texts = versionedfile.RecordingVersionedFilesDecorator(
-                        source.texts)
-        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
-                        source.signatures)
-        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
-                        source.revisions)
-        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
-                        source.inventories)
-        # precondition
-        self.assertTrue(target._format._fetch_uses_deltas)
-        target.fetch(source, revision_id='rev-one')
-        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
-                          target._format._fetch_order, False),
-                         self.find_get_record_stream(source.texts.calls))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-          target._format._fetch_order, False),
-          self.find_get_record_stream(source.inventories.calls, 2))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, False),
-                         self.find_get_record_stream(source.revisions.calls))
-        # XXX: Signatures is special, and slightly broken. The
-        # standard item_keys_introduced_by actually does a lookup for every
-        # signature to see if it exists, rather than waiting to do them all at
-        # once at the end. The fetch code then does an all-at-once and just
-        # allows for some of them to be missing.
-        # So we know there will be extra calls, but the *last* one is the one
-        # we care about.
-        signature_calls = source.signatures.calls[-1:]
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, False),
-                         self.find_get_record_stream(signature_calls))
-
-    def test_fetch_no_deltas_with_delta_closure(self):
-        tree = self.make_branch_and_tree('source', format='dirstate')
-        target = self.make_repository('target', format='pack-0.92')
-        self.build_tree(['source/file'])
-        tree.set_root_id('root-id')
-        tree.add('file', 'file-id')
-        tree.commit('one', rev_id='rev-one')
-        source = tree.branch.repository
-        source.texts = versionedfile.RecordingVersionedFilesDecorator(
-                        source.texts)
-        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
-                        source.signatures)
-        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
-                        source.revisions)
-        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
-                        source.inventories)
-        # XXX: This won't work in general, but for the dirstate format it does.
-        self.overrideAttr(target._format, '_fetch_uses_deltas', False)
-        target.fetch(source, revision_id='rev-one')
-        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
-                          target._format._fetch_order, True),
-                         self.find_get_record_stream(source.texts.calls))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-            target._format._fetch_order, True),
-            self.find_get_record_stream(source.inventories.calls, 2))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, True),
-                         self.find_get_record_stream(source.revisions.calls))
-        # XXX: Signatures is special, and slightly broken. The
-        # standard item_keys_introduced_by actually does a lookup for every
-        # signature to see if it exists, rather than waiting to do them all at
-        # once at the end. The fetch code then does an all-at-once and just
-        # allows for some of them to be missing.
-        # So we know there will be extra calls, but the *last* one is the one
-        # we care about.
-        signature_calls = source.signatures.calls[-1:]
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, True),
-                         self.find_get_record_stream(signature_calls))
-
-    def test_fetch_revisions_with_deltas_into_pack(self):
-        # See BUG #261339, dev versions of bzr could accidentally create deltas
-        # in revision texts in knit branches (when fetching from packs). So we
-        # ensure that *if* a knit repository has a delta in revisions, that it
-        # gets properly expanded back into a fulltext when stored in the pack
-        # file.
-        tree = self.make_branch_and_tree('source', format='dirstate')
-        target = self.make_repository('target', format='pack-0.92')
-        self.build_tree(['source/file'])
-        tree.set_root_id('root-id')
-        tree.add('file', 'file-id')
-        tree.commit('one', rev_id='rev-one')
-        # Hack the KVF for revisions so that it "accidentally" allows a delta
-        tree.branch.repository.revisions._max_delta_chain = 200
-        tree.commit('two', rev_id='rev-two')
-        source = tree.branch.repository
-        # Ensure that we stored a delta
-        source.lock_read()
-        self.addCleanup(source.unlock)
-        record = source.revisions.get_record_stream([('rev-two',)],
-            'unordered', False).next()
-        self.assertEqual('knit-delta-gz', record.storage_kind)
-        target.fetch(tree.branch.repository, revision_id='rev-two')
-        # The record should get expanded back to a fulltext
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        record = target.revisions.get_record_stream([('rev-two',)],
-            'unordered', False).next()
-        self.assertEqual('knit-ft-gz', record.storage_kind)
-
-    def test_fetch_with_fallback_and_merge(self):
-        builder = self.make_branch_builder('source', format='pack-0.92')
-        builder.start_series()
-        # graph
-        #   A
-        #   |\
-        #   B C
-        #   | |
-        #   | D
-        #   | |
-        #   | E
-        #    \|
-        #     F
-        # A & B are present in the base (stacked-on) repository, A-E are
-        # present in the source.
-        # This reproduces bug #304841
-        # We need a large enough inventory that total size of compressed deltas
-        # is shorter than the size of a compressed fulltext. We have to use
-        # random ids because otherwise the inventory fulltext compresses too
-        # well and the deltas get bigger.
-        to_add = [
-            ('add', ('', 'TREE_ROOT', 'directory', None))]
-        for i in xrange(10):
-            fname = 'file%03d' % (i,)
-            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
-            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
-        builder.build_snapshot('A', None, to_add)
-        builder.build_snapshot('B', ['A'], [])
-        builder.build_snapshot('C', ['A'], [])
-        builder.build_snapshot('D', ['C'], [])
-        builder.build_snapshot('E', ['D'], [])
-        builder.build_snapshot('F', ['E', 'B'], [])
-        builder.finish_series()
-        source_branch = builder.get_branch()
-        source_branch.bzrdir.sprout('base', revision_id='B')
-        target_branch = self.make_branch('target', format='1.6')
-        target_branch.set_stacked_on_url('../base')
-        source = source_branch.repository
-        source.lock_read()
-        self.addCleanup(source.unlock)
-        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
-                        source.inventories,
-                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
-                                      ('F',): 3})
-        # Ensure that the content is yielded in the proper order, and given as
-        # the expected kinds
-        records = [(record.key, record.storage_kind)
-                   for record in source.inventories.get_record_stream(
-                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
-        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
-                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
-                          records)
-
-        target_branch.lock_write()
-        self.addCleanup(target_branch.unlock)
-        target = target_branch.repository
-        target.fetch(source, revision_id='F')
-        # 'C' should be expanded to a fulltext, but D and E should still be
-        # deltas
-        stream = target.inventories.get_record_stream(
-            [('C',), ('D',), ('E',), ('F',)],
-            'unordered', False)
-        kinds = dict((record.key, record.storage_kind) for record in stream)
-        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
-                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
-                         kinds)
-
-
-class Test1To2Fetch(TestCaseWithTransport):
-    """Tests for Model1To2 failure modes"""
-
-    def make_tree_and_repo(self):
-        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
-        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
-        self.repo.lock_write()
-        self.addCleanup(self.repo.unlock)
-
-    def do_fetch_order_test(self, first, second):
-        """Test that fetch works no matter what the set order of revision is.
-
-        This test depends on the order of items in a set, which is
-        implementation-dependant, so we test A, B and then B, A.
-        """
-        self.make_tree_and_repo()
-        self.tree.commit('Commit 1', rev_id=first)
-        self.tree.commit('Commit 2', rev_id=second)
-        self.repo.fetch(self.tree.branch.repository, second)
-
-    def test_fetch_order_AB(self):
-        """See do_fetch_order_test"""
-        self.do_fetch_order_test('A', 'B')
-
-    def test_fetch_order_BA(self):
-        """See do_fetch_order_test"""
-        self.do_fetch_order_test('B', 'A')
-
-    def get_parents(self, file_id, revision_id):
-        self.repo.lock_read()
-        try:
-            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
-            return parent_map[(file_id, revision_id)]
-        finally:
-            self.repo.unlock()
-
-    def test_fetch_ghosts(self):
-        self.make_tree_and_repo()
-        self.tree.commit('first commit', rev_id='left-parent')
-        self.tree.add_parent_tree_id('ghost-parent')
-        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
-        fork.commit('not a ghost', rev_id='not-ghost-parent')
-        self.tree.branch.repository.fetch(fork.branch.repository,
-                                     'not-ghost-parent')
-        self.tree.add_parent_tree_id('not-ghost-parent')
-        self.tree.commit('second commit', rev_id='second-id')
-        self.repo.fetch(self.tree.branch.repository, 'second-id')
-        root_id = self.tree.get_root_id()
-        self.assertEqual(
-            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
-            self.get_parents(root_id, 'second-id'))
-
-    def make_two_commits(self, change_root, fetch_twice):
-        self.make_tree_and_repo()
-        self.tree.commit('first commit', rev_id='first-id')
-        if change_root:
-            self.tree.set_root_id('unique-id')
-        self.tree.commit('second commit', rev_id='second-id')
-        if fetch_twice:
-            self.repo.fetch(self.tree.branch.repository, 'first-id')
-        self.repo.fetch(self.tree.branch.repository, 'second-id')
-
-    def test_fetch_changed_root(self):
-        self.make_two_commits(change_root=True, fetch_twice=False)
-        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
-
-    def test_two_fetch_changed_root(self):
-        self.make_two_commits(change_root=True, fetch_twice=True)
-        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
-
-    def test_two_fetches(self):
-        self.make_two_commits(change_root=False, fetch_twice=True)
-        self.assertEqual((('TREE_ROOT', 'first-id'),),
-            self.get_parents('TREE_ROOT', 'second-id'))
+class TestHttpFetch(TestCaseWithWebserver):
+    # FIXME RBC 20060124 this really isn't web specific, perhaps an
+    # instrumented readonly transport? Can we do an instrumented
+    # adapter and use self.get_readonly_url ?
+
+    def test_fetch(self):
+        #highest indices a: 5, b: 7
+        br_a, br_b = make_branches(self)
+        br_rem_a = Branch.open(self.get_readonly_url('branch1'))
+        fetch_steps(self, br_rem_a, br_b, br_a)
+
+    def _count_log_matches(self, target, logs):
+        """Count the number of times the target file pattern was fetched in an http log"""
+        get_succeeds_re = re.compile(
+            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
+            (     target,                    bzrlib.__version__))
+        c = 0
+        for line in logs:
+            if get_succeeds_re.match(line):
+                c += 1
+        return c
+
+    def test_weaves_are_retrieved_once(self):
+        self.build_tree(("source/", "source/file", "target/"))
+        wt = self.make_branch_and_tree('source')
+        branch = wt.branch
+        wt.add(["file"], ["id"])
+        wt.commit("added file")
+        print >>open("source/file", 'w'), "blah"
+        wt.commit("changed file")
+        target = BzrDir.create_branch_and_repo("target/")
+        source = Branch.open(self.get_readonly_url("source/"))
+        self.assertEqual(target.fetch(source), (2, []))
+        # this is the path to the literal file. As format changes 
+        # occur it needs to be updated. FIXME: ask the store for the
+        # path.
+        self.log("web server logs are:")
+        http_logs = self.get_readonly_server().logs
+        self.log('\n'.join(http_logs))
+        # unfortunately this log entry is branch format specific. We could 
+        # factor out the 'what files does this format use' to a method on the 
+        # repository, which would let us to this generically. RBC 20060419
+        self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
+        self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
+        self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
+        # this r-h check test will prevent regressions, but it currently already 
+        # passes, before the patch to cache-rh is applied :[
+        self.assertTrue(1 >= self._count_log_matches('revision-history',
+                                                     http_logs))
+        self.assertTrue(1 >= self._count_log_matches('last-revision',
+                                                     http_logs))
+        # FIXME naughty poking in there.
+        self.get_readonly_server().logs = []
+        # check there is nothing more to fetch
+        source = Branch.open(self.get_readonly_url("source/"))
+        self.assertEqual(target.fetch(source), (0, []))
+        # should make just two requests
+        http_logs = self.get_readonly_server().logs
+        self.log("web server logs are:")
+        self.log('\n'.join(http_logs))
+        self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
+        self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
+        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
+        self.assertTrue(1 >= self._count_log_matches('revision-history',
+                                                     http_logs))
+        self.assertTrue(1 >= self._count_log_matches('last-revision',
+                                                     http_logs))
+        self.assertEqual(4, len(http_logs))