# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# Remaining to do is to figure out if get_graph should return a simple
# map, or a graph object of some kind.


"""Versioned text file storage api."""

from copy import deepcopy
from cStringIO import StringIO
from unittest import TestSuite

import bzrlib.errors as errors
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    multiparent,
    revision,
    )
from bzrlib.graph import Graph
from bzrlib.transport.memory import MemoryTransport
""")
from bzrlib.inter import InterObject
from bzrlib.registry import Registry
from bzrlib.symbol_versioning import *
from bzrlib.textmerge import TextMerge
from bzrlib.tsort import topo_sort


adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'bzrlib.knit',
    'DeltaPlainToFullText')
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'bzrlib.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
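
# Illustrative sketch (an assumption, not part of the original module): the
# registry maps (native storage kind, desired storage kind) to an adapter
# class in bzrlib.knit, so a record-stream consumer might resolve one with
# something like:
#
#   adapter_class = adapter_registry.get(('knit-annotated-ft-gz', 'fulltext'))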


class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = None
        self.key = None
        self.parents = None


class AbsentContentFactory(object):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Create a ContentFactory."""
        self.sha1 = None
        self.storage_kind = 'absent'
        self.key = key
        self.parents = None


def filter_absent(record_stream):
    """Adapt a record stream to remove absent records."""
    for record in record_stream:
        if record.storage_kind != 'absent':
            yield record


class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files, keeping track
    of the originating version for each line.

    To clients the "lines" of the file are represented as a list of strings.
    These strings will typically have terminal newline characters, but this
    is not required.  In particular files commonly do not have a newline at
    the end of the file.

    Texts are identified by a version-id string.
    """

    def __init__(self, access_mode):
        self.finished = False
        self._access_mode = access_mode

    @staticmethod
    def check_not_reserved_id(version_id):
        revision.check_not_reserved_id(version_id)

    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport."""
        raise NotImplementedError(self.copy_to)

    @deprecated_method(zero_eight)
    def names(self):
        """Return a list of all the versions in this versioned file.

        Please use versionedfile.versions() now.
        """
        return self.versions()

    def versions(self):
        """Return an unsorted list of versions."""
        raise NotImplementedError(self.versions)

    def has_ghost(self, version_id):
        """Returns whether version is present as a ghost."""
        raise NotImplementedError(self.has_ghost)

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the data content of the
            stream, not in the emitted records). This guarantees that
            'fulltext' can be used successfully on every record.
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)
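
    # Illustrative sketch (an assumption, not from the original source): a
    # caller would typically drain a record stream and skip ghosts with the
    # module-level filter_absent() helper; wanted_versions and handle() are
    # hypothetical names:
    #
    #   stream = vf.get_record_stream(wanted_versions, 'topological', True)
    #   for record in filter_absent(stream):
    #       handle(record.key, record.storage_kind)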

    def add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Add a text to the versioned file via a pregenerated delta.

        :param version_id: The version id being added.
        :param parents: The parents of the version_id.
        :param delta_parent: The parent this delta was created against.
        :param sha1: The sha1 of the full text.
        :param delta: The delta instructions. See get_delta for details.
        """
        self._check_write_ok()
        if self.has_version(version_id):
            raise errors.RevisionAlreadyPresent(version_id, self)
        return self._add_delta(version_id, parents, delta_parent, sha1, noeol, delta)

    def _add_delta(self, version_id, parents, delta_parent, sha1, noeol, delta):
        """Class specific routine to add a delta.

        This generic version simply applies the delta to the delta_parent and
        then inserts it.
        """
        # strip annotation from delta
        new_delta = []
        for start, stop, delta_len, delta_lines in delta:
            new_delta.append((start, stop, delta_len, [text for origin, text in delta_lines]))
        if delta_parent is not None:
            parent_full = self.get_lines(delta_parent)
        else:
            parent_full = []
        new_full = self._apply_delta(parent_full, new_delta)
        # it's impossible to have noeol on an empty file
        if noeol and new_full[-1][-1] == '\n':
            new_full[-1] = new_full[-1][:-1]
        self.add_lines(version_id, parents, new_full)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminated \n. If the lines list does meet this constraint the add
            routine may error or may succeed - but you will be unable to read
            the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future add_lines calls in the parent_texts dictionary.
        """
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content, left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)
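
    # Illustrative sketch (an assumption, not from the original source):
    # callers usually thread the opaque parent_texts results through
    # successive add_lines calls, in topological order, so backends can reuse
    # delta state; texts_in_topological_order is a hypothetical iterable:
    #
    #   parent_texts = {}
    #   for version_id, parents, lines in texts_in_topological_order:
    #       _, _, opaque = vf.add_lines(version_id, parents, lines,
    #           parent_texts=parent_texts)
    #       parent_texts[version_id] = opaque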

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_write_ok(self):
        """Is the versioned file marked as 'finished' ? Raise if it is."""
        if self.finished:
            raise errors.OutSideTransaction()
        if self._access_mode != 'w':
            raise errors.ReadOnlyObjectDirtiedError(self)

    def clear_cache(self):
        """Remove any data cached in the versioned file object."""

    def clone_text(self, new_version_id, old_version_id, parents):
        """Add an identical text to old_version_id as new_version_id.

        Must raise RevisionNotPresent if the old version or any of the
        parents are not present in file history.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history."""
        self._check_write_ok()
        return self._clone_text(new_version_id, old_version_id, parents)

    def _clone_text(self, new_version_id, old_version_id, parents):
        """Helper function to do the _clone_text work."""
        raise NotImplementedError(self.clone_text)

    def create_empty(self, name, transport, mode=None):
        """Create a new versioned file of this exact type.

        :param name: the file name
        :param transport: the transport
        :param mode: optional file mode.
        """
        raise NotImplementedError(self.create_empty)

    def fix_parents(self, version, new_parents):
        """Fix the parents list for version.

        This is done by appending a new version to the index
        with identical data except for the parents list.
        The parents list must be a superset of the current
        list.
        """
        self._check_write_ok()
        return self._fix_parents(version, new_parents)

    def _fix_parents(self, version, new_parents):
        """Helper for fix_parents."""
        raise NotImplementedError(self.fix_parents)

    def get_delta(self, version):
        """Get a delta for constructing version from some other version.

        :return: (delta_parent, sha1, noeol, delta)
            Where delta_parent is a version id or None to indicate no parent.
        """
        raise NotImplementedError(self.get_delta)

    def get_deltas(self, versions):
        """Get multiple deltas at once for constructing versions.

        :return: dict(version_id:(delta_parent, sha1, noeol, delta))
            Where delta_parent is a version id or None to indicate no parent, and
            version_id is the version_id created by that delta.
        """
        result = {}
        for version in versions:
            result[version] = self.get_delta(version)
        return result

    def get_suffixes(self):
        """Return the file suffixes associated with this versioned file."""
        raise NotImplementedError(self.get_suffixes)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_format_signature(self):
        """Get a text description of the data encoding in this file."""
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                parents = [lines[p] for p in parent_map[version_id] if p in
                           knit_versions]
            except KeyError:
                raise errors.RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                                                          parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                         left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
                                    self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        for (version, parent_ids, expected_sha1, mpdiff), sha1 in\
            zip(records, self.get_sha1s(versions)):
            if expected_sha1 != sha1:
                raise errors.VersionedFileInvalidChecksum(version)
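
    # Illustrative sketch (an assumption, not from the original source):
    # make_mpdiffs() and add_mpdiffs() are intended to round-trip, so copying
    # texts between two versioned files (source_vf and target_vf are
    # hypothetical names) could look roughly like:
    #
    #   version_ids = list(source_vf.versions())
    #   diffs = source_vf.make_mpdiffs(version_ids)
    #   parent_map = source_vf.get_parent_map(version_ids)
    #   sha1s = source_vf.get_sha1s(version_ids)
    #   records = [(v, parent_map[v], s, d)
    #              for v, s, d in zip(version_ids, sha1s, diffs)]
    #   target_vf.add_mpdiffs(records)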

    def get_sha1s(self, version_ids):
        """Get the stored sha1 sums for the given revisions.

        :param version_ids: The names of the versions to lookup
        :return: a list of sha1s in order according to the version_ids
        """
        raise NotImplementedError(self.get_sha1s)

    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_parents_with_ghosts)

    def annotate_iter(self, version_id):
        """Yield list of (version-id, line) pairs for the specified
        version.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.
        """
        raise NotImplementedError(self.annotate_iter)

    def get_parents(self, version_id):
        """Return version names for parents of a version.

        Must raise RevisionNotPresent if version is not present in
        file history.
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :raise RevisionNotPresent: If the given version is
            not present in file history.
        """
        return list(self.annotate_iter(version_id))

    def _apply_delta(self, lines, delta):
        """Apply delta to lines."""
        lines = list(lines)
        offset = 0
        for start, end, count, delta_lines in delta:
            lines[offset+start:offset+end] = delta_lines
            offset = offset + (start - end) + count
        return lines
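
    # Illustrative worked example (an assumption, for clarity only): each
    # delta hunk is (start, end, count, lines) against the parent text, so
    # replacing parent line 1 ('b\n') with two new lines looks like:
    #
    #   parent = ['a\n', 'b\n', 'c\n']
    #   delta = [(1, 2, 2, ['x\n', 'y\n'])]
    #   # self._apply_delta(parent, delta) == ['a\n', 'x\n', 'y\n', 'c\n']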

    @deprecated_method(one_five)
    def join(self, other, pb=None, msg=None, version_ids=None,
             ignore_missing=False):
        """Integrate versions from other into this versioned file."""
        self._check_write_ok()
        return InterVersionedFile.get(other, self).join(
            pb, msg, version_ids, ignore_missing)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)
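
    # Illustrative sketch (an assumption, not from the original source): the
    # iterator is handy for building per-line indices without extracting
    # every fulltext:
    #
    #   appearances = {}
    #   for line, version_id in vf.iter_lines_added_or_present_in_versions():
    #       appearances.setdefault(line, set()).add(version_id)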

    def transaction_finished(self):
        """The transaction that this file was opened in has finished.

        This records self.finished = True and should cause all mutating
        operations to error.
        """
        self.finished = True

    @deprecated_method(zero_eight)
    def walk(self, version_ids=None):
        """Walk the versioned file as a weave-like structure, for
        versions relative to version_ids.  Yields sequence of (lineno,
        insert, deletes, text) for each relevant line.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the file history.

        :param version_ids: the version_ids to walk with respect to. If not
            supplied the entire weave-like structure is walked.

        walk is deprecated in favour of iter_lines_added_or_present_in_versions
        """
        raise NotImplementedError(self.walk)

    @deprecated_method(zero_eight)
    def iter_names(self):
        """Walk the names list."""
        return iter(self.versions())

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        new-a       Newly inserted in a
        new-b       Newly inserted in b
        unchanged   Alive in both a and b (possibly created in both)
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        inc_a = set(self.get_ancestry([ver_a]))
        inc_b = set(self.get_ancestry([ver_b]))
        inc_c = inc_a & inc_b

        for lineno, insert, deleteset, line in self.walk([ver_a, ver_b]):
            if deleteset & inc_c:
                # killed in parent; can't be in either a or b
                # not relevant to our work
                yield 'killed-base', line
            elif insert in inc_c:
                # was inserted in base
                killed_a = bool(deleteset & inc_a)
                killed_b = bool(deleteset & inc_b)
                if killed_a and killed_b:
                    yield 'killed-both', line
                elif killed_a:
                    yield 'killed-a', line
                elif killed_b:
                    yield 'killed-b', line
                else:
                    yield 'unchanged', line
            elif insert in inc_a:
                if deleteset & inc_a:
                    yield 'ghost-a', line
                else:
                    # new in A; not in B
                    yield 'new-a', line
            elif insert in inc_b:
                if deleteset & inc_b:
                    yield 'ghost-b', line
                else:
                    yield 'new-b', line
            else:
                # not in either revision
                yield 'irrelevant', line

        yield 'unchanged', ''           # terminator

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]
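
    # Illustrative sketch (an assumption, for clarity only): a plan is just an
    # iterable of (state, line) pairs, so a tiny two-sided insertion can be
    # merged directly:
    #
    #   plan = [('unchanged', 'a\n'), ('new-a', 'b\n'), ('new-b', 'c\n')]
    #   merged_lines = vf.weave_merge(plan)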


class RecordingVersionedFileDecorator(object):
    """A minimal versioned file that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to its value.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFileDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        self.calls = []

    def get_lines(self, version_ids):
        self.calls.append(("get_lines", version_ids))
        return self._backing_vf.get_lines(version_ids)
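
    # Illustrative sketch (an assumption, not from the original source): tests
    # would wrap a real versioned file and assert on the recorded calls;
    # real_vf and code_under_test are hypothetical names:
    #
    #   recorder = RecordingVersionedFileDecorator(real_vf)
    #   code_under_test(recorder)
    #   assert ("get_lines", "some-version-id") in recorder.calls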


class _PlanMergeVersionedFile(object):
    """A VersionedFile for uncommitted and committed texts.

    It is intended to allow merges to be planned with working tree texts.
    It implements only the small part of the VersionedFile interface used by
    PlanMerge. It falls back to multiple versionedfiles for data not stored in
    _PlanMergeVersionedFile itself.
    """

    def __init__(self, file_id, fallback_versionedfiles=None):
        """Create a _PlanMergeVersionedFile.

        :param file_id: Used when raising exceptions.
        :param fallback_versionedfiles: If supplied, the set of fallbacks to
            use.  Otherwise, _PlanMergeVersionedFile.fallback_versionedfiles
            can be appended to later.
        """
        self._file_id = file_id
        if fallback_versionedfiles is None:
            self.fallback_versionedfiles = []
        else:
            self.fallback_versionedfiles = fallback_versionedfiles
        self._parents = {}
        self._lines = {}

    def plan_merge(self, ver_a, ver_b, base=None):
        """See VersionedFile.plan_merge"""
        from bzrlib.merge import _PlanMerge
        if base is None:
            return _PlanMerge(ver_a, ver_b, self).plan_merge()
        old_plan = list(_PlanMerge(ver_a, base, self).plan_merge())
        new_plan = list(_PlanMerge(ver_a, ver_b, self).plan_merge())
        return _PlanMerge._subtract_plans(old_plan, new_plan)

    def plan_lca_merge(self, ver_a, ver_b, base=None):
        from bzrlib.merge import _PlanLCAMerge
        graph = self._get_graph()
        new_plan = _PlanLCAMerge(ver_a, ver_b, self, graph).plan_merge()
        if base is None:
            return new_plan
        old_plan = _PlanLCAMerge(ver_a, base, self, graph).plan_merge()
        return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))

    def add_lines(self, version_id, parents, lines):
        """See VersionedFile.add_lines

        Lines are added locally, not to fallback versionedfiles.  Also, ghosts
        are permitted.  Only reserved ids are permitted.
        """
        if not revision.is_reserved_id(version_id):
            raise ValueError('Only reserved ids may be used')
        if parents is None:
            raise ValueError('Parents may not be None')
        if lines is None:
            raise ValueError('Lines may not be None')
        self._parents[version_id] = tuple(parents)
        self._lines[version_id] = lines

    def get_lines(self, version_id):
        """See VersionedFile.get_lines"""
        lines = self._lines.get(version_id)
        if lines is not None:
            return lines
        for versionedfile in self.fallback_versionedfiles:
            try:
                return versionedfile.get_lines(version_id)
            except errors.RevisionNotPresent:
                continue
        else:
            raise errors.RevisionNotPresent(version_id, self._file_id)

    def get_ancestry(self, version_id, topo_sorted=False):
        """See VersionedFile.get_ancestry.

        Note that this implementation assumes that if a VersionedFile can
        answer get_ancestry at all, it can give an authoritative answer.  In
        fact, ghosts can invalidate this assumption.  But it's good enough
        99% of the time, and far cheaper/simpler.

        Also note that the results of this version are never topologically
        sorted, and are a set.
        """
        if topo_sorted:
            raise ValueError('This implementation does not provide sorting')
        parents = self._parents.get(version_id)
        if parents is None:
            for vf in self.fallback_versionedfiles:
                try:
                    return vf.get_ancestry(version_id, topo_sorted=False)
                except errors.RevisionNotPresent:
                    continue
            else:
                raise errors.RevisionNotPresent(version_id, self._file_id)
        ancestry = set([version_id])
        for parent in parents:
            ancestry.update(self.get_ancestry(parent, topo_sorted=False))
        return ancestry

    def get_parent_map(self, version_ids):
        """See VersionedFile.get_parent_map"""
        result = {}
        pending = set(version_ids)
        for key in version_ids:
            try:
                result[key] = self._parents[key]
            except KeyError:
                pass
        pending = pending - set(result.keys())
        for versionedfile in self.fallback_versionedfiles:
            parents = versionedfile.get_parent_map(pending)
            result.update(parents)
            pending = pending - set(parents.keys())
            if not pending:
                return result
        return result

    def _get_graph(self):
        from bzrlib.graph import (
            DictParentsProvider,
            Graph,
            _StackedParentsProvider,
            )
        from bzrlib.repofmt.knitrepo import _KnitParentsProvider
        parent_providers = [DictParentsProvider(self._parents)]
        for vf in self.fallback_versionedfiles:
            parent_providers.append(_KnitParentsProvider(vf))
        return Graph(_StackedParentsProvider(parent_providers))


class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        lines_a = []
        lines_b = []
        ch_a = ch_b = False
        # TODO: Show some version information (e.g. author, date) on
        # conflicted regions.

        def outstanding_struct():
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                # one-sided change:
                yield (lines_a,)
            elif ch_b and not ch_a:
                yield (lines_b,)
            elif lines_a == lines_b:
                yield (lines_a,)
            else:
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize.  However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
        for struct in outstanding_struct():
            yield struct


class InterVersionedFile(InterObject):
    """Operations taking place between two VersionedFiles.

    Its instances have methods like join, and contain references to the
    source and target versionedfiles these operations can be carried out on.
    """

    _optimisers = []
    """The available optimised InterVersionedFile types."""

    def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False):
        """Integrate versions from self.source into self.target."""
        # TODO: remove parent texts when they are not relevant any more for
        # memory pressure reduction. RBC 20060313
        # pb.update('Converting versioned data', 0, len(order))
        # deltas = self.source.get_deltas(order)
        for index, version in enumerate(order):
            pb.update('Converting versioned data', index, total)
            if version in target:
                continue
            _, _, parent_text = target.add_lines(version,
                                                 self.source.get_parents(version),
                                                 self.source.get_lines(version),
                                                 parent_texts=parent_texts)
            parent_texts[version] = parent_text
            #delta_parent, sha1, noeol, delta = deltas[version]
            #target.add_delta(version,
            #                 self.source.get_parents(version),
            #                 delta_parent,
            #                 sha1,
            #                 noeol,
            #                 delta)
            #target.get_lines(version)

        # this should hit the native code path for target
        if target is not self.target:
            return self.target.join(temp_source,
                                    pb,
                                    msg,
                                    version_ids,
                                    ignore_missing)

    def _get_source_version_ids(self, version_ids, ignore_missing):
        """Determine the version ids to be used from self.source.

        :param version_ids: The caller-supplied version ids to check. (None
            for all). If None is in version_ids, it is stripped.
        :param ignore_missing: if True, remove missing ids from the version
            list. If False, raise RevisionNotPresent on
            a missing version id.
        :return: A set of version ids.
        """
        if version_ids is None:
            # None cannot be in source.versions
            return set(self.source.versions())
        else:
            if ignore_missing:
                return set(self.source.versions()).intersection(set(version_ids))
            else:
                new_version_ids = set()
                for version in version_ids:
                    if version is None:
                        continue
                    if not self.source.has_version(version):
                        raise errors.RevisionNotPresent(version, str(self.source))
                    else:
                        new_version_ids.add(version)
                return new_version_ids


class InterVersionedFileTestProviderAdapter(object):
    """A tool to generate a suite testing multiple inter versioned-file classes.

    This is done by copying the test once for each interversionedfile provider
    and injecting the transport_server, transport_readonly_server,
    versionedfile_factory and versionedfile_factory_to classes into each copy.
    Each copy is also given a new id() to make it easy to identify.
    """

    def __init__(self, transport_server, transport_readonly_server, formats):
        self._transport_server = transport_server
        self._transport_readonly_server = transport_readonly_server
        self._formats = formats

    def adapt(self, test):
        result = TestSuite()
        for (interversionedfile_class,
             versionedfile_factory,
             versionedfile_factory_to) in self._formats:
            new_test = deepcopy(test)
            new_test.transport_server = self._transport_server
            new_test.transport_readonly_server = self._transport_readonly_server
            new_test.interversionedfile_class = interversionedfile_class
            new_test.versionedfile_factory = versionedfile_factory
            new_test.versionedfile_factory_to = versionedfile_factory_to
            def make_new_test_id():
                new_id = "%s(%s)" % (new_test.id(), interversionedfile_class.__name__)
                return lambda: new_id
            new_test.id = make_new_test_id()
            result.addTest(new_test)
        return result
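
    # Illustrative sketch (an assumption, not from the original source): a
    # test loader multiplies one test across every registered combination
    # roughly like this; server1, server2 and base_test are hypothetical:
    #
    #   adapter = InterVersionedFileTestProviderAdapter(
    #       server1, server2, default_test_list())
    #   suite = adapter.adapt(base_test)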


def default_test_list():
    """Generate the default list of interversionedfile permutations to test."""
    from bzrlib.weave import WeaveFile
    from bzrlib.knit import KnitVersionedFile
    result = []
    # test the fallback InterVersionedFile from weave to annotated knits
    result.append((InterVersionedFile,
                   WeaveFile,
                   KnitVersionedFile))
    for optimiser in InterVersionedFile._optimisers:
        result.append((optimiser,
                       optimiser._matching_file_factory,
                       optimiser._matching_file_factory
                       ))
    # if there are specific combinations we want to use, we can add them
    # here.
    return result