1
# Copyright (C) 2005, 2006 Canonical Ltd
4
# Johan Rydberg <jrydberg@gnu.org>
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14
# GNU General Public License for more details.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
18
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20
"""Versioned text file storage api."""
22
from bzrlib.lazy_import import lazy_import
23
lazy_import(globals(), """
33
from bzrlib.graph import Graph
34
from bzrlib.transport.memory import MemoryTransport
37
from cStringIO import StringIO
39
from bzrlib.inter import InterObject
40
from bzrlib.registry import Registry
41
from bzrlib.symbol_versioning import *
42
from bzrlib.textmerge import TextMerge
45
# Registry of adapters that convert a record from one storage kind to
# another.  Keys are (source_kind, target_kind) tuples; values are lazily
# loaded adapter classes from bzrlib.knit.
adapter_registry = Registry()
adapter_registry.register_lazy(('knit-delta-gz', 'fulltext'), 'bzrlib.knit',
    'DeltaPlainToFullText')
# NOTE(review): this registration was truncated in the damaged source; the
# target class name 'FTPlainToFullText' is reconstructed by symmetry with the
# Delta* entries — confirm against bzrlib.knit.
adapter_registry.register_lazy(('knit-ft-gz', 'fulltext'), 'bzrlib.knit',
    'FTPlainToFullText')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'knit-delta-gz'),
    'bzrlib.knit', 'DeltaAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-delta-gz', 'fulltext'),
    'bzrlib.knit', 'DeltaAnnotatedToFullText')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'knit-ft-gz'),
    'bzrlib.knit', 'FTAnnotatedToUnannotated')
adapter_registry.register_lazy(('knit-annotated-ft-gz', 'fulltext'),
    'bzrlib.knit', 'FTAnnotatedToFullText')
class ContentFactory(object):
    """Abstract interface for insertion and retrieval from a VersionedFile.

    :ivar sha1: None, or the sha1 of the content fulltext.
    :ivar storage_kind: The native storage kind of this factory. One of
        'mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', 'knit-ft',
        'knit-delta', 'fulltext', 'knit-annotated-ft-gz',
        'knit-annotated-delta-gz', 'knit-ft-gz', 'knit-delta-gz'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: A tuple of parent keys for self.key. If the object has
        no parent information, None (as opposed to () for an empty list of
        parents).
    """

    def __init__(self):
        """Create a ContentFactory."""
        # All attributes start unset; concrete factories fill them in.
        self.sha1 = None
        self.storage_kind = None
        self.key = None
        self.parents = None
class AbsentContentFactory(object):
    """A placeholder content factory for unavailable texts.

    :ivar sha1: None.
    :ivar storage_kind: 'absent'.
    :ivar key: The key of this content. Each key is a tuple with a single
        string in it.
    :ivar parents: None.
    """

    def __init__(self, key):
        """Create an AbsentContentFactory for key."""
        self.sha1 = None
        self.storage_kind = 'absent'
        self.key = key
        self.parents = None
def filter_absent(record_stream):
    """Adapt a record stream to remove absent records.

    :param record_stream: An iterable of ContentFactory-like records.
    :return: A generator yielding every record whose storage_kind is not
        'absent'.
    """
    for record in record_stream:
        if record.storage_kind != 'absent':
            yield record
class VersionedFile(object):
    """Versioned text file storage.

    A versioned file manages versions of line-based text files,
    keeping track of the originating version for each line.

    To clients the "lines" of the file are represented as a list of
    strings. These strings will typically have terminal newline
    characters, but this is not required.  In particular files commonly
    do not have a newline at the end of the file.

    Texts are identified by a version-id string.
    """

    @staticmethod
    def check_not_reserved_id(version_id):
        """Raise ReservedId if version_id is a reserved identifier."""
        revision.check_not_reserved_id(version_id)

    def copy_to(self, name, transport):
        """Copy this versioned file to name on transport."""
        raise NotImplementedError(self.copy_to)

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        raise NotImplementedError(self.get_record_stream)

    def has_version(self, version_id):
        """Returns whether version is present."""
        raise NotImplementedError(self.has_version)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        raise NotImplementedError

    def add_lines(self, version_id, parents, lines, parent_texts=None,
        left_matching_blocks=None, nostore_sha=None, random_id=False,
        check_content=True):
        """Add a single text on top of the versioned file.

        Must raise RevisionAlreadyPresent if the new version is
        already present in file history.

        Must raise RevisionNotPresent if any of the given parents are
        not present in file history.

        :param lines: A list of lines. Each line must be a bytestring. And all
            of them except the last must be terminated with \n and contain no
            other \n's. The last line may either contain no \n's or a single
            terminated \n. If the lines list does meet this constraint the add
            routine may error or may succeed - but you will be unable to read
            the data back accurately. (Checking the lines have been split
            correctly is expensive and extremely unlikely to catch bugs so it
            is not done at runtime unless check_content is True.)
        :param parent_texts: An optional dictionary containing the opaque
            representations of some or all of the parents of version_id to
            allow delta optimisations.  VERY IMPORTANT: the texts must be those
            returned by add_lines or data corruption can be caused.
        :param left_matching_blocks: a hint about which areas are common
            between the text and its left-hand-parent.  The format is
            the SequenceMatcher.get_matching_blocks format.
        :param nostore_sha: Raise ExistingContent and do not add the lines to
            the versioned file if the digest of the lines matches this.
        :param random_id: If True a random id has been selected rather than
            an id determined by some deterministic process such as a converter
            from a foreign VCS. When True the backend may choose not to check
            for uniqueness of the resulting key within the versioned file, so
            this should only be done when the result is expected to be unique
            anyway.
        :param check_content: If True, the lines supplied are verified to be
            bytestrings that are correctly formed lines.
        :return: The text sha1, the number of bytes in the text, and an opaque
            representation of the inserted version which can be provided
            back to future add_lines calls in the parent_texts dictionary.
        """
        self._check_write_ok()
        return self._add_lines(version_id, parents, lines, parent_texts,
            left_matching_blocks, nostore_sha, random_id, check_content)

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Helper to do the class specific add_lines."""
        raise NotImplementedError(self.add_lines)

    def add_lines_with_ghosts(self, version_id, parents, lines,
        parent_texts=None, nostore_sha=None, random_id=False,
        check_content=True, left_matching_blocks=None):
        """Add lines to the versioned file, allowing ghosts to be present.

        This takes the same parameters as add_lines and returns the same.
        """
        self._check_write_ok()
        return self._add_lines_with_ghosts(version_id, parents, lines,
            parent_texts, nostore_sha, random_id, check_content,
            left_matching_blocks)

    def _add_lines_with_ghosts(self, version_id, parents, lines, parent_texts,
        nostore_sha, random_id, check_content, left_matching_blocks):
        """Helper to do class specific add_lines_with_ghosts."""
        raise NotImplementedError(self.add_lines_with_ghosts)

    def check(self, progress_bar=None):
        """Check the versioned file for integrity."""
        raise NotImplementedError(self.check)

    def _check_lines_not_unicode(self, lines):
        """Check that lines being added to a versioned file are not unicode."""
        for line in lines:
            if line.__class__ is not str:
                raise errors.BzrBadParameterUnicode("lines")

    def _check_lines_are_lines(self, lines):
        """Check that the lines really are full lines without inline EOL."""
        for line in lines:
            # A '\n' anywhere but the final character means the text was not
            # split into proper lines.
            if '\n' in line[:-1]:
                raise errors.BzrBadParameterContainsNewline("lines")

    def get_format_signature(self):
        """Get a text description of the data encoding in this file.

        :return: A format-specific signature string.
        """
        raise NotImplementedError(self.get_format_signature)

    def make_mpdiffs(self, version_ids):
        """Create multiparent diffs for specified versions."""
        knit_versions = set()
        knit_versions.update(version_ids)
        parent_map = self.get_parent_map(version_ids)
        for version_id in version_ids:
            try:
                knit_versions.update(parent_map[version_id])
            except KeyError:
                raise RevisionNotPresent(version_id, self)
        # We need to filter out ghosts, because we can't diff against them.
        knit_versions = set(self.get_parent_map(knit_versions).keys())
        lines = dict(zip(knit_versions,
            self._get_lf_split_line_list(knit_versions)))
        diffs = []
        for version_id in version_ids:
            target = lines[version_id]
            try:
                # Ghost parents (absent from knit_versions) are skipped.
                parents = [lines[p] for p in parent_map[version_id] if p in
                    knit_versions]
            except KeyError:
                raise RevisionNotPresent(version_id, self)
            if len(parents) > 0:
                left_parent_blocks = self._extract_blocks(version_id,
                    parents[0], target)
            else:
                left_parent_blocks = None
            diffs.append(multiparent.MultiParent.from_lines(target, parents,
                left_parent_blocks))
        return diffs

    def _extract_blocks(self, version_id, source, target):
        # Hook for subclasses that can provide matching blocks cheaply.
        return None

    def add_mpdiffs(self, records):
        """Add mpdiffs to this VersionedFile.

        Records should be iterables of version, parents, expected_sha1,
        mpdiff. mpdiff should be a MultiParent instance.
        """
        # Does this need to call self._check_write_ok()? (IanC 20070919)
        vf_parents = {}
        mpvf = multiparent.MultiMemoryVersionedFile()
        versions = []
        for version, parent_ids, expected_sha1, mpdiff in records:
            versions.append(version)
            mpvf.add_diff(mpdiff, version, parent_ids)
        needed_parents = set()
        for version, parent_ids, expected_sha1, mpdiff in records:
            needed_parents.update(p for p in parent_ids
                                  if not mpvf.has_version(p))
        present_parents = set(self.get_parent_map(needed_parents).keys())
        for parent_id, lines in zip(present_parents,
                                 self._get_lf_split_line_list(present_parents)):
            mpvf.add_version(lines, parent_id, [])
        for (version, parent_ids, expected_sha1, mpdiff), lines in\
            zip(records, mpvf.get_line_list(versions)):
            if len(parent_ids) == 1:
                left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                    mpvf.get_diff(parent_ids[0]).num_lines()))
            else:
                left_matching_blocks = None
            try:
                _, _, version_text = self.add_lines_with_ghosts(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            except NotImplementedError:
                # The vf can't handle ghosts, so add lines normally, which will
                # (reasonably) fail if there are ghosts in the data.
                _, _, version_text = self.add_lines(version,
                    parent_ids, lines, vf_parents,
                    left_matching_blocks=left_matching_blocks)
            vf_parents[version] = version_text
        for (version, parent_ids, expected_sha1, mpdiff), sha1 in\
             zip(records, self.get_sha1s(versions)):
            if expected_sha1 != sha1:
                raise errors.VersionedFileInvalidChecksum(version)

    def get_sha1s(self, version_ids):
        """Get the stored sha1 sums for the given revisions.

        :param version_ids: The names of the versions to lookup
        :return: a list of sha1s in order according to the version_ids
        """
        raise NotImplementedError(self.get_sha1s)

    def get_text(self, version_id):
        """Return version contents as a text string.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return ''.join(self.get_lines(version_id))
    get_string = get_text

    def get_texts(self, version_ids):
        """Return the texts of listed versions as a list of strings.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        return [''.join(self.get_lines(v)) for v in version_ids]

    def get_lines(self, version_id):
        """Return version contents as a sequence of lines.

        Raises RevisionNotPresent if version is not present in
        file history.
        """
        raise NotImplementedError(self.get_lines)

    def _get_lf_split_line_list(self, version_ids):
        # Split each text into lines the way a file read would.
        return [StringIO(t).readlines() for t in self.get_texts(version_ids)]

    def get_ancestry(self, version_ids, topo_sorted=True):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        This list will not be topologically sorted if topo_sorted=False is
        passed.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        raise NotImplementedError(self.get_ancestry)

    def get_ancestry_with_ghosts(self, version_ids):
        """Return a list of all ancestors of given version(s). This
        will not include the null revision.

        Must raise RevisionNotPresent if any of the given versions are
        not present in file history.

        Ghosts that are known about will be included in ancestry list,
        but are not explicitly marked.
        """
        raise NotImplementedError(self.get_ancestry_with_ghosts)

    def get_parent_map(self, version_ids):
        """Get a map of the parents of version_ids.

        :param version_ids: The version ids to look up parents for.
        :return: A mapping from version id to parents.
        """
        raise NotImplementedError(self.get_parent_map)

    def get_parents_with_ghosts(self, version_id):
        """Return version names for parents of version_id.

        Will raise RevisionNotPresent if version_id is not present
        in the history.

        Ghosts that are known about will be included in the parent list,
        but are not explicitly marked.
        """
        try:
            return list(self.get_parent_map([version_id])[version_id])
        except KeyError:
            raise errors.RevisionNotPresent(version_id, self)

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        :raise RevisionNotPresent: If the given version is
        not present in file history.
        """
        raise NotImplementedError(self.annotate)

    @deprecated_method(one_five)
    def join(self, other, pb=None, msg=None, version_ids=None,
             ignore_missing=False):
        """Integrate versions from other into this versioned file.

        If version_ids is None all versions from other should be
        incorporated into this versioned file.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the other file's history unless ignore_missing
        is supplied in which case they are silently skipped.
        """
        self._check_write_ok()
        return InterVersionedFile.get(other, self).join(
            pb,
            msg,
            version_ids,
            ignore_missing)

    def iter_lines_added_or_present_in_versions(self, version_ids=None,
                                                pb=None):
        """Iterate over the lines in the versioned file from version_ids.

        This may return lines from other versions. Each item the returned
        iterator yields is a tuple of a line and a text version that that line
        is present in (not introduced in).

        Ordering of results is in whatever order is most suitable for the
        underlying storage format.

        If a progress bar is supplied, it may be used to indicate progress.
        The caller is responsible for cleaning up progress bars (because this
        is an iterator).

        NOTES: Lines are normalised: they will all have \n terminators.
               Lines are returned in arbitrary order.

        :return: An iterator over (line, version_id).
        """
        raise NotImplementedError(self.iter_lines_added_or_present_in_versions)

    def plan_merge(self, ver_a, ver_b):
        """Return pseudo-annotation indicating how the two versions merge.

        This is computed between versions a and b and their common
        base.

        Weave lines present in none of them are skipped entirely.

        Legend:
        killed-base Dead in base revision
        killed-both Killed in each revision
        killed-a    Killed in a
        killed-b    Killed in b
        unchanged   Alive in both a and b (possibly created in both)
        new-a       Created in a
        new-b       Created in b
        ghost-a     Killed in a, unborn in b
        ghost-b     Killed in b, unborn in a
        irrelevant  Not in either revision
        """
        raise NotImplementedError(VersionedFile.plan_merge)

    def weave_merge(self, plan, a_marker=TextMerge.A_MARKER,
                    b_marker=TextMerge.B_MARKER):
        """Merge the lines described by plan, returning merged lines only."""
        return PlanWeaveMerge(plan, a_marker, b_marker).merge_lines()[0]
class RecordingVersionedFileDecorator(object):
    """A minimal versioned file that records calls made on it.

    Only enough methods have been added to support tests using it to date.

    :ivar calls: A list of the calls made; can be reset at any time by
        assigning [] to it.
    """

    def __init__(self, backing_vf):
        """Create a RecordingVersionedFileDecorator decorating backing_vf.

        :param backing_vf: The versioned file to answer all methods.
        """
        self._backing_vf = backing_vf
        # The docstring-promised call log; get_lines appends to it.
        self.calls = []

    def get_lines(self, version_ids):
        self.calls.append(("get_lines", version_ids))
        return self._backing_vf.get_lines(version_ids)
class _PlanMergeVersionedFile(object):
506
"""A VersionedFile for uncommitted and committed texts.
508
It is intended to allow merges to be planned with working tree texts.
509
It implements only the small part of the VersionedFile interface used by
510
PlanMerge. It falls back to multiple versionedfiles for data not stored in
511
_PlanMergeVersionedFile itself.
514
def __init__(self, file_id, fallback_versionedfiles=None):
517
:param file_id: Used when raising exceptions.
518
:param fallback_versionedfiles: If supplied, the set of fallbacks to
519
use. Otherwise, _PlanMergeVersionedFile.fallback_versionedfiles
520
can be appended to later.
522
self._file_id = file_id
523
if fallback_versionedfiles is None:
524
self.fallback_versionedfiles = []
526
self.fallback_versionedfiles = fallback_versionedfiles
530
def plan_merge(self, ver_a, ver_b, base=None):
531
"""See VersionedFile.plan_merge"""
532
from bzrlib.merge import _PlanMerge
534
return _PlanMerge(ver_a, ver_b, self).plan_merge()
535
old_plan = list(_PlanMerge(ver_a, base, self).plan_merge())
536
new_plan = list(_PlanMerge(ver_a, ver_b, self).plan_merge())
537
return _PlanMerge._subtract_plans(old_plan, new_plan)
539
def plan_lca_merge(self, ver_a, ver_b, base=None):
540
from bzrlib.merge import _PlanLCAMerge
541
graph = self._get_graph()
542
new_plan = _PlanLCAMerge(ver_a, ver_b, self, graph).plan_merge()
545
old_plan = _PlanLCAMerge(ver_a, base, self, graph).plan_merge()
546
return _PlanLCAMerge._subtract_plans(list(old_plan), list(new_plan))
548
def add_lines(self, version_id, parents, lines):
549
"""See VersionedFile.add_lines
551
Lines are added locally, not fallback versionedfiles. Also, ghosts are
552
permitted. Only reserved ids are permitted.
554
if not revision.is_reserved_id(version_id):
555
raise ValueError('Only reserved ids may be used')
557
raise ValueError('Parents may not be None')
559
raise ValueError('Lines may not be None')
560
self._parents[version_id] = tuple(parents)
561
self._lines[version_id] = lines
563
def get_lines(self, version_id):
564
"""See VersionedFile.get_ancestry"""
565
lines = self._lines.get(version_id)
566
if lines is not None:
568
for versionedfile in self.fallback_versionedfiles:
570
return versionedfile.get_lines(version_id)
571
except errors.RevisionNotPresent:
574
raise errors.RevisionNotPresent(version_id, self._file_id)
576
def get_ancestry(self, version_id, topo_sorted=False):
577
"""See VersionedFile.get_ancestry.
579
Note that this implementation assumes that if a VersionedFile can
580
answer get_ancestry at all, it can give an authoritative answer. In
581
fact, ghosts can invalidate this assumption. But it's good enough
582
99% of the time, and far cheaper/simpler.
584
Also note that the results of this version are never topologically
585
sorted, and are a set.
588
raise ValueError('This implementation does not provide sorting')
589
parents = self._parents.get(version_id)
591
for vf in self.fallback_versionedfiles:
593
return vf.get_ancestry(version_id, topo_sorted=False)
594
except errors.RevisionNotPresent:
597
raise errors.RevisionNotPresent(version_id, self._file_id)
598
ancestry = set([version_id])
599
for parent in parents:
600
ancestry.update(self.get_ancestry(parent, topo_sorted=False))
603
def get_parent_map(self, version_ids):
604
"""See VersionedFile.get_parent_map"""
606
pending = set(version_ids)
607
for key in version_ids:
609
result[key] = self._parents[key]
612
pending = pending - set(result.keys())
613
for versionedfile in self.fallback_versionedfiles:
614
parents = versionedfile.get_parent_map(pending)
615
result.update(parents)
616
pending = pending - set(parents.keys())
621
def _get_graph(self):
622
from bzrlib.graph import (
625
_StackedParentsProvider,
627
from bzrlib.repofmt.knitrepo import _KnitParentsProvider
628
parent_providers = [DictParentsProvider(self._parents)]
629
for vf in self.fallback_versionedfiles:
630
parent_providers.append(_KnitParentsProvider(vf))
631
return Graph(_StackedParentsProvider(parent_providers))
634
class PlanWeaveMerge(TextMerge):
    """Weave merge that takes a plan as its input.

    This exists so that VersionedFile.plan_merge is implementable.
    Most callers will want to use WeaveMerge instead.
    """

    def __init__(self, plan, a_marker=TextMerge.A_MARKER,
                 b_marker=TextMerge.B_MARKER):
        TextMerge.__init__(self, a_marker, b_marker)
        self.plan = plan

    def _merge_struct(self):
        # Queued lines from each side since the last resync point, plus
        # whether either side has actually changed in this region.
        lines_a = []
        lines_b = []
        ch_a = ch_b = False

        def outstanding_struct():
            # Flush the queued region as a TextMerge structure: a 1-tuple for
            # clean text, a 2-tuple for a conflict.
            if not lines_a and not lines_b:
                return
            elif ch_a and not ch_b:
                # one-sided change: take lines from a.
                yield (lines_a,)
            elif ch_b and not ch_a:
                yield (lines_b,)
            elif lines_a == lines_b:
                yield (lines_a,)
            else:
                yield (lines_a, lines_b)

        # We previously considered either 'unchanged' or 'killed-both' lines
        # to be possible places to resynchronize.  However, assuming agreement
        # on killed-both lines may be too aggressive. -- mbp 20060324
        for state, line in self.plan:
            if state == 'unchanged':
                # resync and flush queued conflicts changes if any
                for struct in outstanding_struct():
                    yield struct
                lines_a = []
                lines_b = []
                ch_a = ch_b = False

            if state == 'unchanged':
                if line:
                    yield ([line],)
            elif state == 'killed-a':
                ch_a = True
                lines_b.append(line)
            elif state == 'killed-b':
                ch_b = True
                lines_a.append(line)
            elif state == 'new-a':
                ch_a = True
                lines_a.append(line)
            elif state == 'new-b':
                ch_b = True
                lines_b.append(line)
            elif state == 'conflicted-a':
                ch_b = ch_a = True
                lines_a.append(line)
            elif state == 'conflicted-b':
                ch_b = ch_a = True
                lines_b.append(line)
            else:
                assert state in ('irrelevant', 'ghost-a', 'ghost-b',
                                 'killed-base', 'killed-both'), state
        for struct in outstanding_struct():
            yield struct
class WeaveMerge(PlanWeaveMerge):
    """Weave merge that takes a VersionedFile and two versions as its input."""

    def __init__(self, versionedfile, ver_a, ver_b,
        a_marker=PlanWeaveMerge.A_MARKER, b_marker=PlanWeaveMerge.B_MARKER):
        # Compute the merge plan once up front and hand it to PlanWeaveMerge.
        plan = versionedfile.plan_merge(ver_a, ver_b)
        PlanWeaveMerge.__init__(self, plan, a_marker, b_marker)
class InterVersionedFile(InterObject):
    """This class represents operations taking place between two VersionedFiles.

    Its instances have methods like join, and contain
    references to the source and target versionedfiles these operations can be
    carried out on.

    Often we will provide convenience methods on 'versionedfile' which carry
    out operations with another versionedfile - they will always forward to
    InterVersionedFile.get(other).method_name(parameters).
    """

    _optimisers = []
    """The available optimised InterVersionedFile types."""

    def join(self, pb=None, msg=None, version_ids=None, ignore_missing=False):
        """Integrate versions from self.source into self.target.

        If version_ids is None all versions from source should be
        incorporated into this versioned file.

        Must raise RevisionNotPresent if any of the specified versions
        are not present in the other file's history unless ignore_missing is
        supplied in which case they are silently skipped.
        """
        target = self.target
        version_ids = self._get_source_version_ids(version_ids, ignore_missing)
        graph = Graph(self.source)
        search = graph._make_breadth_first_searcher(version_ids)
        transitive_ids = set()
        map(transitive_ids.update, list(search))
        parent_map = self.source.get_parent_map(transitive_ids)
        order = tsort.topo_sort(parent_map.items())
        pb = ui.ui_factory.nested_progress_bar()
        # Opaque parent-text tokens from target.add_lines, reused for delta
        # optimisation on later insertions.
        parent_texts = {}
        try:
            # TODO for incremental cross-format work:
            # make a versioned file with the following content:
            # all revisions we have been asked to join
            # all their ancestors that are *not* in target already.
            # the immediate parents of the above two sets, with
            # empty parent lists - these versions are in target already
            # and the incorrect version data will be ignored.
            # TODO: for all ancestors that are present in target already,
            #       check them for consistent data, this requires moving sha1
            #       generation lower in the stack.
            # TODO: remove parent texts when they are not relevant any more
            #       for memory pressure reduction. RBC 20060313
            # pb.update('Converting versioned data', 0, len(order))
            total = len(order)
            for index, version in enumerate(order):
                pb.update('Converting versioned data', index, total)
                if version in target:
                    continue
                _, _, parent_text = target.add_lines(version,
                    parent_map[version],
                    self.source.get_lines(version),
                    parent_texts=parent_texts)
                parent_texts[version] = parent_text
            # NOTE(review): returning the number of converted versions is
            # reconstructed behaviour — confirm against callers of join.
            return total
        finally:
            pb.finished()

    def _get_source_version_ids(self, version_ids, ignore_missing):
        """Determine the version ids to be used from self.source.

        :param version_ids: The caller-supplied version ids to check. (None
                            for all). If None is in version_ids, it is
                            stripped.
        :param ignore_missing: if True, remove missing ids from the version
                               list. If False, raise RevisionNotPresent on
                               a missing version id.
        :return: A set of version ids.
        """
        if version_ids is None:
            # None cannot be in source.versions
            return set(self.source.versions())
        else:
            if ignore_missing:
                return set(self.source.versions()).intersection(
                    set(version_ids))
            else:
                new_version_ids = set()
                for version in version_ids:
                    if version is None:
                        continue
                    if not self.source.has_version(version):
                        raise errors.RevisionNotPresent(
                            version, str(self.source))
                    else:
                        new_version_ids.add(version)
                return new_version_ids