# Copyright (C) 2005, 2009 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# Author: Martin Pool <mbp@canonical.com>

"""Weave - storage of related text file versions"""

# TODO: Perhaps have copy method for Weave instances?

# XXX: If we do weaves this way, will a merge still behave the same
# way if it's done in a different order?  That's a pretty desirable
# property.

# TODO: How to write these to disk?  One option is cPickle, which
# would be fast but less friendly to C, and perhaps not portable.  Another is

# TODO: Nothing here so far assumes the lines are really \n newlines,
# rather than being split up in some other way.  We could accommodate
# binaries, perhaps by naively splitting on \n or perhaps using
# something like a rolling checksum.

# TODO: Perhaps track SHA-1 in the header for protection?  This would
# be redundant with it being stored in the inventory, but perhaps

# TODO: Track version names as well as indexes.

# TODO: Probably do transitive expansion when specifying parents?

# TODO: Separate out some code to read and write weaves.

# TODO: End marker for each version so we can stop reading?

# TODO: Check that no insertion occurs inside a deletion that was
# active in the version of the insertion.

# TODO: In addition to the SHA-1 check, perhaps have some code that
# checks structural constraints of the weave: i.e. that insertions are
# properly nested, that there is no text outside of an insertion, that
# insertions or deletions are not repeated, etc.

# TODO: Parallel-extract that passes back each line along with a
# description of which revisions include it.  Nice for checking all
# shas or calculating stats in parallel.

# TODO: Using a single _extract routine and then processing the output
# is probably inefficient.  It's simple enough that we can afford to
# have slight specializations for different ways it's used: annotate,
# basis for add, get, etc.

# TODO: Probably the API should work only in names to hide the integer
# indexes from the user.

# TODO: Perhaps a special slower check() method that verifies more
# nesting constraints and the MD5 of each version?

# TODO: Is there any potential performance win by having an add()
# variant that is passed a pre-cooked version of the single basis
# version?

# TODO: Reweave can possibly be made faster by remembering diffs
# where the basis and destination are unchanged.

# FIXME: Sometimes we will be given a parents list for a revision
# that includes some redundant parents (i.e. already a parent of
# something in the list.)  We should eliminate them.  This can
# be done fairly efficiently because the sequence numbers constrain
# the possible relationships.

# FIXME: the conflict markers should be *7* characters

from copy import copy
from cStringIO import StringIO
import os

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import tsort
""")
from bzrlib import errors, osutils
from bzrlib.errors import (WeaveError, WeaveFormatError, WeaveParentMismatch,
    RevisionAlreadyPresent,
    RevisionNotPresent,
    UnavailableRepresentation,
    )
from bzrlib.osutils import dirname, sha, sha_strings, split_lines
import bzrlib.patiencediff
from bzrlib.revision import NULL_REVISION
from bzrlib.symbol_versioning import *
from bzrlib.trace import mutter
from bzrlib.versionedfile import (
    AbsentContentFactory,
    adapter_registry,
    ContentFactory,
    sort_groupcompress,
    VersionedFile,
    )
from bzrlib.weavefile import _read_weave_v5, write_weave_v5


class WeaveContentFactory(ContentFactory):
    """Content factory for streaming from weaves.

    :seealso ContentFactory:
    """

    def __init__(self, version, weave):
        """Create a WeaveContentFactory for version from weave."""
        ContentFactory.__init__(self)
        self.sha1 = weave.get_sha1s([version])[version]
        self.key = (version,)
        parents = weave.get_parent_map([version])[version]
        self.parents = tuple((parent,) for parent in parents)
        self.storage_kind = 'fulltext'
        self._weave = weave

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'fulltext':
            return self._weave.get_text(self.key[-1])
        elif storage_kind == 'chunked':
            return self._weave.get_lines(self.key[-1])
        else:
            raise UnavailableRepresentation(self.key, storage_kind, 'fulltext')


class Weave(VersionedFile):
    """weave - versioned text file storage.

    A Weave manages versions of line-based text files, keeping track
    of the originating version for each line.

    There should be no way to get an earlier version deleting a later
    version.

    _weave
        Text of the weave; list of control instruction tuples and strings.

    _parents
        List of parents, indexed by version number.
        It is only necessary to store the minimal set of parents for
        each version; the parent's parents are implied.

    _sha1s
        List of hex SHA-1 of each version.

    _names
        List of symbolic names for each version.  Each should be unique.

    _name_map
        For each name, the version number.

    _weave_name
        Descriptive name of this weave; typically the filename if known.
    """

    __slots__ = ['_weave', '_parents', '_sha1s', '_names', '_name_map',
                 '_weave_name', '_matcher', '_allow_reserved']
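
    # A rough illustration (not produced by this module) of the in-memory
    # representation for a weave holding two versions, where version 1
    # replaced the middle line of version 0:
    #
    #   _weave   = [('{', 0), 'first line\n',
    #               ('[', 1), 'old middle line\n', (']', 1),
    #               ('{', 1), 'new middle line\n', ('}', None),
    #               'last line\n', ('}', None)]
    #   _parents = [[], [0]]
    #
    # '{'/'}' bracket the lines inserted by a version and '['/']' bracket the
    # lines it deleted; extracting a version keeps each literal line whose
    # innermost active insertion is included and which no included deletion
    # covers.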

    def __init__(self, weave_name=None, access_mode='w', matcher=None,
                 get_scope=None, allow_reserved=False):
        """Create a weave.

        :param get_scope: A callable that returns an opaque object to be used
            for detecting when this weave goes out of scope (should stop
            answering requests or allowing mutation).
        """
        super(Weave, self).__init__()
        self._weave = []
        self._parents = []
        self._sha1s = []
        self._names = []
        self._name_map = {}
        self._weave_name = weave_name
        if matcher is None:
            self._matcher = bzrlib.patiencediff.PatienceSequenceMatcher
        else:
            self._matcher = matcher
        if get_scope is None:
            get_scope = lambda: None
        self._get_scope = get_scope
        self._scope = get_scope()
        self._access_mode = access_mode
        self._allow_reserved = allow_reserved

    def __repr__(self):
        return "Weave(%r)" % self._weave_name

    def _check_write_ok(self):
        """Is the versioned file marked as 'finished'? Raise if it is."""
        if self._get_scope() != self._scope:
            raise errors.OutSideTransaction()
        if self._access_mode != 'w':
            raise errors.ReadOnlyObjectDirtiedError(self)

    def copy(self):
        """Return a deep copy of self.

        The copy can be modified without affecting the original weave."""
        other = Weave()
        other._weave = self._weave[:]
        other._parents = self._parents[:]
        other._sha1s = self._sha1s[:]
        other._names = self._names[:]
        other._name_map = self._name_map.copy()
        other._weave_name = self._weave_name
        return other

    def __eq__(self, other):
        if not isinstance(other, Weave):
            return False
        return self._parents == other._parents \
            and self._weave == other._weave \
            and self._sha1s == other._sha1s

    def __ne__(self, other):
        return not self.__eq__(other)

    def _idx_to_name(self, version):
        return self._names[version]

    def _lookup(self, name):
        """Convert symbolic version name to index."""
        if not self._allow_reserved:
            self.check_not_reserved_id(name)
        try:
            return self._name_map[name]
        except KeyError:
            raise RevisionNotPresent(name, self._weave_name)

    def versions(self):
        """See VersionedFile.versions."""
        return self._names[:]

    def has_version(self, version_id):
        """See VersionedFile.has_version."""
        return (version_id in self._name_map)

    __contains__ = has_version

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        versions = [version[-1] for version in versions]
        if ordering == 'topological':
            parents = self.get_parent_map(versions)
            new_versions = tsort.topo_sort(parents)
            new_versions.extend(set(versions).difference(set(parents)))
            versions = new_versions
        elif ordering == 'groupcompress':
            parents = self.get_parent_map(versions)
            new_versions = sort_groupcompress(parents)
            new_versions.extend(set(versions).difference(set(parents)))
            versions = new_versions
        for version in versions:
            if version in self:
                yield WeaveContentFactory(version, self)
            else:
                yield AbsentContentFactory((version,))
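
    # A rough usage sketch (assumed revision ids, not part of this module's
    # tests): consumers must cope with 'absent' records and can ask each
    # factory for the representation they want.
    #
    #   for record in w.get_record_stream([('rev-1',)], 'unordered', False):
    #       if record.storage_kind == 'absent':
    #           raise RevisionNotPresent(record.key, w)
    #       lines = record.get_bytes_as('chunked')    # list of lines
    #       text = record.get_bytes_as('fulltext')    # single string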

    def get_parent_map(self, version_ids):
        """See VersionedFile.get_parent_map."""
        result = {}
        for version_id in version_ids:
            if version_id == NULL_REVISION:
                parents = ()
            else:
                try:
                    parents = tuple(
                        map(self._idx_to_name,
                            self._parents[self._lookup(version_id)]))
                except RevisionNotPresent:
                    continue
            result[version_id] = parents
        return result

    def get_parents_with_ghosts(self, version_id):
        raise NotImplementedError(self.get_parents_with_ghosts)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :return: None
        :seealso VersionedFile.get_record_stream:
        """
        adapters = {}
        for record in stream:
            # Raise an error when a record is missing.
            if record.storage_kind == 'absent':
                raise RevisionNotPresent([record.key[0]], self)
            # adapt to non-tuple interface
            parents = [parent[0] for parent in record.parents]
            if (record.storage_kind == 'fulltext'
                or record.storage_kind == 'chunked'):
                self.add_lines(record.key[0], parents,
                    osutils.chunks_to_lines(record.get_bytes_as('chunked')))
            else:
                adapter_key = record.storage_kind, 'fulltext'
                try:
                    adapter = adapters[adapter_key]
                except KeyError:
                    adapter_factory = adapter_registry.get(adapter_key)
                    adapter = adapter_factory(self)
                    adapters[adapter_key] = adapter
                lines = split_lines(adapter.get_bytes(record))
                try:
                    self.add_lines(record.key[0], parents, lines)
                except RevisionAlreadyPresent:
                    pass

    def _check_repeated_add(self, name, parents, text, sha1):
        """Check that a duplicated add is OK.

        If it is, return the (old) index; otherwise raise an exception.
        """
        idx = self._lookup(name)
        if sorted(self._parents[idx]) != sorted(parents) \
            or sha1 != self._sha1s[idx]:
            raise RevisionAlreadyPresent(name, self._weave_name)
        return idx

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """See VersionedFile.add_lines."""
        idx = self._add(version_id, lines, map(self._lookup, parents),
            nostore_sha=nostore_sha)
        return sha_strings(lines), sum(map(len, lines)), idx

    def _add(self, version_id, lines, parents, sha1=None, nostore_sha=None):
        """Add a single text on top of the weave.

        Returns the index number of the newly added version.

        version_id
            Symbolic name for this version.
            (Typically the revision-id of the revision that added it.)
            If None, a name will be allocated based on the hash. (sha1:SHAHASH)

        parents
            List or set of direct parent version numbers.

        lines
            Sequence of lines to be added in the new version.

        :param nostore_sha: See VersionedFile.add_lines.
        """
        self._check_lines_not_unicode(lines)
        self._check_lines_are_lines(lines)
        if not sha1:
            sha1 = sha_strings(lines)
        if sha1 == nostore_sha:
            raise errors.ExistingContent
        if version_id is None:
            version_id = "sha1:" + sha1
        if version_id in self._name_map:
            return self._check_repeated_add(version_id, parents, lines, sha1)

        self._check_versions(parents)
        ## self._check_lines(lines)
        new_version = len(self._parents)

        # if we abort after here the (in-memory) weave will be corrupt because only
        # some fields are updated
        # XXX: FIXME implement a succeed-or-fail of the rest of this routine.
        # - Robert Collins 20060226
        self._parents.append(parents[:])
        self._sha1s.append(sha1)
        self._names.append(version_id)
        self._name_map[version_id] = new_version

        if not parents:
            # special case; adding with no parents revision; can do
            # this more quickly by just appending unconditionally.
            # even more specially, if we're adding an empty text we
            # need do nothing at all.
            if lines:
                self._weave.append(('{', new_version))
                self._weave.extend(lines)
                self._weave.append(('}', None))
            return new_version

        if len(parents) == 1:
            pv = list(parents)[0]
            if sha1 == self._sha1s[pv]:
                # special case: same as the single parent
                return new_version

        ancestors = self._inclusions(parents)

        # basis a list of (origin, lineno, line)
        basis_lineno = []
        basis_lines = []
        for origin, lineno, line in self._extract(ancestors):
            basis_lineno.append(lineno)
            basis_lines.append(line)

        # another small special case: a merge, producing the same text
        if lines == basis_lines:
            return new_version

        # add a sentinel, because we can also match against the final line
        basis_lineno.append(len(self._weave))

        # XXX: which line of the weave should we really consider
        # matches the end of the file?  the current code says it's the
        # last line of the weave?

        #print 'basis_lines:', basis_lines
        #print 'new_lines:  ', lines

        s = self._matcher(None, basis_lines, lines)

        # offset gives the number of lines that have been inserted
        # into the weave up to the current point; if the original edit
        # instruction says to change line A then we actually change (A+offset)
        offset = 0

        for tag, i1, i2, j1, j2 in s.get_opcodes():
            # i1,i2 are given in offsets within basis_lines; we need to map
            # them back to offsets within the entire weave
            #print 'raw match', tag, i1, i2, j1, j2
            if tag == 'equal':
                continue
            i1 = basis_lineno[i1]
            i2 = basis_lineno[i2]
            # the deletion and insertion are handled separately.
            # first delete the region.
            if i1 != i2:
                self._weave.insert(i1+offset, ('[', new_version))
                self._weave.insert(i2+offset+1, (']', new_version))
                offset += 2

            if j1 != j2:
                # there may have been a deletion spanning up to
                # i2; we want to insert after this region to make sure
                # we don't destroy ourselves
                i = i2 + offset
                self._weave[i:i] = ([('{', new_version)]
                                    + lines[j1:j2]
                                    + [('}', None)])
                offset += 2 + (j2 - j1)

        return new_version
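
    # A rough usage sketch (assumed revision ids, not part of this module's
    # tests), using the public add_lines()/get_lines() API that wraps _add():
    #
    #   w = Weave('demo')
    #   w.add_lines('rev-1', [], ['a\n', 'b\n'])
    #   w.add_lines('rev-2', ['rev-1'], ['a\n', 'c\n'])
    #   w.get_lines('rev-2')          # -> ['a\n', 'c\n']
    #   w.get_parent_map(['rev-2'])   # -> {'rev-2': ('rev-1',)}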

    def _inclusions(self, versions):
        """Return set of all ancestors of given version(s)."""
        if not len(versions):
            return []
        i = set(versions)
        for v in xrange(max(versions), 0, -1):
            if v in i:
                # include all its parents
                i.update(self._parents[v])
        return i
        ## except IndexError:
        ##     raise ValueError("version %d not present in weave" % v)

    def get_ancestry(self, version_ids, topo_sorted=True):
        """See VersionedFile.get_ancestry."""
        if isinstance(version_ids, basestring):
            version_ids = [version_ids]
        i = self._inclusions([self._lookup(v) for v in version_ids])
        return [self._idx_to_name(v) for v in i]
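
    # Note: ancestry is computed transitively over _parents, so in the sketch
    # above w.get_ancestry(['rev-2']) returns both 'rev-1' and 'rev-2'
    # (the order comes from set iteration and is not significant here).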

    def _check_lines(self, text):
        if not isinstance(text, list):
            raise ValueError("text should be a list, not %s" % type(text))

    def _extract(self, versions):
        """Yield annotation of lines in included set.

        The set typically but not necessarily corresponds to a version.
        """
        for i in versions:
            if not isinstance(i, int):
                raise ValueError(i)

        included = self._inclusions(versions)

        istack = []   # versions for which an insertion block is current
        iset = set()
        dset = set()  # versions for which a deletion block is current

        lineno = 0    # line of weave, 0-based

        isactive = None

        result = []

        # TODO: Probably only need to put included revisions in the istack

        # TODO: Could split this into two functions, one that updates
        # the stack and the other that processes the results -- but
        # I'm not sure it's really needed.

        # TODO: In fact, I think we only need to store the *count* of
        # active insertions and deletions, and we can maintain that
        # by just counting as we go along.

        WFE = WeaveFormatError

        # 449       0   4474.6820   2356.5590   bzrlib.weave:556(_extract)
        # +285282   0   1676.8040   1676.8040   +<isinstance>
        # 1.6 seconds in 'isinstance'.
        # changing the first isinstance:
        # 449       0   2814.2660   1577.1760   bzrlib.weave:556(_extract)
        # +140414   0    762.8050    762.8050   +<isinstance>
        # note that the inline time actually dropped (less function calls)
        # and total processing time was halved.
        # we're still spending ~1/4 of the method in isinstance though.
        # so lets hard code the acceptable string classes we expect:
        # 449       0   1202.9420    786.2930   bzrlib.weave:556(_extract)
        # +71352    0    377.5560    377.5560   +<method 'append' of 'list'
        # yay, down to ~1/4 the initial extract time, and our inline time
        # has shrunk again, with isinstance no longer dominating.
        # tweaking the stack inclusion test to use a set gives:
        # 449       0   1122.8030    713.0080   bzrlib.weave:556(_extract)
        # +71352    0    354.9980    354.9980   +<method 'append' of 'list'
        # - a 5% win, or possibly just noise. However with large istacks that
        #   'in' test could dominate, so I'm leaving this change in place -
        #   when it's fast enough to consider profiling big datasets we can review.

        for l in self._weave:
            if l.__class__ == tuple:
                c, v = l
                isactive = None  # recalculate
                if c == '{':
                    istack.append(v)
                    iset.add(v)
                elif c == '}':
                    iset.remove(istack.pop())
                elif c == '[':
                    if v in included:
                        dset.add(v)
                elif c == ']':
                    if v in included:
                        dset.remove(v)
                else:
                    raise AssertionError()
            else:
                if isactive is None:
                    isactive = (not dset) and istack and (istack[-1] in included)
                if isactive:
                    result.append((istack[-1], lineno, l))
            lineno += 1
        if istack:
            raise WeaveFormatError("unclosed insertion blocks "
                    "at end of weave: %s" % istack)
        if dset:
            raise WeaveFormatError("unclosed deletion blocks at end of weave: %s"
                                   % dset)
        return result
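
    # A rough illustration (assumed data): extracting version 1 of the
    # two-version weave sketched near the top of the class yields
    #
    #   [(0, 1, 'first line\n'), (1, 6, 'new middle line\n'),
    #    (0, 8, 'last line\n')]
    #
    # where each tuple is (origin version, index into _weave, text); note the
    # middle value indexes the weave, not the extracted text.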

    def _maybe_lookup(self, name_or_index):
        """Convert possible symbolic name to index, or pass through indexes.

        NOT FOR PUBLIC USE.
        """
        if isinstance(name_or_index, (int, long)):
            return name_or_index
        else:
            return self._lookup(name_or_index)

    def get_lines(self, version_id):
        """See VersionedFile.get_lines()."""
        int_index = self._maybe_lookup(version_id)
        result = [line for (origin, lineno, line) in self._extract([int_index])]
        expected_sha1 = self._sha1s[int_index]
        measured_sha1 = sha_strings(result)
        if measured_sha1 != expected_sha1:
            raise errors.WeaveInvalidChecksum(
                    'file %s, revision %s, expected: %s, measured %s'
                    % (self._weave_name, version_id,
                       expected_sha1, measured_sha1))
        return result

    def get_sha1s(self, version_ids):
        """See VersionedFile.get_sha1s()."""
        result = {}
        for v in version_ids:
            result[v] = self._sha1s[self._lookup(v)]
        return result

    def num_versions(self):
        """How many versions are in this weave?"""
        l = len(self._parents)
        return l

    __len__ = num_versions

    def check(self, progress_bar=None):
        # TODO evaluate performance hit of using string sets in this routine.
        # TODO: check no circular inclusions
        # TODO: create a nested progress bar
        for version in range(self.num_versions()):
            inclusions = list(self._parents[version])
            if inclusions:
                inclusions.sort()
                if inclusions[-1] >= version:
                    raise WeaveFormatError("invalid included version %d for index %d"
                                           % (inclusions[-1], version))

        # try extracting all versions; parallel extraction is used
        nv = self.num_versions()
        sha1s = {}
        inclusions = {}
        for i in range(nv):
            # For creating the ancestry, IntSet is much faster (3.7s vs 0.17s)
            # The problem is that set membership is much more expensive
            name = self._idx_to_name(i)
            sha1s[name] = sha()
            new_inc = set([name])
            for p in self._parents[i]:
                new_inc.update(inclusions[self._idx_to_name(p)])

            if set(new_inc) != set(self.get_ancestry(name)):
                raise AssertionError(
                    'failed %s != %s'
                    % (set(new_inc), set(self.get_ancestry(name))))
            inclusions[name] = new_inc

        nlines = len(self._weave)

        update_text = 'checking weave'
        if self._weave_name:
            short_name = os.path.basename(self._weave_name)
            update_text = 'checking %s' % (short_name,)
            update_text = update_text[:25]

        for lineno, insert, deleteset, line in self._walk_internal():
            if progress_bar:
                progress_bar.update(update_text, lineno, nlines)

            for name, name_inclusions in inclusions.items():
                # The active inclusion must be an ancestor,
                # and no ancestors must have deleted this line,
                # because we don't support resurrection.
                if (insert in name_inclusions) and not (deleteset & name_inclusions):
                    sha1s[name].update(line)

        for i in range(nv):
            version = self._idx_to_name(i)
            hd = sha1s[version].hexdigest()
            expected = self._sha1s[i]
            if hd != expected:
                raise errors.WeaveInvalidChecksum(
                        "mismatched sha1 for version %s: "
                        "got %s, expected %s"
                        % (version, hd, expected))

        # TODO: check insertions are properly nested, that there are
        # no lines outside of insertion blocks, that deletions are
        # properly paired, etc.

    def _imported_parents(self, other, other_idx):
        """Return list of parents in self corresponding to indexes in other."""
        new_parents = []
        for parent_idx in other._parents[other_idx]:
            parent_name = other._names[parent_idx]
            if parent_name not in self._name_map:
                # should not be possible
                raise WeaveError("missing parent {%s} of {%s} in %r"
                                 % (parent_name, other._name_map[other_idx], self))
            new_parents.append(self._name_map[parent_name])
        return new_parents

    def _check_version_consistent(self, other, other_idx, name):
        """Check if a version is consistent in this and other.

        To be consistent it must have:

         * the same text
         * the same direct parents (by name, not index, and disregarding
           order)

        If present & correct return True;
        if not present in self return False;
        if inconsistent raise error."""
        this_idx = self._name_map.get(name, -1)
        if this_idx != -1:
            if self._sha1s[this_idx] != other._sha1s[other_idx]:
                raise errors.WeaveTextDiffers(name, self, other)
            self_parents = self._parents[this_idx]
            other_parents = other._parents[other_idx]
            n1 = set([self._names[i] for i in self_parents])
            n2 = set([other._names[i] for i in other_parents])
            if not self._compatible_parents(n1, n2):
                raise WeaveParentMismatch("inconsistent parents "
                    "for version {%s}: %s vs %s" % (name, n1, n2))
            else:
                return True
        else:
            return False

    def _reweave(self, other, pb, msg):
        """Reweave self with other - internal helper for join().

        :param other: The other weave to merge
        :param pb: An optional progress bar, indicating how far done we are
        :param msg: An optional message for the progress
        """
        new_weave = _reweave(self, other, pb=pb, msg=msg)
        self._copy_weave_content(new_weave)

    def _copy_weave_content(self, otherweave):
        """absorb the content from otherweave."""
        for attr in self.__slots__:
            if attr != '_weave_name':
                setattr(self, attr, copy(getattr(otherweave, attr)))


class WeaveFile(Weave):
    """A WeaveFile represents a Weave on disk and writes on change."""

    WEAVE_SUFFIX = '.weave'

    def __init__(self, name, transport, filemode=None, create=False, access_mode='w', get_scope=None):
        """Create a WeaveFile.

        :param create: If not True, only open an existing knit.
        """
        super(WeaveFile, self).__init__(name, access_mode, get_scope=get_scope,
            allow_reserved=False)
        self._transport = transport
        self._filemode = filemode
        try:
            _read_weave_v5(self._transport.get(name + WeaveFile.WEAVE_SUFFIX), self)
        except errors.NoSuchFile:
            if not create:
                raise
            # new file, save it
            self._save()

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """Add a version and save the weave."""
        self.check_not_reserved_id(version_id)
        result = super(WeaveFile, self)._add_lines(version_id, parents, lines,
            parent_texts, left_matching_blocks, nostore_sha, random_id,
            check_content)
        self._save()
        return result

    def copy_to(self, name, transport):
        """See VersionedFile.copy_to()."""
        # as we are all in memory always, just serialise to the new place.
        sio = StringIO()
        write_weave_v5(self, sio)
        sio.seek(0)
        transport.put_file(name + WeaveFile.WEAVE_SUFFIX, sio, self._filemode)

    def _save(self):
        """Save the weave."""
        self._check_write_ok()
        sio = StringIO()
        write_weave_v5(self, sio)
        sio.seek(0)
        bytes = sio.getvalue()
        path = self._weave_name + WeaveFile.WEAVE_SUFFIX
        try:
            self._transport.put_bytes(path, bytes, self._filemode)
        except errors.NoSuchFile:
            self._transport.mkdir(dirname(path))
            self._transport.put_bytes(path, bytes, self._filemode)

    @staticmethod
    def get_suffixes():
        """See VersionedFile.get_suffixes()."""
        return [WeaveFile.WEAVE_SUFFIX]

    def insert_record_stream(self, stream):
        super(WeaveFile, self).insert_record_stream(stream)
        self._save()
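

# A rough usage sketch for WeaveFile (assumed paths, not part of this
# module's tests):
#
#   from bzrlib.transport import get_transport
#   t = get_transport('/tmp/example-store')
#   vf = WeaveFile('demo', t, create=True)
#   vf.add_lines('rev-1', [], ['hello\n'])
#   # every mutation rewrites 'demo.weave' on the transport via _save()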


def _reweave(wa, wb, pb=None, msg=None):
    """Combine two weaves and return the result.

    This works even if a revision R has different parents in
    wa and wb.  In the resulting weave all the parents are given.

    This is done by just building up a new weave, maintaining ordering
    of the versions in the two inputs.  More efficient approaches
    might be possible but it should only be necessary to do
    this operation rarely, when a new previously ghost version is
    inserted.

    :param pb: An optional progress bar, indicating how far done we are
    :param msg: An optional message for the progress
    """
    wr = Weave()
    queue_a = range(wa.num_versions())
    queue_b = range(wb.num_versions())
    # first determine combined parents of all versions
    # map from version name -> all parent names
    combined_parents = _reweave_parent_graphs(wa, wb)
    mutter("combined parents: %r", combined_parents)
    order = tsort.topo_sort(combined_parents.iteritems())
    mutter("order to reweave: %r", order)

    if pb and not msg:
        msg = 'reweave'

    for idx, name in enumerate(order):
        if pb:
            pb.update(msg, idx, len(order))
        if name in wa._name_map:
            lines = wa.get_lines(name)
            if name in wb._name_map:
                lines_b = wb.get_lines(name)
                if lines != lines_b:
                    mutter('Weaves differ on content. rev_id {%s}', name)
                    mutter('weaves: %s, %s', wa._weave_name, wb._weave_name)
                    import difflib
                    lines = list(difflib.unified_diff(lines, lines_b,
                            wa._weave_name, wb._weave_name))
                    mutter('lines:\n%s', ''.join(lines))
                    raise errors.WeaveTextDiffers(name, wa, wb)
        else:
            lines = wb.get_lines(name)
        wr._add(name, lines, [wr._lookup(i) for i in combined_parents[name]])
    return wr
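
# A rough sketch of _reweave in use (assumed revision ids): given two weaves
# that agree on the text of their common versions, the result contains the
# union of versions with combined parents.
#
#   wa = Weave('a'); wb = Weave('b')
#   wa.add_lines('rev-1', [], ['x\n'])
#   wb.add_lines('rev-1', [], ['x\n'])
#   wb.add_lines('rev-2', ['rev-1'], ['x\n', 'y\n'])
#   merged = _reweave(wa, wb)
#   sorted(merged.versions())   # -> ['rev-1', 'rev-2']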


def _reweave_parent_graphs(wa, wb):
    """Return combined parent ancestry for two weaves.

    Returned as a list of (version_name, set(parent_names))"""
    combined = {}
    for weave in [wa, wb]:
        for idx, name in enumerate(weave._names):
            p = combined.setdefault(name, set())
            p.update(map(weave._idx_to_name, weave._parents[idx]))
    return combined