from bzrlib.errors import (WeaveError, WeaveFormatError, WeaveParentMismatch,
        RevisionAlreadyPresent,
        RevisionNotPresent,
        UnavailableRepresentation,
        WeaveRevisionAlreadyPresent,
        WeaveRevisionNotPresent,
        )
import bzrlib.errors as errors
from bzrlib.osutils import dirname, sha_strings, split_lines
import bzrlib.patiencediff
from bzrlib.revision import NULL_REVISION
from bzrlib.symbol_versioning import (deprecated_method,
        zero_eight,
        )
from bzrlib.symbol_versioning import *
from bzrlib.trace import mutter
from bzrlib.tsort import topo_sort
from bzrlib.versionedfile import (
    AbsentContentFactory,
    adapter_registry,
    ContentFactory,
    InterVersionedFile,
    VersionedFile,
    )
from bzrlib.weavefile import _read_weave_v5, write_weave_v5


class WeaveContentFactory(ContentFactory):
    """Content factory for streaming from weaves.

    :seealso ContentFactory:
    """

    def __init__(self, version, weave):
        """Create a WeaveContentFactory for version from weave."""
        ContentFactory.__init__(self)
        self.sha1 = weave.get_sha1s([version])[version]
        self.key = (version,)
        parents = weave.get_parent_map([version])[version]
        self.parents = tuple((parent,) for parent in parents)
        self.storage_kind = 'fulltext'
        self._weave = weave

    def get_bytes_as(self, storage_kind):
        if storage_kind == 'fulltext':
            return self._weave.get_text(self.key[-1])
        raise UnavailableRepresentation(self.key, storage_kind, 'fulltext')
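
# Usage sketch (assumes a populated Weave ``w`` that contains version 'v1'):
#     factory = WeaveContentFactory('v1', w)
#     factory.get_bytes_as('fulltext')    # full text of 'v1' as a string
# Asking for any other storage_kind raises UnavailableRepresentation.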


class Weave(VersionedFile):
    """weave - versioned text file storage."""

    __contains__ = has_version
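    # With this alias, ``version_id in weave`` is shorthand for
    # ``weave.has_version(version_id)``.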

    def get_delta(self, version_id):
        """See VersionedFile.get_delta."""
        return self.get_deltas([version_id])[version_id]

    def get_record_stream(self, versions, ordering, include_delta_closure):
        """Get a stream of records for versions.

        :param versions: The versions to include. Each version is a tuple
            (version_id,).
        :param ordering: Either 'unordered' or 'topological'. A topologically
            sorted stream has compression parents strictly before their
            children.
        :param include_delta_closure: If True then the closure across any
            compression parents will be included (in the opaque data).
        :return: An iterator of ContentFactory objects, each of which is only
            valid until the iterator is advanced.
        """
        versions = [version[-1] for version in versions]
        if ordering == 'topological':
            parents = self.get_parent_map(versions)
            new_versions = topo_sort(parents)
            new_versions.extend(set(versions).difference(set(parents)))
            versions = new_versions
        for version in versions:
            if version in self:
                yield WeaveContentFactory(version, self)
            else:
                yield AbsentContentFactory((version,))
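
    # Usage sketch (assumes versions 'v1' and 'v2' exist in weave ``w``):
    #     for record in w.get_record_stream([('v1',), ('v2',)], 'topological', False):
    #         if record.storage_kind != 'absent':
    #             text = record.get_bytes_as('fulltext')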

    def get_parent_map(self, version_ids):
        """See VersionedFile.get_parent_map."""
        result = {}
        for version_id in version_ids:
            if version_id == NULL_REVISION:
                parents = ()
            else:
                try:
                    parents = tuple(
                        map(self._idx_to_name,
                            self._parents[self._lookup(version_id)]))
                except RevisionNotPresent:
                    continue
            result[version_id] = parents
        return result

    def get_deltas(self, version_ids):
        """See VersionedFile.get_deltas."""
        version_ids = self.get_ancestry(version_ids)
        for version_id in version_ids:
            if not self.has_version(version_id):
                raise RevisionNotPresent(version_id, self)
        # try extracting all versions; parallel extraction is used
        nv = self.num_versions()
        sha1s = {}
        parents = {}
        inclusions = {}
        noeols = {}
        last_parent_lines = {}
        parent_linenums = {}
        parent_noeols = {}
        parent_inclusions = {}
        current_hunks = {}
        diff_hunks = {}
        # it's simplest to generate a full set of prepared variables.
        for i in range(nv):
            name = self._names[i]
            sha1s[name] = self.get_sha1(name)
            parents_list = self.get_parents(name)
            try:
                parent = parents_list[0]
                parents[name] = parent
                parent_inclusions[name] = inclusions[parent]
            except IndexError:
                parents[name] = None
                parent_inclusions[name] = set()
            # we want to emit start, finish, replacement_length, replacement_lines tuples.
            diff_hunks[name] = []
            current_hunks[name] = [0, 0, 0, []]  # start, finish, repl_length, repl_tuples
            parent_linenums[name] = 0
            noeols[name] = False
            parent_noeols[name] = False
            last_parent_lines[name] = None
            new_inc = set([name])
            for p in self._parents[i]:
                new_inc.update(inclusions[self._idx_to_name(p)])
            # debug only, known good so far.
            #assert set(new_inc) == set(self.get_ancestry(name)), \
            #    'failed %s != %s' % (set(new_inc), set(self.get_ancestry(name)))
            inclusions[name] = new_inc

        nlines = len(self._weave)

        for lineno, inserted, deletes, line in self._walk_internal():
            # A line is active in a version if:
            #  - the insert it came from is in the version's inclusions, and
            #  - the intersection of its delete-set with the version's
            #    inclusions is empty.
            # So if we had an included-by mapping (version -> children), we
            # could restrict the deletes we examine to just the children they
            # can affect, rather than the entire set of versions.
            for version_id in version_ids:
                # The active inclusion must be an ancestor,
                # and no ancestors must have deleted this line,
                # because we don't support resurrection.
                parent_inclusion = parent_inclusions[version_id]
                inclusion = inclusions[version_id]
                parent_active = inserted in parent_inclusion and not (deletes & parent_inclusion)
                version_active = inserted in inclusion and not (deletes & inclusion)
                if not parent_active and not version_active:
                    # unrelated line of ancestry
                    continue
                elif parent_active and version_active:
                    parent_linenum = parent_linenums[version_id]
                    if current_hunks[version_id] != [parent_linenum, parent_linenum, 0, []]:
                        diff_hunks[version_id].append(tuple(current_hunks[version_id]))
                    parent_linenum += 1
                    current_hunks[version_id] = [parent_linenum, parent_linenum, 0, []]
                    parent_linenums[version_id] = parent_linenum
                    if line[-1] != '\n':
                        noeols[version_id] = True
                elif parent_active and not version_active:
                    current_hunks[version_id][1] += 1
                    parent_linenums[version_id] += 1
                    last_parent_lines[version_id] = line
                elif not parent_active and version_active:
                    # noeol only occurs at the end of a file because we
                    # diff linewise. We want to show noeol changes as an
                    # empty diff unless the actual eol-less content changed.
                    theline = line
                    try:
                        if last_parent_lines[version_id][-1] != '\n':
                            parent_noeols[version_id] = True
                    except (TypeError, IndexError):
                        pass
                    try:
                        if theline[-1] != '\n':
                            noeols[version_id] = True
                    except IndexError:
                        pass
                    new_line = False
                    parent_should_go = False

                    if parent_noeols[version_id] == noeols[version_id]:
                        # no noeol toggle, so trust the weave's statement
                        # that this line is changed.
                        new_line = True
                        if parent_noeols[version_id]:
                            theline = theline + '\n'
                    elif parent_noeols[version_id]:
                        # parent has no eol, we do:
                        # our line is new, report as such..
                        new_line = True
                    elif noeols[version_id]:
                        # append an eol so that it looks like
                        # a normalised delta
                        theline = theline + '\n'
                        if parents[version_id] is not None:
                        #if last_parent_lines[version_id] is not None:
                            parent_should_go = True
                        if last_parent_lines[version_id] != theline:
                            new_line = True
                            #parent_should_go = False
                    if new_line:
                        current_hunks[version_id][2] += 1
                        current_hunks[version_id][3].append((inserted, theline))
                    if parent_should_go:
                        # last hunk last parent line is not eaten
                        current_hunks[version_id][1] -= 1
                        if current_hunks[version_id][1] < 0:
                            current_hunks[version_id][1] = 0
                            # import pdb;pdb.set_trace()
                        # assert current_hunks[version_id][1] >= 0

        for i in range(nv):
            version = self._idx_to_name(i)
            if current_hunks[version] != [0, 0, 0, []]:
                diff_hunks[version].append(tuple(current_hunks[version]))

        result = {}
        for version_id in version_ids:
            result[version_id] = (
                parents[version_id],
                sha1s[version_id],
                noeols[version_id],
                diff_hunks[version_id],
                )
        return result
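
    # Sketch of the delta format built above: each entry of diff_hunks[name]
    # is a (start, finish, replacement_length, replacement_lines) tuple
    # describing one edit against the parent text, e.g. replacing parent
    # lines 2-3 with a single new line might appear roughly as
    #     (2, 4, 1, [(origin_index, 'new line\n')])
    # (illustrative values only).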

    def get_parents(self, version_id):
        """See VersionedFile.get_parents."""
        return map(self._idx_to_name, self._parents[self._lookup(version_id)])

    def get_parents_with_ghosts(self, version_id):
        raise NotImplementedError(self.get_parents_with_ghosts)

    def insert_record_stream(self, stream):
        """Insert a record stream into this versioned file.

        :param stream: A stream of records to insert.
        :seealso VersionedFile.get_record_stream:
        """
        adapters = {}
        for record in stream:
            # Raise an error when a record is missing.
            if record.storage_kind == 'absent':
                raise RevisionNotPresent([record.key[0]], self)
            # adapt to non-tuple interface
            parents = [parent[0] for parent in record.parents]
            if record.storage_kind == 'fulltext':
                self.add_lines(record.key[0], parents,
                    split_lines(record.get_bytes_as('fulltext')))
            else:
                adapter_key = record.storage_kind, 'fulltext'
                try:
                    adapter = adapters[adapter_key]
                except KeyError:
                    adapter_factory = adapter_registry.get(adapter_key)
                    adapter = adapter_factory(self)
                    adapters[adapter_key] = adapter
                lines = split_lines(adapter.get_bytes(
                    record, record.get_bytes_as(record.storage_kind)))
                try:
                    self.add_lines(record.key[0], parents, lines)
                except RevisionAlreadyPresent:
                    pass
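
    # Usage sketch (hypothetical second weave ``other``): stream every version
    # of ``other`` into ``w``:
    #     w.insert_record_stream(
    #         other.get_record_stream([(v,) for v in other.versions()],
    #                                 'topological', False))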

    def _check_repeated_add(self, name, parents, text, sha1):
        """Check that a duplicated add is OK.

        If it is, return the (old) index; otherwise raise an exception.
        """
        idx = self._lookup(name)
        if sorted(self._parents[idx]) != sorted(parents) \
            or sha1 != self._sha1s[idx]:
            raise RevisionAlreadyPresent(name, self._weave_name)
        return idx

    @deprecated_method(zero_eight)
    def add_identical(self, old_rev_id, new_rev_id, parents):
        """Please use Weave.clone_text now."""
        return self.clone_text(new_rev_id, old_rev_id, parents)

    def _add_lines(self, version_id, parents, lines, parent_texts,
        left_matching_blocks, nostore_sha, random_id, check_content):
        """See VersionedFile.add_lines."""
        idx = self._add(version_id, lines, map(self._lookup, parents),
            nostore_sha=nostore_sha)
        return sha_strings(lines), sum(map(len, lines)), idx

    @deprecated_method(zero_eight)
    def add(self, name, parents, text, sha1=None):
        """See VersionedFile.add_lines for the non deprecated api."""
        return self._add(name, text, map(self._maybe_lookup, parents), sha1)

    def _add(self, version_id, lines, parents, sha1=None, nostore_sha=None):
        """Add a single text on top of the weave.

        Returns the index number of the newly added version.
        """

        return len(other_parents.difference(my_parents)) == 0

    def annotate(self, version_id):
        """Return a list of (version-id, line) tuples for version_id.

        The index indicates when the line originated in the weave."""
        if isinstance(version_id, int):
            warnings.warn('Weave.annotate(int) is deprecated. Please use version names'
                          ' in all circumstances as of 0.8',
                          DeprecationWarning,
                          stacklevel=2)
            result = []
            for origin, lineno, text in self._extract([version_id]):
                result.append((origin, text))
            return result
        incls = [self._lookup(version_id)]
        return [(self._idx_to_name(origin), text) for origin, lineno, text in
            self._extract(incls)]

    def annotate_iter(self, version_id):
        """Yield list of (version-id, line) pairs for the specified version.

        The index indicates when the line originated in the weave."""
        incls = [self._lookup(version_id)]
        for origin, lineno, text in self._extract(incls):
            yield self._idx_to_name(origin), text

    @deprecated_method(zero_eight)
    def _walk(self):
        """_walk has become visit, a supported api."""
        return self._walk_internal()

    def iter_lines_added_or_present_in_versions(self, version_ids=None):
        """See VersionedFile.iter_lines_added_or_present_in_versions()."""
        if version_ids is None:
            version_ids = self.versions()
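
    # Annotation sketch: w.annotate('v2') (and annotate_iter) pair each line of
    # 'v2' with the name of the version that introduced it.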

        # no lines outside of insertion blocks, that deletions are
        # properly paired, etc.

    def _join(self, other, pb, msg, version_ids, ignore_missing):
        """Worker routine for join()."""
        if not other.versions():
            return          # nothing to update, easy

        # versions is never none, InterWeave checks this.

        # two loops so that we do not change ourselves before verifying it
        # will be ok.
        # work through in index order to make sure we get all dependencies
        names_to_join = []
        processed = 0
        # get the selected versions only that are in other.versions.
        version_ids = set(other.versions()).intersection(set(version_ids))
        # pull in the referenced graph.
        version_ids = other.get_ancestry(version_ids)
        pending_graph = [(version, other.get_parents(version)) for
                         version in version_ids]
        for name in topo_sort(pending_graph):
            other_idx = other._name_map[name]
            # returns True if we have it, False if we need it.
            if not self._check_version_consistent(other, other_idx, name):
                names_to_join.append((other_idx, name))
            processed += 1

        merged = 0
        time0 = time.time()
        for other_idx, name in names_to_join:
            # TODO: If all the parents of the other version are already
            # present then we can avoid some work by just taking the delta
            # and adjusting the offsets.
            new_parents = self._imported_parents(other, other_idx)
            sha1 = other._sha1s[other_idx]

            merged += 1

            pb.update(msg, merged, len(names_to_join))

            lines = other.get_lines(other_idx)
            self._add(name, lines, new_parents, sha1)

        mutter("merged = %d, processed = %d, file_id=%s; deltat=%d"%(
                merged, processed, self._weave_name, time.time()-time0))
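
    # Join sketch (hypothetical weaves ``w`` and ``other``, with ``pb`` a
    # progress bar object):
    #     w._join(other, pb, 'weave join', other.versions(), False)
    # copies every version present in ``other`` but missing from ``w``,
    # in topological order.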

    def _imported_parents(self, other, other_idx):
        """Return list of parents in self corresponding to indexes in other."""
        new_parents = []