/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/knit.py

  • Committer: Andrew Bennetts
  • Date: 2008-10-27 06:14:45 UTC
  • mfrom: (3793 +trunk)
  • mto: This revision was merged to the branch mainline in revision 3795.
  • Revision ID: andrew.bennetts@canonical.com-20081027061445-eqt9lz6uw1mbvq4g
Merge from bzr.dev.

@@ -95,6 +95,7 @@
     KnitHeaderError,
     RevisionNotPresent,
     RevisionAlreadyPresent,
+    SHA1KnitCorrupt,
     )
 from bzrlib.osutils import (
     contains_whitespace,
@@ -1054,12 +1055,7 @@
             text = content.text()
             actual_sha = sha_strings(text)
             if actual_sha != digest:
-                raise KnitCorrupt(self,
-                    '\n  sha-1 %s'
-                    '\n  of reconstructed text does not match'
-                    '\n  expected %s'
-                    '\n  for version %s' %
-                    (actual_sha, digest, key))
+                raise SHA1KnitCorrupt(self, actual_sha, digest, key, text)
             text_map[key] = text
         return text_map, final_content
 
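The SHA1KnitCorrupt class itself lives in bzrlib.errors, so its definition is not part of this diff. Purely as a hedged sketch of what such an error class might look like under bzrlib's _fmt convention, with field names that are assumptions inferred from the raise site above rather than the actual definition:

    # Hedged sketch only; the real class is defined in bzrlib.errors
    # (as a KnitCorrupt subclass) and may differ in detail.
    class SHA1KnitCorrupt(Exception):

        _fmt = ("Knit %(filename)s corrupt: sha-1 of reconstructed text does "
                "not match the expected sha-1 (key %(key)s, expected "
                "%(expected)s, actual %(actual)s)")

        def __init__(self, filename, actual, expected, key, content):
            # Argument order mirrors the raise site:
            # SHA1KnitCorrupt(self, actual_sha, digest, key, text)
            self.filename = filename
            self.actual = actual
            self.expected = expected
            self.key = key
            self.content = content
            Exception.__init__(self, self._fmt % self.__dict__)

The point of the change is that the exception now carries the actual and expected digests, the key, and the reconstructed text as structured fields instead of a preformatted message string.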
@@ -1124,6 +1120,26 @@
             record_map[key] = record, record_details, digest, next
         return record_map
 
+    def _split_by_prefix(self, keys):
+        """For the given keys, split them up based on their prefix.
+
+        To keep memory pressure somewhat under control, split the
+        requests back into per-file-id requests, otherwise "bzr co"
+        extracts the full tree into memory before writing it to disk.
+        This should be revisited if _get_content_maps() can ever cross
+        file-id boundaries.
+
+        :param keys: An iterable of key tuples
+        :return: A dict of {prefix: [key_list]}
+        """
+        split_by_prefix = {}
+        for key in keys:
+            if len(key) == 1:
+                split_by_prefix.setdefault('', []).append(key)
+            else:
+                split_by_prefix.setdefault(key[0], []).append(key)
+        return split_by_prefix
+
     def get_record_stream(self, keys, ordering, include_delta_closure):
         """Get a stream of records for keys.
 
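To make the new helper concrete, here is a small usage sketch with made-up keys (knit keys are tuples; the first element serves as the file-id prefix, and single-element keys fall under the empty prefix):

    # Hypothetical keys, for illustration only.
    keys = [('file-a', 'rev-1'), ('file-a', 'rev-2'),
            ('file-b', 'rev-1'), ('rev-3',)]
    # _split_by_prefix(keys) groups them per file id:
    # {'file-a': [('file-a', 'rev-1'), ('file-a', 'rev-2')],
    #  'file-b': [('file-b', 'rev-1')],
    #  '': [('rev-3',)]}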
@@ -1223,11 +1239,18 @@
         if include_delta_closure:
             # XXX: get_content_maps performs its own index queries; allow state
             # to be passed in.
-            text_map, _ = self._get_content_maps(present_keys,
-                needed_from_fallback - absent_keys)
-            for key in present_keys:
-                yield FulltextContentFactory(key, global_map[key], None,
-                    ''.join(text_map[key]))
+            non_local_keys = needed_from_fallback - absent_keys
+            prefix_split_keys = self._split_by_prefix(present_keys)
+            prefix_split_non_local_keys = self._split_by_prefix(non_local_keys)
+            for prefix, keys in prefix_split_keys.iteritems():
+                non_local = prefix_split_non_local_keys.get(prefix, [])
+                non_local = set(non_local)
+                text_map, _ = self._get_content_maps(keys, non_local)
+                for key in keys:
+                    lines = text_map.pop(key)
+                    text = ''.join(lines)
+                    yield FulltextContentFactory(key, global_map[key], None,
+                                                 text)
         else:
             for source, keys in source_keys:
                 if source is parent_maps[0]:
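The rewritten loop above reconstructs texts one prefix group at a time and pops each text as soon as it has been yielded, so at most one file id's worth of fulltexts is resident at once. A standalone approximation of that batching pattern, with build_texts standing in for _get_content_maps (a sketch, not bzrlib API):

    def batched_fulltexts(split_by_prefix, build_texts):
        # split_by_prefix: {prefix: [key, ...]}, as produced by _split_by_prefix
        # build_texts: callable returning {key: list_of_lines} for one group
        for prefix, group in split_by_prefix.iteritems():
            text_map = build_texts(group)     # only one group in memory at a time
            for key in group:
                lines = text_map.pop(key)     # drop each text once yielded
                yield key, ''.join(lines)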
@@ -1433,7 +1456,9 @@
                 yield line, key
             keys.difference_update(source_keys)
         if keys:
-            raise RevisionNotPresent(keys, self.filename)
+            # XXX: strictly the second parameter is meant to be the file id
+            # but it's not easily accessible here.
+            raise RevisionNotPresent(keys, repr(self))
         pb.update('Walking content.', total, total)
 
     def _make_line_delta(self, delta_seq, new_content):
@@ -1642,7 +1667,6 @@
         return result
 
 
-
 class _KndxIndex(object):
     """Manages knit index files
 
@@ -2639,6 +2663,7 @@
                 (rev_id, parent_ids, record) = nodes_to_annotate.pop()
                 (index_memo, compression_parent, parents,
                  record_details) = self._all_build_details[rev_id]
+                blocks = None
                 if compression_parent is not None:
                     comp_children = self._compression_children[compression_parent]
                     if rev_id not in comp_children:
@@ -2665,14 +2690,16 @@
                         copy_base_content=(not reuse_content))
                     fulltext = self._add_fulltext_content(rev_id,
                                                           fulltext_content)
-                    blocks = KnitContent.get_line_delta_blocks(delta,
-                            parent_fulltext, fulltext)
+                    if compression_parent == parent_ids[0]:
+                        # the compression_parent is the left parent, so we can
+                        # re-use the delta
+                        blocks = KnitContent.get_line_delta_blocks(delta,
+                                parent_fulltext, fulltext)
                 else:
                     fulltext_content = self._knit._factory.parse_fulltext(
                         record, rev_id)
                     fulltext = self._add_fulltext_content(rev_id,
                         fulltext_content)
-                    blocks = None
                 nodes_to_annotate.extend(
                     self._add_annotation(rev_id, fulltext, parent_ids,
                                      left_matching_blocks=blocks))
@@ -2693,7 +2720,7 @@
 
         :param key: The key to annotate.
         """
-        if True or len(self._knit._fallback_vfs) > 0:
+        if len(self._knit._fallback_vfs) > 0:
             # stacked knits can't use the fast path at present.
             return self._simple_annotate(key)
         records = self._get_build_graph(key)