/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/knit.py

  • Committer: John Arbash Meinel
  • Date: 2011-04-07 10:36:24 UTC
  • mfrom: (5764 +trunk)
  • mto: This revision was merged to the branch mainline in revision 5766.
  • Revision ID: john@arbash-meinel.com-20110407103624-n76g6tjeqmznwdcd
Merge bzr.dev 5764 to resolve release-notes (aka NEWS) conflicts

Show diffs side-by-side

added

removed

Lines of Context:
1
 
# Copyright (C) 2006-2010 Canonical Ltd
 
1
# Copyright (C) 2006-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
60
60
 
61
61
from bzrlib.lazy_import import lazy_import
62
62
lazy_import(globals(), """
 
63
import gzip
 
64
 
63
65
from bzrlib import (
64
 
    annotate,
65
66
    debug,
66
67
    diff,
67
68
    graph as _mod_graph,
68
69
    index as _mod_index,
69
 
    lru_cache,
70
70
    pack,
71
 
    progress,
 
71
    patiencediff,
72
72
    static_tuple,
73
73
    trace,
74
74
    tsort,
77
77
    )
78
78
""")
79
79
from bzrlib import (
 
80
    annotate,
80
81
    errors,
81
82
    osutils,
82
 
    patiencediff,
83
83
    )
84
84
from bzrlib.errors import (
85
 
    FileExists,
86
85
    NoSuchFile,
87
 
    KnitError,
88
86
    InvalidRevisionId,
89
87
    KnitCorrupt,
90
88
    KnitHeaderError,
91
89
    RevisionNotPresent,
92
 
    RevisionAlreadyPresent,
93
90
    SHA1KnitCorrupt,
94
91
    )
95
92
from bzrlib.osutils import (
96
93
    contains_whitespace,
97
 
    contains_linebreaks,
98
94
    sha_string,
99
95
    sha_strings,
100
96
    split_lines,
104
100
    adapter_registry,
105
101
    ConstantMapper,
106
102
    ContentFactory,
107
 
    ChunkedContentFactory,
108
103
    sort_groupcompress,
109
 
    VersionedFile,
110
104
    VersionedFiles,
111
105
    )
112
106
 
882
876
            self._factory = KnitAnnotateFactory()
883
877
        else:
884
878
            self._factory = KnitPlainFactory()
885
 
        self._fallback_vfs = []
 
879
        self._immediate_fallback_vfs = []
886
880
        self._reload_func = reload_func
887
881
 
888
882
    def __repr__(self):
896
890
 
897
891
        :param a_versioned_files: A VersionedFiles object.
898
892
        """
899
 
        self._fallback_vfs.append(a_versioned_files)
 
893
        self._immediate_fallback_vfs.append(a_versioned_files)
900
894
 
901
895
    def add_lines(self, key, parents, lines, parent_texts=None,
902
896
        left_matching_blocks=None, nostore_sha=None, random_id=False,
1069
1063
                    raise errors.KnitCorrupt(self,
1070
1064
                        "Missing basis parent %s for %s" % (
1071
1065
                        compression_parent, key))
1072
 
        for fallback_vfs in self._fallback_vfs:
 
1066
        for fallback_vfs in self._immediate_fallback_vfs:
1073
1067
            fallback_vfs.check()
1074
1068
 
1075
1069
    def _check_add(self, key, lines, random_id, check_content):
1195
1189
    def get_known_graph_ancestry(self, keys):
1196
1190
        """Get a KnownGraph instance with the ancestry of keys."""
1197
1191
        parent_map, missing_keys = self._index.find_ancestry(keys)
1198
 
        for fallback in self._fallback_vfs:
 
1192
        for fallback in self._transitive_fallbacks():
1199
1193
            if not missing_keys:
1200
1194
                break
1201
1195
            (f_parent_map, f_missing_keys) = fallback._index.find_ancestry(
1225
1219
            and so on.
1226
1220
        """
1227
1221
        result = {}
1228
 
        sources = [self._index] + self._fallback_vfs
 
1222
        sources = [self._index] + self._immediate_fallback_vfs
1229
1223
        source_results = []
1230
1224
        missing = set(keys)
1231
1225
        for source in sources:
1525
1519
                        yield KnitContentFactory(key, global_map[key],
1526
1520
                            record_details, None, raw_data, self._factory.annotated, None)
1527
1521
                else:
1528
 
                    vf = self._fallback_vfs[parent_maps.index(source) - 1]
 
1522
                    vf = self._immediate_fallback_vfs[parent_maps.index(source) - 1]
1529
1523
                    for record in vf.get_record_stream(keys, ordering,
1530
1524
                        include_delta_closure):
1531
1525
                        yield record
1541
1535
            # record entry 2 is the 'digest'.
1542
1536
            result[key] = details[2]
1543
1537
        missing.difference_update(set(result))
1544
 
        for source in self._fallback_vfs:
 
1538
        for source in self._immediate_fallback_vfs:
1545
1539
            if not missing:
1546
1540
                break
1547
1541
            new_result = source.get_sha1s(missing)
1618
1612
                raise RevisionNotPresent([record.key], self)
1619
1613
            elif ((record.storage_kind in knit_types)
1620
1614
                  and (compression_parent is None
1621
 
                       or not self._fallback_vfs
 
1615
                       or not self._immediate_fallback_vfs
1622
1616
                       or self._index.has_key(compression_parent)
1623
1617
                       or not self.has_key(compression_parent))):
1624
1618
                # we can insert the knit record literally if either it has no
1796
1790
        # vfs, and hope to find them there.  Note that if the keys are found
1797
1791
        # but had no changes or no content, the fallback may not return
1798
1792
        # anything.
1799
 
        if keys and not self._fallback_vfs:
 
1793
        if keys and not self._immediate_fallback_vfs:
1800
1794
            # XXX: strictly the second parameter is meant to be the file id
1801
1795
            # but it's not easily accessible here.
1802
1796
            raise RevisionNotPresent(keys, repr(self))
1803
 
        for source in self._fallback_vfs:
 
1797
        for source in self._immediate_fallback_vfs:
1804
1798
            if not keys:
1805
1799
                break
1806
1800
            source_keys = set()
1879
1873
        :return: the header and the decompressor stream.
1880
1874
                 as (stream, header_record)
1881
1875
        """
1882
 
        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
 
1876
        df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
1883
1877
        try:
1884
1878
            # Current serialise
1885
1879
            rec = self._check_header(key, df.readline())
1894
1888
        # 4168 calls in 2880 217 internal
1895
1889
        # 4168 calls to _parse_record_header in 2121
1896
1890
        # 4168 calls to readlines in 330
1897
 
        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(data))
 
1891
        df = gzip.GzipFile(mode='rb', fileobj=StringIO(data))
1898
1892
        try:
1899
1893
            record_contents = df.readlines()
1900
1894
        except Exception, e:
2015
2009
        """See VersionedFiles.keys."""
2016
2010
        if 'evil' in debug.debug_flags:
2017
2011
            trace.mutter_callsite(2, "keys scales with size of history")
2018
 
        sources = [self._index] + self._fallback_vfs
 
2012
        sources = [self._index] + self._immediate_fallback_vfs
2019
2013
        result = set()
2020
2014
        for source in sources:
2021
2015
            result.update(source.keys())
2061
2055
 
2062
2056
        missing_keys = set(nonlocal_keys)
2063
2057
        # Read from remote versioned file instances and provide to our caller.
2064
 
        for source in self.vf._fallback_vfs:
 
2058
        for source in self.vf._immediate_fallback_vfs:
2065
2059
            if not missing_keys:
2066
2060
                break
2067
2061
            # Loop over fallback repositories asking them for texts - ignore
3417
3411
            raise exc_class, exc_value, exc_traceback
3418
3412
 
3419
3413
 
3420
 
# Deprecated, use PatienceSequenceMatcher instead
3421
 
KnitSequenceMatcher = patiencediff.PatienceSequenceMatcher
3422
 
 
3423
 
 
3424
3414
def annotate_knit(knit, revision_id):
3425
3415
    """Annotate a knit with no cached annotations.
3426
3416
 
3524
3514
        return records, ann_keys
3525
3515
 
3526
3516
    def _get_needed_texts(self, key, pb=None):
3527
 
        # if True or len(self._vf._fallback_vfs) > 0:
3528
 
        if len(self._vf._fallback_vfs) > 0:
 
3517
        # if True or len(self._vf._immediate_fallback_vfs) > 0:
 
3518
        if len(self._vf._immediate_fallback_vfs) > 0:
3529
3519
            # If we have fallbacks, go to the generic path
3530
3520
            for v in annotate.Annotator._get_needed_texts(self, key, pb=pb):
3531
3521
                yield v