/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/knit.py

  • Committer: John Arbash Meinel
  • Date: 2011-03-15 10:28:20 UTC
  • mto: This revision was merged to the branch mainline in revision 5725.
  • Revision ID: john@arbash-meinel.com-20110315102820-51wy8wjre5ol34mu
'bzr export' needs to use 'exact' encoding.

If we are going to be writing binary bytes to stdout, then stdout needs to
be in binary mode, or it will corrupt the data stream.
Oddly enough, it only seemed to fail if we set '--verbose'. I didn't
bother to track down that bug.
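
A minimal sketch of the underlying issue (assumed for illustration; not
necessarily the exact fix bzr applies): on Windows, sys.stdout defaults to
text mode, which translates '\n' to '\r\n' and so mangles raw bytes. The
file descriptor has to be switched to binary mode before writing:

    import os
    import sys

    if sys.platform == 'win32':
        import msvcrt
        # Put stdout's file descriptor into binary mode so newline bytes
        # in the stream are not rewritten as '\r\n'.
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)

    # Raw bytes can now be written without newline translation.
    sys.stdout.write('\x00\x01\n\x02')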

@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -60,6 +60,8 @@
 
 from bzrlib.lazy_import import lazy_import
 lazy_import(globals(), """
+import gzip
+
 from bzrlib import (
     annotate,
     debug,
@@ -68,6 +70,7 @@
     index as _mod_index,
     lru_cache,
     pack,
+    patiencediff,
     progress,
     static_tuple,
     trace,
@@ -79,22 +82,17 @@
 from bzrlib import (
     errors,
     osutils,
-    patiencediff,
     )
 from bzrlib.errors import (
-    FileExists,
     NoSuchFile,
-    KnitError,
     InvalidRevisionId,
     KnitCorrupt,
     KnitHeaderError,
     RevisionNotPresent,
-    RevisionAlreadyPresent,
     SHA1KnitCorrupt,
     )
 from bzrlib.osutils import (
     contains_whitespace,
-    contains_linebreaks,
     sha_string,
     sha_strings,
     split_lines,
@@ -104,9 +102,7 @@
     adapter_registry,
     ConstantMapper,
     ContentFactory,
-    ChunkedContentFactory,
     sort_groupcompress,
-    VersionedFile,
     VersionedFiles,
     )
 
@@ -882,7 +878,7 @@
             self._factory = KnitAnnotateFactory()
         else:
             self._factory = KnitPlainFactory()
-        self._fallback_vfs = []
+        self._immediate_fallback_vfs = []
         self._reload_func = reload_func
 
     def __repr__(self):
@@ -896,7 +892,7 @@
 
         :param a_versioned_files: A VersionedFiles object.
         """
-        self._fallback_vfs.append(a_versioned_files)
+        self._immediate_fallback_vfs.append(a_versioned_files)
 
     def add_lines(self, key, parents, lines, parent_texts=None,
         left_matching_blocks=None, nostore_sha=None, random_id=False,
@@ -1069,7 +1065,7 @@
                     raise errors.KnitCorrupt(self,
                         "Missing basis parent %s for %s" % (
                         compression_parent, key))
-        for fallback_vfs in self._fallback_vfs:
+        for fallback_vfs in self._immediate_fallback_vfs:
             fallback_vfs.check()
 
     def _check_add(self, key, lines, random_id, check_content):
@@ -1195,7 +1191,7 @@
     def get_known_graph_ancestry(self, keys):
         """Get a KnownGraph instance with the ancestry of keys."""
         parent_map, missing_keys = self._index.find_ancestry(keys)
-        for fallback in self._fallback_vfs:
+        for fallback in self._transitive_fallbacks():
             if not missing_keys:
                 break
             (f_parent_map, f_missing_keys) = fallback._index.find_ancestry(
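
The rename running through these hunks separates a stacked repository's
direct fallbacks from the full transitive set. A hypothetical sketch of the
distinction (the attribute and method names follow the diff, but the bodies
are assumed, not bzrlib's actual implementation):

    class StackedVersionedFiles(object):
        """Illustration only: immediate vs. transitive fallbacks."""

        def __init__(self):
            # Only the fallbacks stacked directly on this object.
            self._immediate_fallback_vfs = []

        def _transitive_fallbacks(self):
            """Return all fallbacks, including fallbacks of fallbacks."""
            all_fallbacks = []
            for a_vfs in self._immediate_fallback_vfs:
                all_fallbacks.append(a_vfs)
                all_fallbacks.extend(a_vfs._transitive_fallbacks())
            return all_fallbacks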
@@ -1225,7 +1221,7 @@
             and so on.
         """
         result = {}
-        sources = [self._index] + self._fallback_vfs
+        sources = [self._index] + self._immediate_fallback_vfs
         source_results = []
         missing = set(keys)
         for source in sources:
@@ -1525,7 +1521,7 @@
                         yield KnitContentFactory(key, global_map[key],
                             record_details, None, raw_data, self._factory.annotated, None)
                 else:
-                    vf = self._fallback_vfs[parent_maps.index(source) - 1]
+                    vf = self._immediate_fallback_vfs[parent_maps.index(source) - 1]
                     for record in vf.get_record_stream(keys, ordering,
                         include_delta_closure):
                         yield record
@@ -1541,7 +1537,7 @@
             # record entry 2 is the 'digest'.
             result[key] = details[2]
         missing.difference_update(set(result))
-        for source in self._fallback_vfs:
+        for source in self._immediate_fallback_vfs:
             if not missing:
                 break
             new_result = source.get_sha1s(missing)
@@ -1618,7 +1614,7 @@
                 raise RevisionNotPresent([record.key], self)
             elif ((record.storage_kind in knit_types)
                   and (compression_parent is None
-                       or not self._fallback_vfs
+                       or not self._immediate_fallback_vfs
                        or self._index.has_key(compression_parent)
                        or not self.has_key(compression_parent))):
                 # we can insert the knit record literally if either it has no
@@ -1796,11 +1792,11 @@
         # vfs, and hope to find them there.  Note that if the keys are found
         # but had no changes or no content, the fallback may not return
         # anything.
-        if keys and not self._fallback_vfs:
+        if keys and not self._immediate_fallback_vfs:
             # XXX: strictly the second parameter is meant to be the file id
             # but it's not easily accessible here.
             raise RevisionNotPresent(keys, repr(self))
-        for source in self._fallback_vfs:
+        for source in self._immediate_fallback_vfs:
             if not keys:
                 break
             source_keys = set()
@@ -1879,7 +1875,7 @@
         :return: the header and the decompressor stream.
                  as (stream, header_record)
         """
-        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
+        df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
         try:
             # Current serialise
             rec = self._check_header(key, df.readline())
@@ -1894,7 +1890,7 @@
         # 4168 calls in 2880 217 internal
         # 4168 calls to _parse_record_header in 2121
         # 4168 calls to readlines in 330
-        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(data))
+        df = gzip.GzipFile(mode='rb', fileobj=StringIO(data))
         try:
             record_contents = df.readlines()
         except Exception, e:
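
For context, the two hunks above drop bzrlib's tuned_gzip wrapper in favour
of the standard library's gzip module. A minimal, self-contained sketch of
the stdlib pattern the new lines rely on (not bzrlib's code; the helper
name is assumed):

    import gzip
    from cStringIO import StringIO

    def decompress_record(raw_data):
        """Decompress a gzipped byte string held entirely in memory."""
        # Wrap the string in a file-like object so GzipFile can read it.
        df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
        try:
            return df.readlines()
        finally:
            df.close()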
@@ -2015,7 +2011,7 @@
         """See VersionedFiles.keys."""
         if 'evil' in debug.debug_flags:
             trace.mutter_callsite(2, "keys scales with size of history")
-        sources = [self._index] + self._fallback_vfs
+        sources = [self._index] + self._immediate_fallback_vfs
         result = set()
         for source in sources:
             result.update(source.keys())
@@ -2061,7 +2057,7 @@
 
         missing_keys = set(nonlocal_keys)
         # Read from remote versioned file instances and provide to our caller.
-        for source in self.vf._fallback_vfs:
+        for source in self.vf._immediate_fallback_vfs:
             if not missing_keys:
                 break
             # Loop over fallback repositories asking them for texts - ignore
@@ -3417,10 +3413,6 @@
             raise exc_class, exc_value, exc_traceback
 
 
-# Deprecated, use PatienceSequenceMatcher instead
-KnitSequenceMatcher = patiencediff.PatienceSequenceMatcher
-
-
 def annotate_knit(knit, revision_id):
     """Annotate a knit with no cached annotations.
 
@@ -3524,8 +3516,8 @@
         return records, ann_keys
 
     def _get_needed_texts(self, key, pb=None):
-        # if True or len(self._vf._fallback_vfs) > 0:
-        if len(self._vf._fallback_vfs) > 0:
+        # if True or len(self._vf._immediate_fallback_vfs) > 0:
+        if len(self._vf._immediate_fallback_vfs) > 0:
             # If we have fallbacks, go to the generic path
             for v in annotate.Annotator._get_needed_texts(self, key, pb=pb):
                 yield v