/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/knit.py

  • Committer: Robert Collins
  • Date: 2010-05-06 11:08:10 UTC
  • mto: This revision was merged to the branch mainline in revision 5223.
  • Revision ID: robertc@robertcollins.net-20100506110810-h3j07fh5gmw54s25
Cleaner matcher matching revised unlocking protocol.

@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2011 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -54,7 +54,6 @@
 
 from cStringIO import StringIO
 from itertools import izip
-import gzip
 import operator
 import os
 import sys
@@ -69,7 +68,6 @@
     index as _mod_index,
     lru_cache,
     pack,
-    patiencediff,
     progress,
     static_tuple,
     trace,
@@ -81,6 +79,7 @@
 from bzrlib import (
     errors,
     osutils,
+    patiencediff,
     )
 from bzrlib.errors import (
     FileExists,
@@ -883,7 +882,7 @@
             self._factory = KnitAnnotateFactory()
         else:
             self._factory = KnitPlainFactory()
-        self._immediate_fallback_vfs = []
+        self._fallback_vfs = []
         self._reload_func = reload_func
 
     def __repr__(self):
@@ -897,7 +896,7 @@
 
         :param a_versioned_files: A VersionedFiles object.
         """
-        self._immediate_fallback_vfs.append(a_versioned_files)
+        self._fallback_vfs.append(a_versioned_files)
 
     def add_lines(self, key, parents, lines, parent_texts=None,
         left_matching_blocks=None, nostore_sha=None, random_id=False,
@@ -1070,7 +1069,7 @@
                     raise errors.KnitCorrupt(self,
                         "Missing basis parent %s for %s" % (
                         compression_parent, key))
-        for fallback_vfs in self._immediate_fallback_vfs:
+        for fallback_vfs in self._fallback_vfs:
             fallback_vfs.check()
 
     def _check_add(self, key, lines, random_id, check_content):
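
The three hunks above are the heart of the rename: __init__ creates the fallback list, add_fallback_versioned_files appends to it, and check() recurses into each registered store. A minimal sketch of that wiring, using the names on the added side of the hunks; the class body is illustrative, not the full bzrlib implementation:

    class KnitVersionedFiles(object):
        """Skeleton of the fallback wiring shown in the hunks above."""

        def __init__(self):
            # Stores consulted when a key is not found locally,
            # e.g. the repositories a stacked branch falls back to.
            self._fallback_vfs = []

        def add_fallback_versioned_files(self, a_versioned_files):
            # Register another store to search after this one.
            self._fallback_vfs.append(a_versioned_files)

        def check(self):
            # Integrity checking recurses into every fallback store.
            for fallback_vfs in self._fallback_vfs:
                fallback_vfs.check()
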
@@ -1196,7 +1195,7 @@
     def get_known_graph_ancestry(self, keys):
         """Get a KnownGraph instance with the ancestry of keys."""
         parent_map, missing_keys = self._index.find_ancestry(keys)
-        for fallback in self._transitive_fallbacks():
+        for fallback in self._fallback_vfs:
             if not missing_keys:
                 break
             (f_parent_map, f_missing_keys) = fallback._index.find_ancestry(
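
Unlike the pure renames elsewhere in this diff, this hunk changes which stores get searched: _transitive_fallbacks() flattens fallbacks of fallbacks into one list, whereas _fallback_vfs holds only the directly attached stores. A hedged sketch of what such a flattening helper looks like, modelled on the bzrlib VersionedFiles API rather than taken from this diff:

    def _transitive_fallbacks(self):
        # Collect the direct fallbacks and, recursively, their
        # fallbacks, so ancestry searches see the whole stack.
        all_fallbacks = []
        for a_vfs in self._immediate_fallback_vfs:
            all_fallbacks.append(a_vfs)
            all_fallbacks.extend(a_vfs._transitive_fallbacks())
        return all_fallbacks
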
@@ -1226,7 +1225,7 @@
             and so on.
         """
         result = {}
-        sources = [self._index] + self._immediate_fallback_vfs
+        sources = [self._index] + self._fallback_vfs
         source_results = []
         missing = set(keys)
         for source in sources:
@@ -1526,7 +1525,7 @@
                         yield KnitContentFactory(key, global_map[key],
                             record_details, None, raw_data, self._factory.annotated, None)
                 else:
-                    vf = self._immediate_fallback_vfs[parent_maps.index(source) - 1]
+                    vf = self._fallback_vfs[parent_maps.index(source) - 1]
                     for record in vf.get_record_stream(keys, ordering,
                         include_delta_closure):
                         yield record
@@ -1542,7 +1541,7 @@
             # record entry 2 is the 'digest'.
             result[key] = details[2]
         missing.difference_update(set(result))
-        for source in self._immediate_fallback_vfs:
+        for source in self._fallback_vfs:
             if not missing:
                 break
             new_result = source.get_sha1s(missing)
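
get_sha1s above, keys and the text iteration further down all share one lookup idiom: ask each source in order, keep only the keys that are still unanswered, and stop as soon as nothing is missing. A standalone sketch of the idiom; the helper name is ours, not bzrlib's:

    def lookup_sha1s(keys, sources):
        # sources is [local index] + fallback stores, in search
        # order; the first store to answer a key wins.
        result = {}
        missing = set(keys)
        for source in sources:
            if not missing:
                break  # every key answered; skip the remaining stores
            found = source.get_sha1s(missing)
            result.update(found)
            missing.difference_update(found)
        return result
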
@@ -1619,7 +1618,7 @@
                 raise RevisionNotPresent([record.key], self)
             elif ((record.storage_kind in knit_types)
                   and (compression_parent is None
-                       or not self._immediate_fallback_vfs
+                       or not self._fallback_vfs
                        or self._index.has_key(compression_parent)
                        or not self.has_key(compression_parent))):
                 # we can insert the knit record literally if either it has no
@@ -1797,11 +1796,11 @@
         # vfs, and hope to find them there.  Note that if the keys are found
         # but had no changes or no content, the fallback may not return
         # anything.
-        if keys and not self._immediate_fallback_vfs:
+        if keys and not self._fallback_vfs:
             # XXX: strictly the second parameter is meant to be the file id
             # but it's not easily accessible here.
             raise RevisionNotPresent(keys, repr(self))
-        for source in self._immediate_fallback_vfs:
+        for source in self._fallback_vfs:
             if not keys:
                 break
             source_keys = set()
@@ -1880,7 +1879,7 @@
         :return: the header and the decompressor stream.
                  as (stream, header_record)
         """
-        df = gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
+        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(raw_data))
         try:
             # Current serialise
             rec = self._check_header(key, df.readline())
@@ -1895,7 +1894,7 @@
         # 4168 calls in 2880 217 internal
         # 4168 calls to _parse_record_header in 2121
         # 4168 calls to readlines in 330
-        df = gzip.GzipFile(mode='rb', fileobj=StringIO(data))
+        df = tuned_gzip.GzipFile(mode='rb', fileobj=StringIO(data))
         try:
             record_contents = df.readlines()
         except Exception, e:
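
Both gzip hunks swap the module that wraps a knit record's compressed bytes: tuned_gzip.GzipFile is bzrlib's performance-tuned variant of the stdlib class and presents the same file-like interface, so the surrounding code reads identically on either side. A minimal sketch of the decompression step using the stdlib module:

    from cStringIO import StringIO
    import gzip

    def read_record_lines(data):
        # Wrap the compressed record in a file-like object and
        # stream-decompress it, as _parse_record does above.
        df = gzip.GzipFile(mode='rb', fileobj=StringIO(data))
        try:
            return df.readlines()
        finally:
            df.close()
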
@@ -2016,7 +2015,7 @@
         """See VersionedFiles.keys."""
         if 'evil' in debug.debug_flags:
             trace.mutter_callsite(2, "keys scales with size of history")
-        sources = [self._index] + self._immediate_fallback_vfs
+        sources = [self._index] + self._fallback_vfs
         result = set()
         for source in sources:
             result.update(source.keys())
@@ -2062,7 +2061,7 @@
 
         missing_keys = set(nonlocal_keys)
         # Read from remote versioned file instances and provide to our caller.
-        for source in self.vf._immediate_fallback_vfs:
+        for source in self.vf._fallback_vfs:
             if not missing_keys:
                 break
             # Loop over fallback repositories asking them for texts - ignore
@@ -3418,6 +3417,10 @@
             raise exc_class, exc_value, exc_traceback
 
 
+# Deprecated, use PatienceSequenceMatcher instead
+KnitSequenceMatcher = patiencediff.PatienceSequenceMatcher
+
+
 def annotate_knit(knit, revision_id):
     """Annotate a knit with no cached annotations.
 
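
This hunk keeps KnitSequenceMatcher alive only as a deprecated alias; PatienceSequenceMatcher follows the difflib.SequenceMatcher interface, so existing callers keep working. A short usage sketch:

    from bzrlib import patiencediff

    a = ['one\n', 'two\n', 'three\n']
    b = ['one\n', 'two and a half\n', 'three\n']
    matcher = patiencediff.PatienceSequenceMatcher(None, a, b)
    for tag, i1, i2, j1, j2 in matcher.get_opcodes():
        # Same opcode tuples as difflib: 'equal', 'replace',
        # 'insert' or 'delete', with ranges into a and b.
        print tag, a[i1:i2], b[j1:j2]
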
@@ -3521,8 +3524,8 @@
         return records, ann_keys
 
     def _get_needed_texts(self, key, pb=None):
-        # if True or len(self._vf._immediate_fallback_vfs) > 0:
-        if len(self._vf._immediate_fallback_vfs) > 0:
+        # if True or len(self._vf._fallback_vfs) > 0:
+        if len(self._vf._fallback_vfs) > 0:
             # If we have fallbacks, go to the generic path
             for v in annotate.Annotator._get_needed_texts(self, key, pb=pb):
                 yield v