@@ -1 +1 @@
-# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2007-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -54 +55 @@
     revision as _mod_revision,
     )

-from bzrlib.decorators import needs_write_lock
+from bzrlib.decorators import needs_write_lock, only_raises
 from bzrlib.btree_index import (
@@ -585 +587 @@
             flush_func=flush_func)
         self.add_callback = None

-    def replace_indices(self, index_to_pack, indices):
-        """Replace the current mappings with fresh ones.
-
-        This should probably not be used eventually, rather incremental add and
-        removal of indices. It has been added during refactoring of existing
-        code.
-
-        :param index_to_pack: A mapping from index objects to
-            (transport, name) tuples for the pack file data.
-        :param indices: A list of indices.
-        """
-        # refresh the revision pack map dict without replacing the instance.
-        self.index_to_pack.clear()
-        self.index_to_pack.update(index_to_pack)
-        # XXX: API break - clearly a 'replace' method would be good?
-        self.combined_index._indices[:] = indices
-        # the current add nodes callback for the current writable index if
-        # there is one.
-        self.add_callback = None
-
     def add_index(self, index, pack):
         """Add index to the aggregate, which is an index for Pack pack.
@@ -617 +599 @@
         # expose it to the index map
         self.index_to_pack[index] = pack.access_tuple()
         # put it at the front of the linear index list
-        self.combined_index.insert_index(0, index)
+        self.combined_index.insert_index(0, index, pack.name)

     def add_writable_index(self, index, pack):
         """Add an index which is able to have data added to it.
@@ -643 +625 @@
         self.data_access.set_writer(None, None, (None, None))
         self.index_to_pack.clear()
         del self.combined_index._indices[:]
+        del self.combined_index._index_names[:]
         self.add_callback = None

-    def remove_index(self, index, pack):
+    def remove_index(self, index):
         """Remove index from the indices used to answer queries.

         :param index: An index from the pack parameter.
-        :param pack: A Pack instance.
         """
         del self.index_to_pack[index]
-        self.combined_index._indices.remove(index)
+        pos = self.combined_index._indices.index(index)
+        del self.combined_index._indices[pos]
+        del self.combined_index._index_names[pos]
         if (self.add_callback is not None and
             getattr(index, 'add_nodes', None) == self.add_callback):
             self.add_callback = None
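
The `remove_index` change above exists because `CombinedGraphIndex` now keeps a parallel `_index_names` list next to `_indices`, so removal has to happen by position rather than with `list.remove`, or the two lists drift apart. A minimal standalone sketch of the invariant (toy class, not the bzrlib implementation):

    class PairedIndexList(object):
        """Toy stand-in for CombinedGraphIndex's paired internal lists."""

        def __init__(self):
            self._indices = []
            self._index_names = []

        def insert_index(self, pos, index, name=None):
            self._indices.insert(pos, index)
            self._index_names.insert(pos, name)

        def remove_index(self, index):
            # Look the position up once, then delete from both lists, so a
            # name can never be left behind pointing at the wrong index.
            pos = self._indices.index(index)
            del self._indices[pos]
            del self._index_names[pos]

    combined = PairedIndexList()
    combined.insert_index(0, 'idx-a', 'pack-a')
    combined.insert_index(0, 'idx-b', 'pack-b')
    combined.remove_index('idx-a')
    assert combined._index_names == ['pack-b']
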
@@ -1116 +1100 @@
             iterator is a tuple with:
             index, readv_vector, node_vector. readv_vector is a list ready to
             hand to the transport readv method, and node_vector is a list of
-            (key, eol_flag, references) for the the node retrieved by the
+            (key, eol_flag, references) for the node retrieved by the
             matching readv_vector.
         """
         # group by pack so we do one readv per pack
@@ -1414 +1398 @@
         self.inventory_index = AggregateIndex(self.reload_pack_names, flush)
         self.text_index = AggregateIndex(self.reload_pack_names, flush)
         self.signature_index = AggregateIndex(self.reload_pack_names, flush)
+        all_indices = [self.revision_index, self.inventory_index,
+                       self.text_index, self.signature_index]
         if use_chk_index:
             self.chk_index = AggregateIndex(self.reload_pack_names, flush)
+            all_indices.append(self.chk_index)
         else:
             # used to determine if we're using a chk_index elsewhere.
             self.chk_index = None
+        # Tell all the CombinedGraphIndex objects about each other, so they can
+        # share hints about which pack names to search first.
+        all_combined = [agg_idx.combined_index for agg_idx in all_indices]
+        for combined_idx in all_combined:
+            combined_idx.set_sibling_indices(
+                set(all_combined).difference([combined_idx]))
         # resumed packs
         self._resumed_packs = []

+    def __repr__(self):
+        return '%s(%r)' % (self.__class__.__name__, self.repo)
+
     def add_pack_to_memory(self, pack):
         """Make a Pack object available to the repository to satisfy queries.
@@ -1541 +1537 @@
             self._remove_pack_from_memory(pack)
         # record the newly available packs and stop advertising the old
         # packs
-        result = self._save_pack_names(clear_obsolete_packs=True)
-        # Move the old packs out of the way now they are no longer referenced.
-        for revision_count, packs in pack_operations:
-            self._obsolete_packs(packs)
+        to_be_obsoleted = []
+        for _, packs in pack_operations:
+            to_be_obsoleted.extend(packs)
+        result = self._save_pack_names(clear_obsolete_packs=True,
+                                       obsolete_packs=to_be_obsoleted)
         return result

     def _flush_new_pack(self):
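
The rewritten `_execute_pack_operations` collects every pack scheduled for replacement and hands the list to `_save_pack_names`, so packs are obsoleted only after the new `pack-names` index no longer references them. A rough sketch of that ordering, with plain functions standing in for the real methods:

    def execute_pack_operations(pack_operations, save_pack_names):
        # collect every pack scheduled for replacement first ...
        to_be_obsoleted = []
        for _, packs in pack_operations:
            to_be_obsoleted.extend(packs)
        # ... then let the name-saving step obsolete them, strictly after
        # the new pack-names index has been written without them.
        return save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=to_be_obsoleted)

    # hypothetical stand-in for RepositoryPackCollection._save_pack_names
    assert execute_pack_operations(
        [(1, ['p1']), (2, ['p2', 'p3'])],
        lambda clear_obsolete_packs, obsolete_packs: obsolete_packs,
    ) == ['p1', 'p2', 'p3']
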
@@ -1563 +1560 @@
         """Is the collection already packed?"""
         return not (self.repo._format.pack_compresses or (len(self._names) > 1))

-    def pack(self, hint=None):
+    def pack(self, hint=None, clean_obsolete_packs=False):
         """Pack the pack collection totally."""
         self.ensure_loaded()
         total_packs = len(self._names)
@@ -1585 +1582 @@
         pack_operations[-1][1].append(pack)
         self._execute_pack_operations(pack_operations, OptimisingPacker)
+        if clean_obsolete_packs:
+            self._clear_obsolete_packs()

     def plan_autopack_combinations(self, existing_packs, pack_distribution):
         """Plan a pack operation.
@@ -1784 +1784 @@
         :return: None.
         """
         for pack in packs:
-            pack.pack_transport.rename(pack.file_name(),
-                '../obsolete_packs/' + pack.file_name())
+            try:
+                pack.pack_transport.rename(pack.file_name(),
+                    '../obsolete_packs/' + pack.file_name())
+            except (errors.PathError, errors.TransportError), e:
+                # TODO: Should these be warnings or mutters?
+                mutter("couldn't rename obsolete pack, skipping it:\n%s"
+                       % (e,))
             # TODO: Probably needs to know all possible indices for this pack
             # - or maybe list the directory and move all indices matching this
             # name whether we recognize it or not?
             suffixes = ['.iix', '.six', '.tix', '.rix']
             if self.chk_index is not None:
                 suffixes.append('.cix')
             for suffix in suffixes:
-                self._index_transport.rename(pack.name + suffix,
-                    '../obsolete_packs/' + pack.name + suffix)
+                try:
+                    self._index_transport.rename(pack.name + suffix,
+                        '../obsolete_packs/' + pack.name + suffix)
+                except (errors.PathError, errors.TransportError), e:
+                    mutter("couldn't rename obsolete index, skipping it:\n%s"
+                           % (e,))

     def pack_distribution(self, total_revisions):
         """Generate a list of the number of revisions to put in each pack.
@@ -1826 +1835 @@
         self._remove_pack_indices(pack)
         self.packs.remove(pack)

-    def _remove_pack_indices(self, pack):
-        """Remove the indices for pack from the aggregated indices."""
-        self.revision_index.remove_index(pack.revision_index, pack)
-        self.inventory_index.remove_index(pack.inventory_index, pack)
-        self.text_index.remove_index(pack.text_index, pack)
-        self.signature_index.remove_index(pack.signature_index, pack)
-        if self.chk_index is not None:
-            self.chk_index.remove_index(pack.chk_index, pack)
+    def _remove_pack_indices(self, pack, ignore_missing=False):
+        """Remove the indices for pack from the aggregated indices.
+
+        :param ignore_missing: Suppress KeyErrors from calling remove_index.
+        """
+        for index_type in Pack.index_definitions.keys():
+            attr_name = index_type + '_index'
+            aggregate_index = getattr(self, attr_name)
+            if aggregate_index is not None:
+                pack_index = getattr(pack, attr_name)
+                try:
+                    aggregate_index.remove_index(pack_index)
+                except KeyError:
+                    if ignore_missing:
+                        continue
+                    raise

     def reset(self):
         """Clear all cached data."""
@@ -1872 +1889 @@
         disk_nodes = set()
         for index, key, value in self._iter_disk_pack_index():
             disk_nodes.add((key, value))
+        orig_disk_nodes = set(disk_nodes)

         # do a two-way diff against our original content
         current_nodes = set()
@@ -1890 +1908 @@
         disk_nodes.difference_update(deleted_nodes)
         disk_nodes.update(new_nodes)
-        return disk_nodes, deleted_nodes, new_nodes
+        return disk_nodes, deleted_nodes, new_nodes, orig_disk_nodes

     def _syncronize_pack_names_from_disk_nodes(self, disk_nodes):
         """Given the correct set of pack files, update our saved info.
@@ -1936 +1954 @@
                 added.append(name)
         return removed, added, modified

-    def _save_pack_names(self, clear_obsolete_packs=False):
+    def _save_pack_names(self, clear_obsolete_packs=False, obsolete_packs=None):
         """Save the list of packs.

         This will take out the mutex around the pack names list for the
@@ -1947 +1965 @@
         :param clear_obsolete_packs: If True, clear out the contents of the
             obsolete_packs directory.
+        :param obsolete_packs: Packs that are obsolete once the new pack-names
+            file has been written.
         :return: A list of the names saved that were not previously on disk.
         """
+        already_obsolete = []
         self.lock_names()
         try:
             builder = self._index_builder_class()
-            disk_nodes, deleted_nodes, new_nodes = self._diff_pack_names()
+            (disk_nodes, deleted_nodes, new_nodes,
+             orig_disk_nodes) = self._diff_pack_names()
             # TODO: handle same-name, index-size-changes here -
             # e.g. use the value from disk, not ours, *unless* we're the one
             # changing it.
             for key, value in disk_nodes:
                 builder.add_node(key, value)
             self.transport.put_file('pack-names', builder.finish(),
                 mode=self.repo.bzrdir._get_file_mode())
-            # move the baseline forward
             self._packs_at_load = disk_nodes
             if clear_obsolete_packs:
-                self._clear_obsolete_packs()
+                to_preserve = set([o.name for o in obsolete_packs])
+                already_obsolete = self._clear_obsolete_packs(to_preserve)
         finally:
             self._unlock_names()
         # synchronise the memory packs list with what we just wrote:
         self._syncronize_pack_names_from_disk_nodes(disk_nodes)
+        if obsolete_packs:
+            # TODO: We could add one more condition here. "if o.name not in
+            #       orig_disk_nodes and o != the new_pack we haven't written to
+            #       disk yet. However, the new pack object is not easily
+            #       accessible here (it would have to be passed through the
+            #       autopacking code, etc.)
+            obsolete_packs = [o for o in obsolete_packs
+                              if o.name not in already_obsolete]
+            self._obsolete_packs(obsolete_packs)
         return [new_node[0][0] for new_node in new_nodes]

     def reload_pack_names(self):
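
`_save_pack_names` now also owns obsoletion: whatever `_clear_obsolete_packs` reports as already sitting in `obsolete_packs/` is filtered out before `_obsolete_packs` runs, so no pack is renamed twice. The filtering step in isolation (hypothetical names):

    class PackStub(object):
        def __init__(self, name):
            self.name = name

    obsolete_packs = [PackStub('p1'), PackStub('p2'), PackStub('p3')]
    already_obsolete = ['p2']  # e.g. found in obsolete_packs/ by the clearer

    obsolete_packs = [o for o in obsolete_packs
                      if o.name not in already_obsolete]
    assert [o.name for o in obsolete_packs] == ['p1', 'p3']
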
@@ -1989 +2022 @@
         # out the new value.
-        disk_nodes, _, _ = self._diff_pack_names()
-        self._packs_at_load = disk_nodes
+        (disk_nodes, deleted_nodes, new_nodes,
+         orig_disk_nodes) = self._diff_pack_names()
+        # _packs_at_load is meant to be the explicit list of names in
+        # 'pack-names' at the start. As such, it should not contain any
+        # pending names that haven't been written out yet.
+        self._packs_at_load = orig_disk_nodes
         (removed, added,
          modified) = self._syncronize_pack_names_from_disk_nodes(disk_nodes)
         if removed or added or modified:
@@ -2004 +2041 @@
             raise errors.RetryAutopack(self.repo, False, sys.exc_info())

-    def _clear_obsolete_packs(self):
+    def _clear_obsolete_packs(self, preserve=None):
         """Delete everything from the obsolete-packs directory.
+
+        :return: A list of pack identifiers (the filename without '.pack') that
+            were found in obsolete_packs.
         """
+        found = []
         obsolete_pack_transport = self.transport.clone('obsolete_packs')
+        if preserve is None:
+            preserve = set()
         for filename in obsolete_pack_transport.list_dir('.'):
+            name, ext = osutils.splitext(filename)
+            if ext == '.pack':
+                found.append(name)
+            if name in preserve:
+                continue
             try:
                 obsolete_pack_transport.delete(filename)
             except (errors.PathError, errors.TransportError), e:
-                warning("couldn't delete obsolete pack, skipping it:\n%s" % (e,))
+                warning("couldn't delete obsolete pack, skipping it:\n%s"
+                        % (e,))
+        return found

     def _start_write_group(self):
         # Do not permit preparation for writing if we're not in a 'write lock'.
@@ -2044 +2094 @@
         # FIXME: just drop the transient index.
         # forget what names there are
         if self._new_pack is not None:
-            try:
-                self._new_pack.abort()
-            finally:
-                # XXX: If we aborted while in the middle of finishing the write
-                # group, _remove_pack_indices can fail because the indexes are
-                # already gone. If they're not there we shouldn't fail in this
-                # case. -- mbp 20081113
-                self._remove_pack_indices(self._new_pack)
-                self._new_pack = None
+            operation = cleanup.OperationWithCleanups(self._new_pack.abort)
+            operation.add_cleanup(setattr, self, '_new_pack', None)
+            # If we aborted while in the middle of finishing the write
+            # group, _remove_pack_indices could fail because the indexes are
+            # already gone. But if they're not there we shouldn't fail in
+            # this case, so we pass ignore_missing=True.
+            operation.add_cleanup(self._remove_pack_indices, self._new_pack,
+                ignore_missing=True)
+            operation.run_simple()
         for resumed_pack in self._resumed_packs:
-            try:
-                resumed_pack.abort()
-            finally:
-                # See comment in previous finally block.
-                self._remove_pack_indices(resumed_pack)
+            operation = cleanup.OperationWithCleanups(resumed_pack.abort)
+            # See comment in previous finally block.
+            operation.add_cleanup(self._remove_pack_indices, resumed_pack,
+                ignore_missing=True)
+            operation.run_simple()
         del self._resumed_packs[:]

     def _remove_resumed_pack_indices(self):
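
The try/finally pairs become `cleanup.OperationWithCleanups`, which runs the primary callable and then every registered cleanup, rather than letting one failing cleanup mask the original exception. A toy model of the behaviour (a sketch of the idea, not bzrlib's `cleanup` module, which logs failures instead of silently swallowing them):

    class ToyOperation(object):
        def __init__(self, func):
            self.func = func
            self.cleanups = []

        def add_cleanup(self, func, *args, **kwargs):
            self.cleanups.append((func, args, kwargs))

        def run_simple(self):
            try:
                return self.func()
            finally:
                # run every cleanup; swallow cleanup errors rather than let
                # them hide an exception raised by func itself
                for func, args, kwargs in self.cleanups:
                    try:
                        func(*args, **kwargs)
                    except Exception:
                        pass  # a real implementation would log this
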
@@ -2233 +2280 @@
         self._reconcile_fixes_text_parents = True
         self._reconcile_backsup_inventory = False

-    def _warn_if_deprecated(self):
+    def _warn_if_deprecated(self, branch=None):
         # This class isn't deprecated, but one sub-format is
         if isinstance(self._format, RepositoryFormatKnitPack5RichRootBroken):
-            from bzrlib import repository
-            if repository._deprecation_warning_done:
-                return
-            repository._deprecation_warning_done = True
-            warning("Format %s for %s is deprecated - please use"
-                    " 'bzr upgrade --1.6.1-rich-root'"
-                    % (self._format, self.bzrdir.transport.base))
+            super(KnitPackRepository, self)._warn_if_deprecated(branch)

     def _abort_write_group(self):
         self.revisions._index._key_dependencies.clear()
@@ -2306 +2347 @@
         if self._write_lock_count == 1:
             self._transaction = transactions.WriteTransaction()
+            if 'relock' in debug.debug_flags and self._prev_lock == 'w':
+                note('%r was write locked again', self)
+            self._prev_lock = 'w'
             for repo in self._fallback_repositories:
                 # Writes don't affect fallback repos
                 repo.lock_read()
@@ -2319 +2363 @@
             self.control_files.lock_read()
+            if 'relock' in debug.debug_flags and self._prev_lock == 'r':
+                note('%r was read locked again', self)
+            self._prev_lock = 'r'
             for repo in self._fallback_repositories:
                 repo.lock_read()
             self._refresh_data()
@@ -2331 +2378 @@
         raise NotImplementedError(self.dont_leave_lock_in_place)

     @needs_write_lock
-    def pack(self, hint=None):
+    def pack(self, hint=None, clean_obsolete_packs=False):
         """Compress the data within the repository.

         This will pack all the data to a single pack. In future it may
         recompress deltas or do other such expensive operations.
         """
-        self._pack_collection.pack(hint=hint)
+        self._pack_collection.pack(hint=hint, clean_obsolete_packs=clean_obsolete_packs)

     @needs_write_lock
     def reconcile(self, other=None, thorough=False):
@@ -2351 +2398 @@
             packer = ReconcilePacker(collection, packs, extension, revs)
             return packer.pack(pb)

+    @only_raises(errors.LockNotHeld, errors.LockBroken)
     def unlock(self):
         if self._write_lock_count == 1 and self._write_group is not None:
             self.abort_write_group()
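
`@only_raises(errors.LockNotHeld, errors.LockBroken)` declares that `unlock` may only propagate lock errors; anything else raised while unlocking is logged and suppressed so it cannot mask the exception that caused the unlock in the first place. A toy decorator in the same spirit (a sketch, not the `bzrlib.decorators` implementation):

    import logging

    def only_raises(*allowed):
        def decorator(unbound):
            def wrapper(*args, **kwargs):
                try:
                    return unbound(*args, **kwargs)
                except allowed:
                    raise  # the declared lock errors still propagate
                except Exception:
                    logging.exception('unexpected error in %s, suppressed',
                                      unbound.__name__)
            return wrapper
        return decorator
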
@@ -2498 +2546 @@
         utf8_files = [('format', self.get_format_string())]

         self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
-        return self.open(a_bzrdir=a_bzrdir, _found=True)
+        repository = self.open(a_bzrdir=a_bzrdir, _found=True)
+        self._run_post_repo_init_hooks(repository, a_bzrdir, shared)
+        return repository

     def open(self, a_bzrdir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().
@@ -2840 +2891 @@
     repository_class = KnitPackRepository
     _commit_builder_class = PackRootCommitBuilder
     rich_root_data = True
     supports_tree_reference = True
     supports_external_lookups = True
     # What index classes to use