# Copyright (C) 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

"""Import processor that supports all Bazaar repository formats."""


import time
from bzrlib import (
    bzrdir,
    delta,
    errors,
    osutils,
    progress,
    )
from bzrlib.repofmt import pack_repo
from bzrlib.trace import note, mutter
import bzrlib.util.configobj.configobj as configobj
from bzrlib.plugins.fastimport import (
    branch_updater,
    bzr_commit_handler,
    cache_manager,
    errors as plugin_errors,
    helpers,
    idmapfile,
    marks_file,
    processor,
    revision_store,
    )


# How many commits before automatically reporting progress
_DEFAULT_AUTO_PROGRESS = 1000

# How many commits before automatically checkpointing
_DEFAULT_AUTO_CHECKPOINT = 10000

# How many checkpoints before automatically packing
_DEFAULT_AUTO_PACK = 4

# How many inventories to cache
_DEFAULT_INV_CACHE_SIZE = 10
_DEFAULT_CHK_INV_CACHE_SIZE = 100


class GenericProcessor(processor.ImportProcessor):
    """An import processor that handles basic imports.

    Current features supported:

    * blobs are cached in memory
    * commits of files and symlinks are supported
    * checkpoints automatically happen at a configurable frequency
      over and above the stream-requested checkpoints
    * timestamped progress reporting, both automatic and stream-requested
    * some basic statistics are dumped on completion.

    At checkpoints and on completion, the commit-id -> revision-id map is
    saved to a file called 'fastimport-id-map'. If the import crashes
    or is interrupted, it can be started again and this file will be
    used to skip over already loaded revisions. The format of each line
    is "commit-id revision-id" so commit-ids cannot include spaces.
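    For example, a line might look like this (the values shown are purely
    illustrative):

      :100 someuser@example.com-20090522102159-0123456789abcdef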

    Here are the supported parameters:

    * info - name of a hints file holding the analysis generated
      by running the fast-import-info processor in verbose mode. When
      importing large repositories, this parameter is needed so
      that the importer knows what blobs to intelligently cache.
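      (Such a file is typically produced ahead of time by running the
      fast-import-info processor over the same stream, for example with
      a command along the lines of 'bzr fast-import-info -v data.fi' and
      saving its output; the exact invocation is an assumption rather
      than something this module defines.)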

    * trees - update the working trees before completing.
      By default, the importer updates the repository
      and branches and the user needs to run 'bzr update' for the
      branches of interest afterwards.

    * count - only import this many commits then exit. If not set
      or negative, all commits are imported.

    * checkpoint - automatically checkpoint every n commits over and
      above any checkpoints contained in the import stream.
      The default is 10000.

    * autopack - pack every n checkpoints. The default is 4.

    * inv-cache - number of inventories to cache.
      If not set, the default is 100 for CHK formats and 10 otherwise.

    * mode - import algorithm to use: default, experimental or classic.

    * import-marks - name of file to read to load mark information from

    * export-marks - name of file to write to save mark information to
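
    A typical invocation looks roughly like the sketch below. It is
    illustrative only: the driver names and the stream-parser API shown
    here are assumptions, not something this module provides.

      proc = GenericProcessor(a_bzrdir, params={'trees': True}, verbose=True)
      proc.process(stream_parser.iter_commands)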
    """

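    # Parameters accepted by this processor. Unrecognised parameters are
    # expected to be rejected by the ImportProcessor base class (an
    # assumption about the base class, not something enforced here).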
    known_params = [
        'info',
        'trees',
        'count',
        'checkpoint',
        'autopack',
        'inv-cache',
        'mode',
        'import-marks',
        'export-marks',
        ]

    def __init__(self, bzrdir, params=None, verbose=False,
            prune_empty_dirs=True):
        processor.ImportProcessor.__init__(self, bzrdir, params, verbose)
        self.prune_empty_dirs = prune_empty_dirs

    def pre_process(self):
        self._start_time = time.time()
        self._load_info_and_params()
        self.cache_mgr = cache_manager.CacheManager(self.info, self.verbose,
            self.inventory_cache_size)

        if self.params.get("import-marks") is not None:
            mark_info = marks_file.import_marks(self.params.get("import-marks"))
            if mark_info is not None:
                self.cache_mgr.revision_ids = mark_info[0]
            self.skip_total = False
            self.first_incremental_commit = True
        else:
            self.first_incremental_commit = False
            self.skip_total = self._init_id_map()
            if self.skip_total:
                self.note("Found %d commits already loaded - "
                    "skipping over these ...", self.skip_total)
        self._revision_count = 0

        # mapping of tag name to revision_id
        self.tags = {}

        # Create the revision store to use for committing, if any
        self.rev_store = self._revision_store_factory()

        # Disable autopacking if the repo format supports it.
        # THIS IS A HACK - there is no sanctioned way of doing this yet.
        if isinstance(self.repo, pack_repo.KnitPackRepository):
            self._original_max_pack_count = \
                self.repo._pack_collection._max_pack_count
            def _max_pack_count_for_import(total_revisions):
                return total_revisions + 1
            self.repo._pack_collection._max_pack_count = \
                _max_pack_count_for_import
        else:
            self._original_max_pack_count = None

        # Make groupcompress use the fast algorithm during importing.
        # We want to repack at the end anyhow when more information
        # is available to do a better job of saving space.
        try:
            from bzrlib import groupcompress
            groupcompress._FAST = True
        except ImportError:
            pass

        # Create a write group. This is committed at the end of the import.
        # Checkpointing closes the current one and starts a new one.
        self.repo.start_write_group()

    def _load_info_and_params(self):
        self._mode = self.params.get('mode', 'default')
        self._experimental = self._mode == 'experimental'

        # This is currently hard-coded but might be configurable via
        # parameters one day if that's needed
        repo_transport = self.repo.control_files._transport
        self.id_map_path = repo_transport.local_abspath("fastimport-id-map")

        # Load the info file, if any
        info_path = self.params.get('info')
        if info_path is not None:
            self.info = configobj.ConfigObj(info_path)
        else:
            self.info = None

        # Decide which CommitHandler to use
        self.supports_chk = getattr(self.repo._format, 'supports_chks', False)
        if self.supports_chk and self._mode == 'classic':
            note("Cannot use classic algorithm on CHK repositories"
                 " - using default one instead")
            self._mode = 'default'
        if self._mode == 'classic':
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryCommitHandler
        else:
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryDeltaCommitHandler

        # Decide how often to automatically report progress
        # (not a parameter yet)
        self.progress_every = _DEFAULT_AUTO_PROGRESS
        if self.verbose:
            self.progress_every = self.progress_every / 10

        # Decide how often (# of commits) to automatically checkpoint
        self.checkpoint_every = int(self.params.get('checkpoint',
            _DEFAULT_AUTO_CHECKPOINT))

        # Decide how often (# of checkpoints) to automatically pack
        self.checkpoint_count = 0
        self.autopack_every = int(self.params.get('autopack',
            _DEFAULT_AUTO_PACK))

        # Decide how big to make the inventory cache
        cache_size = int(self.params.get('inv-cache', -1))
        if cache_size == -1:
            if self.supports_chk:
                cache_size = _DEFAULT_CHK_INV_CACHE_SIZE
            else:
                cache_size = _DEFAULT_INV_CACHE_SIZE
        self.inventory_cache_size = cache_size

        # Find the maximum number of commits to import (None means all)
        # and prepare progress reporting. Just in case the info file
        # has an outdated count of commits, we store the max counts
        # at which we need to terminate separately to the total used
        # for progress tracking.
        try:
            self.max_commits = int(self.params['count'])
            if self.max_commits < 0:
                self.max_commits = None
        except KeyError:
            self.max_commits = None
        if self.info is not None:
            self.total_commits = int(self.info['Command counts']['commit'])
            if (self.max_commits is not None and
                self.total_commits > self.max_commits):
                self.total_commits = self.max_commits
        else:
            self.total_commits = self.max_commits

    def _revision_store_factory(self):
        """Make a RevisionStore based on what the repository supports."""
        new_repo_api = hasattr(self.repo, 'revisions')
        if new_repo_api:
            return revision_store.RevisionStore2(self.repo)
        elif not self._experimental:
            return revision_store.RevisionStore1(self.repo)
        else:
            def fulltext_when(count):
                total = self.total_commits
                if total is not None and count == total:
                    fulltext = True
                else:
                    # Create an inventory fulltext every 200 revisions
                    fulltext = count % 200 == 0
                if fulltext:
                    self.note("%d commits - storing inventory as full-text",
                        count)
                return fulltext

            return revision_store.ImportRevisionStore1(
                self.repo, self.inventory_cache_size,
                fulltext_when=fulltext_when)

    def _process(self, command_iter):
        # if anything goes wrong, abort the write group if any
        try:
            processor.ImportProcessor._process(self, command_iter)
        except:
            if self.repo is not None and self.repo.is_in_write_group():
                self.repo.abort_write_group()
            raise

    def post_process(self):
        # Commit the current write group and checkpoint the id map
        self.repo.commit_write_group()
        self._save_id_map()

        if self.params.get("export-marks") is not None:
            marks_file.export_marks(self.params.get("export-marks"),
                self.cache_mgr.revision_ids)

        if self.cache_mgr.last_ref is None:
            # Nothing to refresh
            return

        # Update the branches
        self.note("Updating branch information ...")
        updater = branch_updater.BranchUpdater(self.repo, self.branch,
            self.cache_mgr, helpers.invert_dictset(self.cache_mgr.heads),
            self.cache_mgr.last_ref, self.tags)
        branches_updated, branches_lost = updater.update()
        self._branch_count = len(branches_updated)

        # Tell the user about branches that were not created
        if branches_lost:
            if not self.repo.is_shared():
                self.warning("Cannot import multiple branches into "
                    "a standalone branch")
            self.warning("Not creating branches for these head revisions:")
            for lost_info in branches_lost:
                head_revision = lost_info[1]
                branch_name = lost_info[0]
                self.note("\t %s = %s", head_revision, branch_name)

        # Update the working trees as requested
        self._tree_count = 0
        remind_about_update = True
        if self._branch_count == 0:
            self.note("no branches to update")
            self.note("no working trees to update")
            remind_about_update = False
        elif self.params.get('trees', False):
            trees = self._get_working_trees(branches_updated)
            if trees:
                self._update_working_trees(trees)
                remind_about_update = False
            else:
                self.warning("No working trees available to update")
        else:
            # Update just the trunk. (This is always the first branch
            # returned by the branch updater.)
            trunk_branch = branches_updated[0]
            trees = self._get_working_trees([trunk_branch])
            if trees:
                self._update_working_trees(trees)
                remind_about_update = self._branch_count > 1

        # Dump the cache stats now because we clear it before the final pack
        if self.verbose:
            self.cache_mgr.dump_stats()
        if self._original_max_pack_count:
            # We earlier disabled autopacking, creating one pack every
            # checkpoint instead. We now pack the repository to optimise
            # how data is stored.
            self.cache_mgr.clear_all()
            self._pack_repository()

        # Finish up by dumping stats & telling the user what to do next.
        self.dump_stats()
        if remind_about_update:
            # This message is explicitly not timestamped.
            note("To refresh the working tree for other branches, "
                "use 'bzr update' inside that branch.")

    def _update_working_trees(self, trees):
        if self.verbose:
            reporter = delta._ChangeReporter()
        else:
            reporter = None
        for wt in trees:
            self.note("Updating the working tree for %s ...", wt.basedir)
            wt.update(reporter)
            self._tree_count += 1

    def _pack_repository(self, final=True):
        # Before packing, free whatever memory we can and ensure
        # that groupcompress is configured to optimise disk space
        import gc
        if final:
            try:
                from bzrlib import groupcompress
            except ImportError:
                pass
            else:
                groupcompress._FAST = False
        gc.collect()
        self.note("Packing repository ...")
        self.repo.pack()

        # To be conservative, packing puts the old packs and
        # indices in obsolete_packs. We err on the side of
        # optimism and clear out that directory to save space.
        self.note("Removing obsolete packs ...")
        # TODO: Use a public API for this once one exists
        repo_transport = self.repo._pack_collection.transport
        repo_transport.clone('obsolete_packs').delete_multi(
            repo_transport.list_dir('obsolete_packs'))

        # If we're not done, free whatever memory we can
        if not final:
            gc.collect()

    def _get_working_trees(self, branches):
        """Get the working trees for branches in the repository."""
        result = []
        wt_expected = self.repo.make_working_trees()
        for br in branches:
            if br is None:
                continue
            elif br == self.branch:
                if self.working_tree:
                    result.append(self.working_tree)
            elif wt_expected:
                try:
                    result.append(br.bzrdir.open_workingtree())
                except errors.NoWorkingTree:
                    self.warning("No working tree for branch %s", br)
        return result

    def dump_stats(self):
        time_required = progress.str_tdelta(time.time() - self._start_time)
        rc = self._revision_count - self.skip_total
        bc = self._branch_count
        wtc = self._tree_count
        self.note("Imported %d %s, updating %d %s and %d %s in %s",
            rc, helpers.single_plural(rc, "revision", "revisions"),
            bc, helpers.single_plural(bc, "branch", "branches"),
            wtc, helpers.single_plural(wtc, "tree", "trees"),
            time_required)

    def _init_id_map(self):
        """Load the id-map and check it matches the repository.

        :return: the number of entries in the map
        """
        # Currently, we just check the size. In the future, we might
        # decide to be more paranoid and check that the revision-ids
        # are identical as well.
        self.cache_mgr.revision_ids, known = idmapfile.load_id_map(
            self.id_map_path)
        existing_count = len(self.repo.all_revision_ids())
        if existing_count < known:
            raise plugin_errors.BadRepositorySize(known, existing_count)
        return known

    def _save_id_map(self):
        """Save the id-map."""
        # Save the whole lot every time. If this proves a problem, we can
        # change to 'append just the new ones' at a later time.
        idmapfile.save_id_map(self.id_map_path, self.cache_mgr.revision_ids)

    def blob_handler(self, cmd):
        """Process a BlobCommand."""
        if cmd.mark is not None:
            dataref = cmd.id
        else:
            dataref = osutils.sha_strings(cmd.data)
        self.cache_mgr.store_blob(dataref, cmd.data)

    def checkpoint_handler(self, cmd):
        """Process a CheckpointCommand."""
        # Commit the current write group and start a new one
        self.repo.commit_write_group()
        self._save_id_map()
        self.checkpoint_count += 1
        if self.checkpoint_count % self.autopack_every == 0:
            self._pack_repository(final=False)
        self.repo.start_write_group()

    def commit_handler(self, cmd):
        """Process a CommitCommand."""
        if self.skip_total and self._revision_count < self.skip_total:
            self.cache_mgr.track_heads(cmd)
            # Check that we really do know about this commit-id
            if cmd.id not in self.cache_mgr.revision_ids:
                raise plugin_errors.BadRestart(cmd.id)
            # Consume the file commands and free any non-sticky blobs
            for fc in cmd.file_iter():
                pass
            self.cache_mgr._blobs = {}
            self._revision_count += 1
            return
        if self.first_incremental_commit:
            self.first_incremental_commit = None
            parents = self.cache_mgr.track_heads(cmd)

        # 'Commit' the revision and report progress
        handler = self.commit_handler_factory(cmd, self.cache_mgr,
            self.rev_store, verbose=self.verbose,
            prune_empty_dirs=self.prune_empty_dirs)
        try:
            handler.process()
        except:
            print "ABORT: exception occurred processing commit %s" % (cmd.id)
            raise
        self.cache_mgr.revision_ids[cmd.id] = handler.revision_id
        self._revision_count += 1
        self.report_progress("(%s)" % cmd.id)

        # Check if we should finish up or automatically checkpoint
        if (self.max_commits is not None and
            self._revision_count >= self.max_commits):
            self.note("Stopping after reaching requested count of commits")
            self.finished = True
        elif self._revision_count % self.checkpoint_every == 0:
            self.note("%d commits - automatic checkpoint triggered",
                self._revision_count)
            self.checkpoint_handler(None)

    def report_progress(self, details=''):
        if self._revision_count % self.progress_every == 0:
            if self.total_commits is not None:
                counts = "%d/%d" % (self._revision_count, self.total_commits)
            else:
                counts = "%d" % (self._revision_count,)
            minutes = (time.time() - self._start_time) / 60
            revisions_added = self._revision_count - self.skip_total
            rate = revisions_added * 1.0 / minutes
            if rate > 10:
                rate_str = "at %.0f/minute " % rate
            else:
                rate_str = "at %.1f/minute " % rate
            self.note("%s commits processed %s%s" % (counts, rate_str, details))

    def progress_handler(self, cmd):
        """Process a ProgressCommand."""
        # We could use a progress bar here instead
        self.note("progress %s" % (cmd.message,))

    def reset_handler(self, cmd):
        """Process a ResetCommand."""
        if cmd.ref.startswith('refs/tags/'):
            tag_name = cmd.ref[len('refs/tags/'):]
            if cmd.from_ is not None:
                self._set_tag(tag_name, cmd.from_)
            elif self.verbose:
                self.warning("ignoring reset refs/tags/%s - no from clause"
                    % tag_name)
            return

        if cmd.from_ is not None:
            self.cache_mgr.track_heads_for_ref(cmd.ref, cmd.from_)

    def tag_handler(self, cmd):
        """Process a TagCommand."""
        if cmd.from_ is not None:
            self._set_tag(cmd.id, cmd.from_)
        else:
            self.warning("ignoring tag %s - no from clause" % cmd.id)

    def _set_tag(self, name, from_):
        """Define a tag given a name and import 'from' reference."""
        bzr_tag_name = name.decode('utf-8', 'replace')
        bzr_rev_id = self.cache_mgr.revision_ids[from_]
        self.tags[bzr_tag_name] = bzr_rev_id