/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
0.64.1 by Ian Clatworthy
1st cut: gfi parser + --info processing method
1
# Copyright (C) 2008 Canonical Ltd
2
#
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
7
#
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11
# GNU General Public License for more details.
12
#
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
17
"""Import processor that supports all Bazaar repository formats."""
18
19
0.64.6 by Ian Clatworthy
generic processing method working for one revision in one branch
20
import time
0.64.5 by Ian Clatworthy
first cut at generic processing method
21
from bzrlib import (
0.64.37 by Ian Clatworthy
create branches as required
22
    bzrdir,
0.64.6 by Ian Clatworthy
generic processing method working for one revision in one branch
23
    delta,
0.64.5 by Ian Clatworthy
first cut at generic processing method
24
    errors,
25
    osutils,
0.64.26 by Ian Clatworthy
more progress reporting tweaks
26
    progress,
0.64.5 by Ian Clatworthy
first cut at generic processing method
27
    )
0.64.51 by Ian Clatworthy
disable autopacking
28
from bzrlib.repofmt import pack_repo
0.95.3 by Ian Clatworthy
Update the working tree for trunk implicitly
29
from bzrlib.trace import note, mutter
0.64.24 by Ian Clatworthy
smart blob caching using analysis done by --info
30
import bzrlib.util.configobj.configobj as configobj
0.64.5 by Ian Clatworthy
first cut at generic processing method
31
from bzrlib.plugins.fastimport import (
0.78.4 by Ian Clatworthy
move GenericBranchUpdater into its own module
32
    branch_updater,
0.81.1 by Ian Clatworthy
move GenericCommitHandler into its own module in prep for a delta-based one
33
    bzr_commit_handler,
0.78.3 by Ian Clatworthy
move GenericCacheManager into its own module
34
    cache_manager,
0.64.50 by Ian Clatworthy
cleanly restart after an interruption - basic mirroring
35
    errors as plugin_errors,
0.64.31 by Ian Clatworthy
fix branch updating for the single branch case
36
    helpers,
0.64.50 by Ian Clatworthy
cleanly restart after an interruption - basic mirroring
37
    idmapfile,
0.78.5 by Ian Clatworthy
move import/export of marks into a module
38
    marks_file,
0.64.5 by Ian Clatworthy
first cut at generic processing method
39
    processor,
0.81.4 by Ian Clatworthy
generalise RevisionLoader to RevisionStore as a repo abstraction
40
    revision_store,
0.64.5 by Ian Clatworthy
first cut at generic processing method
41
    )
0.64.1 by Ian Clatworthy
1st cut: gfi parser + --info processing method
42
43
0.64.41 by Ian Clatworthy
update multiple working trees if requested
44
# How many commits before automatically reporting progress
45
_DEFAULT_AUTO_PROGRESS = 1000
46
0.64.28 by Ian Clatworthy
checkpoint and count params to generic processor
47
# How many commits before automatically checkpointing
48
_DEFAULT_AUTO_CHECKPOINT = 10000
49
0.64.170 by Ian Clatworthy
add autopack option to fast-import
50
# How many checkpoints before automatically packing
51
_DEFAULT_AUTO_PACK = 4
52
0.64.44 by Ian Clatworthy
smart caching of serialised inventories
53
# How many inventories to cache
54
_DEFAULT_INV_CACHE_SIZE = 10
0.64.149 by Ian Clatworthy
larger default inventory cache for chk formats
55
_DEFAULT_CHK_INV_CACHE_SIZE = 100
0.64.44 by Ian Clatworthy
smart caching of serialised inventories
56
0.64.41 by Ian Clatworthy
update multiple working trees if requested
57
0.64.1 by Ian Clatworthy
1st cut: gfi parser + --info processing method
58
class GenericProcessor(processor.ImportProcessor):
    """An import processor that handles basic imports.

    Current features supported:

    * blobs are cached in memory
    * files and symlinks commits are supported
    * checkpoints automatically happen at a configurable frequency
      over and above the stream requested checkpoints
    * timestamped progress reporting, both automatic and stream requested
    * some basic statistics are dumped on completion.

    At checkpoints and on completion, the commit-id -> revision-id map is
    saved to a file called 'fastimport-id-map'. If the import crashes
    or is interrupted, it can be started again and this file will be
    used to skip over already loaded revisions. The format of each line
    is "commit-id revision-id" so commit-ids cannot include spaces.

    The supported parameters are:

    * info - name of a hints file holding the analysis generated
      by running the fast-import-info processor in verbose mode. When
      importing large repositories, this parameter is needed so
      that the importer knows what blobs to intelligently cache.

    * trees - update the working trees before completing.
      By default, the importer updates the repository
      and branches and the user needs to run 'bzr update' for the
      branches of interest afterwards.

    * count - only import this many commits then exit. If not set
      or negative, all commits are imported.

    * checkpoint - automatically checkpoint every n commits over and
      above any checkpoints contained in the import stream.
      The default is 10000.

    * autopack - pack every n checkpoints. The default is 4.

    * inv-cache - number of inventories to cache.
      If not set, the default is 100 for CHK formats and 10 otherwise.

    * mode - import algorithm to use: default, experimental or classic.

    * import-marks - name of file to read to load mark information from

    * export-marks - name of file to write to save mark information to
    """

    # Parameter names accepted on the command line; anything else is
    # rejected by the processor framework.
    known_params = [
        'info',
        'trees',
        'count',
        'checkpoint',
        'autopack',
        'inv-cache',
        'mode',
        'import-marks',
        'export-marks',
        ]
0.64.33 by Ian Clatworthy
make tree updating optional and minor UI improvements
118
0.64.196 by Ian Clatworthy
get tests passing again
119
    def __init__(self, bzrdir, params=None, verbose=False,
            prune_empty_dirs=True):
        """Create the processor.

        :param bzrdir: the control directory of the import target
        :param params: optional dict of processor parameters
        :param verbose: if True, report progress more often
        :param prune_empty_dirs: if True, directories left empty by the
            import are pruned
        """
        processor.ImportProcessor.__init__(self, bzrdir, params, verbose)
        self.prune_empty_dirs = prune_empty_dirs
123
0.64.1 by Ian Clatworthy
1st cut: gfi parser + --info processing method
124
    def pre_process(self):
        """Prepare for the import: load parameters, caches and stores."""
        self.note("Starting import ...")
        self._start_time = time.time()
        self._load_info_and_params()
        self.cache_mgr = cache_manager.CacheManager(self.info, self.verbose,
            self.inventory_cache_size)

        if self.params.get("import-marks") is not None:
            # Incremental import: seed the commit-id -> revision-id map
            # from the marks file rather than the id-map file.
            loaded_marks = marks_file.import_marks(
                self.params.get("import-marks"))
            if loaded_marks is not None:
                self.cache_mgr.revision_ids = loaded_marks[0]
            self.skip_total = False
            self.first_incremental_commit = True
        else:
            self.first_incremental_commit = False
            self.skip_total = self._init_id_map()
            if self.skip_total:
                self.note("Found %d commits already loaded - "
                    "skipping over these ...", self.skip_total)
        self._revision_count = 0

        # Tag name -> revision-id, filled in as tags are encountered
        self.tags = {}

        # The store used for committing revisions, chosen per repo format
        self.rev_store = self._revision_store_factory()

        # Disable autopacking if the repo format supports it.
        # THIS IS A HACK - there is no sanctioned way of doing this yet.
        if isinstance(self.repo, pack_repo.KnitPackRepository):
            self._original_max_pack_count = \
                self.repo._pack_collection._max_pack_count
            def _max_pack_count_for_import(total_revisions):
                return total_revisions + 1
            self.repo._pack_collection._max_pack_count = \
                _max_pack_count_for_import
        else:
            self._original_max_pack_count = None

        # Make groupcompress use the fast algorithm during importing.
        # We want to repack at the end anyhow when more information
        # is available to do a better job of saving space.
        try:
            from bzrlib import groupcompress
            groupcompress._FAST = True
        except ImportError:
            pass

        # Open the write group the import runs inside. It is committed at
        # the end; checkpointing closes it and starts a new one.
        self.repo.start_write_group()
175
176
    def _load_info_and_params(self):
        """Load the hints file (if any) and interpret the parameters.

        Sets the mode, commit-handler factory, progress/checkpoint/autopack
        frequencies, inventory cache size and commit limits used by the
        rest of the import.
        """
        # BUG FIX: this previously read
        #   self._mode = bool(self.params.get('mode', 'default'))
        # which made _mode always True (any non-empty string is truthy),
        # so the 'experimental' and 'classic' comparisons below could
        # never match and the mode parameter was silently ignored.
        # The mode is a string: 'default', 'experimental' or 'classic'.
        self._mode = self.params.get('mode', 'default')
        self._experimental = self._mode == 'experimental'

        # This is currently hard-coded but might be configurable via
        # parameters one day if that's needed
        repo_transport = self.repo.control_files._transport
        self.id_map_path = repo_transport.local_abspath("fastimport-id-map")

        # Load the info file, if any
        info_path = self.params.get('info')
        if info_path is not None:
            self.info = configobj.ConfigObj(info_path)
        else:
            self.info = None

        # Decide which CommitHandler to use
        self.supports_chk = getattr(self.repo._format, 'supports_chks', False)
        if self.supports_chk and self._mode == 'classic':
            note("Cannot use classic algorithm on CHK repositories"
                 " - using default one instead")
            self._mode = 'default'
        if self._mode == 'classic':
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryCommitHandler
        else:
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryDeltaCommitHandler

        # Decide how often to automatically report progress
        # (not a parameter yet)
        self.progress_every = _DEFAULT_AUTO_PROGRESS
        if self.verbose:
            # Verbose mode reports ten times as often
            self.progress_every = self.progress_every // 10

        # Decide how often (# of commits) to automatically checkpoint
        self.checkpoint_every = int(self.params.get('checkpoint',
            _DEFAULT_AUTO_CHECKPOINT))

        # Decide how often (# of checkpoints) to automatically pack
        self.checkpoint_count = 0
        self.autopack_every = int(self.params.get('autopack',
            _DEFAULT_AUTO_PACK))

        # Decide how big to make the inventory cache
        cache_size = int(self.params.get('inv-cache', -1))
        if cache_size == -1:
            if self.supports_chk:
                cache_size = _DEFAULT_CHK_INV_CACHE_SIZE
            else:
                cache_size = _DEFAULT_INV_CACHE_SIZE
        self.inventory_cache_size = cache_size

        # Find the maximum number of commits to import (None means all)
        # and prepare progress reporting. Just in case the info file
        # has an outdated count of commits, we store the max counts
        # at which we need to terminate separately to the total used
        # for progress tracking.
        try:
            self.max_commits = int(self.params['count'])
            if self.max_commits < 0:
                self.max_commits = None
        except KeyError:
            self.max_commits = None
        if self.info is not None:
            self.total_commits = int(self.info['Command counts']['commit'])
            if (self.max_commits is not None and
                self.total_commits > self.max_commits):
                self.total_commits = self.max_commits
        else:
            self.total_commits = self.max_commits
0.64.25 by Ian Clatworthy
slightly better progress reporting
247
0.81.4 by Ian Clatworthy
generalise RevisionLoader to RevisionStore as a repo abstraction
248
    def _revision_store_factory(self):
        """Make a RevisionStore based on what the repository supports."""
        # Newer repositories expose a 'revisions' attribute; prefer the
        # matching store implementation when present.
        if hasattr(self.repo, 'revisions'):
            return revision_store.RevisionStore2(self.repo)
        if not self._experimental:
            return revision_store.RevisionStore1(self.repo)

        # Experimental path: control when inventory fulltexts are stored.
        def fulltext_when(count):
            total = self.total_commits
            if total is not None and count == total:
                fulltext = True
            else:
                # Create an inventory fulltext every 200 revisions
                fulltext = count % 200 == 0
            if fulltext:
                self.note("%d commits - storing inventory as full-text",
                    count)
            return fulltext

        return revision_store.ImportRevisionStore1(
            self.repo, self.inventory_cache_size,
            fulltext_when=fulltext_when)
271
0.64.27 by Ian Clatworthy
1st cut at performance tuning
272
    def _process(self, command_iter):
        """Run the import, aborting any open write group on failure."""
        try:
            processor.ImportProcessor._process(self, command_iter)
        except:
            # Deliberately broad: whatever went wrong, the pending write
            # group must be aborted before the exception propagates.
            if self.repo is not None and self.repo.is_in_write_group():
                self.repo.abort_write_group()
            raise
280
0.64.6 by Ian Clatworthy
generic processing method working for one revision in one branch
281
    def post_process(self):
        """Finish the import: commit, update branches/trees, pack, report."""
        # Commit the current write group and checkpoint the id map
        self.repo.commit_write_group()
        self._save_id_map()

        if self.params.get("export-marks") is not None:
            marks_file.export_marks(self.params.get("export-marks"),
                self.cache_mgr.revision_ids)

        # Update the branches
        self.note("Updating branch information ...")
        updater = branch_updater.BranchUpdater(self.repo, self.branch,
            self.cache_mgr, helpers.invert_dictset(self.cache_mgr.heads),
            self.cache_mgr.last_ref, self.tags)
        branches_updated, branches_lost = updater.update()
        self._branch_count = len(branches_updated)

        # Tell the user about branches that were not created
        if branches_lost:
            if not self.repo.is_shared():
                self.warning("Cannot import multiple branches into "
                    "a standalone branch")
            self.warning("Not creating branches for these head revisions:")
            for lost in branches_lost:
                # Each entry is (branch name, head revision)
                self.note("\t %s = %s", lost[1], lost[0])

        # Update the working trees as requested
        self._tree_count = 0
        remind_about_update = True
        if self._branch_count == 0:
            self.note("no branches to update")
            self.note("no working trees to update")
            remind_about_update = False
        elif self.params.get('trees', False):
            trees = self._get_working_trees(branches_updated)
            if trees:
                self._update_working_trees(trees)
                remind_about_update = False
            else:
                self.warning("No working trees available to update")
        else:
            # Update just the trunk. (This is always the first branch
            # returned by the branch updater.)
            trunk_branch = branches_updated[0]
            trees = self._get_working_trees([trunk_branch])
            if trees:
                self._update_working_trees(trees)
                remind_about_update = self._branch_count > 1

        # Dump the cache stats now because we clear it before the final pack
        if self.verbose:
            self.cache_mgr.dump_stats()
        if self._original_max_pack_count:
            # We earlier disabled autopacking, creating one pack every
            # checkpoint instead. We now pack the repository to optimise
            # how data is stored.
            self.cache_mgr.clear_all()
            self._pack_repository()

        # Finish up by dumping stats & telling the user what to do next.
        self.dump_stats()
        if remind_about_update:
            # This message is explicitly not timestamped.
            note("To refresh the working tree for other branches, "
                "use 'bzr update' inside that branch.")
348
349
    def _update_working_trees(self, trees):
        """Bring each working tree in ``trees`` up to date."""
        if self.verbose:
            reporter = delta._ChangeReporter()
        else:
            reporter = None
        for tree in trees:
            self.note("Updating the working tree for %s ...", tree.basedir)
            tree.update(reporter)
            self._tree_count += 1
0.64.41 by Ian Clatworthy
update multiple working trees if requested
358
0.64.167 by Ian Clatworthy
incremental packing for chk formats
359
    def _pack_repository(self, final=True):
        """Pack the repository and delete the obsolete packs.

        :param final: if True, this is the last pack of the import, so
            first switch groupcompress back to optimising for disk space
        """
        # Before packing, free whatever memory we can and ensure
        # that groupcompress is configured to optimise disk space
        import gc
        if final:
            try:
                from bzrlib import groupcompress
            except ImportError:
                pass
            else:
                groupcompress._FAST = False
        gc.collect()
        self.note("Packing repository ...")
        self.repo.pack()

        # To be conservative, packing puts the old packs and
        # indices in obsolete_packs. We err on the side of
        # optimism and clear out that directory to save space.
        self.note("Removing obsolete packs ...")
        # TODO: Use a public API for this once one exists
        repo_transport = self.repo._pack_collection.transport
        repo_transport.clone('obsolete_packs').delete_multi(
            repo_transport.list_dir('obsolete_packs'))

        # If we're not done, free whatever memory we can
        if not final:
            gc.collect()
386
0.64.41 by Ian Clatworthy
update multiple working trees if requested
387
    def _get_working_trees(self, branches):
        """Get the working trees for branches in the repository."""
        result = []
        wt_expected = self.repo.make_working_trees()
        for branch in branches:
            if branch is None:
                continue
            if branch == self.branch:
                # The primary branch's tree was opened when the
                # processor started, if there is one.
                if self.working_tree:
                    result.append(self.working_tree)
            elif wt_expected:
                try:
                    result.append(branch.bzrdir.open_workingtree())
                except errors.NoWorkingTree:
                    self.warning("No working tree for branch %s", branch)
        return result
0.64.6 by Ian Clatworthy
generic processing method working for one revision in one branch
403
404
    def dump_stats(self):
        """Report how much was imported and how long it took."""
        elapsed = progress.str_tdelta(time.time() - self._start_time)
        # Revisions skipped on restart don't count as imported
        revisions = self._revision_count - self.skip_total
        branches = self._branch_count
        trees = self._tree_count
        self.note("Imported %d %s, updating %d %s and %d %s in %s",
            revisions,
            helpers.single_plural(revisions, "revision", "revisions"),
            branches, helpers.single_plural(branches, "branch", "branches"),
            trees, helpers.single_plural(trees, "tree", "trees"),
            elapsed)
0.64.28 by Ian Clatworthy
checkpoint and count params to generic processor
414
0.64.50 by Ian Clatworthy
cleanly restart after an interruption - basic mirroring
415
    def _init_id_map(self):
        """Load the id-map and check it matches the repository.

        :return: the number of entries in the map
        """
        # Currently, we just check the size. In the future, we might
        # decide to be more paranoid and check that the revision-ids
        # are identical as well.
        self.cache_mgr.revision_ids, known = idmapfile.load_id_map(
            self.id_map_path)
        existing_count = len(self.repo.all_revision_ids())
        # The repository may hold more revisions than the map knows
        # about, but never fewer.
        if existing_count < known:
            raise plugin_errors.BadRepositorySize(known, existing_count)
        return known
429
430
    def _save_id_map(self):
        """Persist the id-map to disk."""
        # The whole map is rewritten on every save. If that ever proves
        # too slow, switch to appending only the new entries.
        idmapfile.save_id_map(self.id_map_path, self.cache_mgr.revision_ids)
435
0.64.5 by Ian Clatworthy
first cut at generic processing method
436
    def blob_handler(self, cmd):
        """Process a BlobCommand."""
        # Marked blobs are keyed by their mark id; unmarked blobs are
        # keyed by the SHA of their content.
        if cmd.mark is None:
            key = osutils.sha_strings(cmd.data)
        else:
            key = cmd.id
        self.cache_mgr.store_blob(key, cmd.data)
0.64.5 by Ian Clatworthy
first cut at generic processing method
443
0.64.170 by Ian Clatworthy
add autopack option to fast-import
444
    def checkpoint_handler(self, cmd):
        """Process a CheckpointCommand.

        Commits the current repository write group, saves the id-map so a
        later restart can resume from here, optionally repacks, and then
        opens a fresh write group for subsequent commits.

        :param cmd: the CheckpointCommand; may be None when invoked for an
            automatic checkpoint from commit_handler.
        """
        # Commit the current write group and start a new one
        self.repo.commit_write_group()
        # Persist the id-map now so the data just committed is recoverable
        # after an interruption.
        self._save_id_map()
        self.checkpoint_count += 1
        # Repack every `autopack_every` checkpoints to keep pack counts down.
        if self.checkpoint_count % self.autopack_every == 0:
            self._pack_repository(final=False)
        self.repo.start_write_group()
0.64.5 by Ian Clatworthy
first cut at generic processing method
453
454
    def commit_handler(self, cmd):
        """Process a CommitCommand.

        Builds a revision from the command (delegating the heavy lifting to
        the configured commit handler), records its id, reports progress,
        and triggers automatic checkpoints or early termination as needed.
        """
        # Restart support: commits below skip_total were already imported
        # in a previous run. We still track heads and drain the stream so
        # our state stays consistent, but create nothing.
        if self.skip_total and self._revision_count < self.skip_total:
            self.cache_mgr.track_heads(cmd)
            # Check that we really do know about this commit-id
            if cmd.id not in self.cache_mgr.revision_ids:
                raise plugin_errors.BadRestart(cmd.id)
            # Consume the file commands and free any non-sticky blobs
            for fc in cmd.file_iter():
                pass
            self.cache_mgr._blobs = {}
            self._revision_count += 1
            return
        if self.first_incremental_commit:
            self.first_incremental_commit = None
            parents = self.cache_mgr.track_heads(cmd)

        # 'Commit' the revision and report progress
        handler = self.commit_handler_factory(cmd, self.cache_mgr,
            self.rev_store, verbose=self.verbose,
            prune_empty_dirs=self.prune_empty_dirs)
        try:
            handler.process()
        except:
            # Bare except is deliberate: we re-raise immediately, and want
            # the triggering commit reported for *any* failure. Routed via
            # self.warning for consistency with the rest of this class
            # (rather than a raw Python 2 print statement).
            self.warning("ABORT: exception occurred processing commit %s"
                % cmd.id)
            raise
        self.cache_mgr.revision_ids[cmd.id] = handler.revision_id
        self._revision_count += 1
        self.report_progress("(%s)" % cmd.id)

        # Check if we should finish up or automatically checkpoint
        if (self.max_commits is not None and
            self._revision_count >= self.max_commits):
            self.note("Stopping after reaching requested count of commits")
            self.finished = True
        elif self._revision_count % self.checkpoint_every == 0:
            self.note("%d commits - automatic checkpoint triggered",
                self._revision_count)
            self.checkpoint_handler(None)
0.64.1 by Ian Clatworthy
1st cut: gfi parser + --info processing method
493
0.64.25 by Ian Clatworthy
slightly better progress reporting
494
    def report_progress(self, details=''):
        """Log commit-throughput progress every `progress_every` commits.

        :param details: extra text appended to the progress message
        """
        if self._revision_count % self.progress_every != 0:
            return
        if self.total_commits is None:
            counts = "%d" % (self._revision_count,)
        else:
            counts = "%d/%d" % (self._revision_count, self.total_commits)
        minutes = (time.time() - self._start_time) / 60
        # Restart-skipped commits don't count towards the import rate.
        added = self._revision_count - self.skip_total
        rate = added * 1.0 / minutes
        # One decimal place is only interesting for slow imports.
        fmt = "at %.0f/minute " if rate > 10 else "at %.1f/minute "
        rate_str = fmt % rate
        self.note("%s commits processed %s%s" % (counts, rate_str, details))
0.64.25 by Ian Clatworthy
slightly better progress reporting
508
0.64.1 by Ian Clatworthy
1st cut: gfi parser + --info processing method
509
    def progress_handler(self, cmd):
        """Process a ProgressCommand."""
        # A progress bar could be used here instead of a plain note.
        message = cmd.message
        self.note("progress %s" % (message,))
0.64.5 by Ian Clatworthy
first cut at generic processing method
513
514
    def reset_handler(self, cmd):
        """Process a ResetCommand."""
        prefix = 'refs/tags/'
        if cmd.ref.startswith(prefix):
            # A reset on a tag ref defines a lightweight tag - but only
            # when it carries a 'from' clause to point the tag at.
            name = cmd.ref[len(prefix):]
            if cmd.from_ is None:
                if self.verbose:
                    self.warning("ignoring reset refs/tags/%s - no from clause"
                        % name)
            else:
                self._set_tag(name, cmd.from_)
            return

        # Any other ref (re)positions a branch head.
        if cmd.from_ is not None:
            self.cache_mgr.track_heads_for_ref(cmd.ref, cmd.from_)
0.64.5 by Ian Clatworthy
first cut at generic processing method
527
528
    def tag_handler(self, cmd):
        """Process a TagCommand."""
        # A tag without a 'from' clause has nothing to point at.
        if cmd.from_ is None:
            self.warning("ignoring tag %s - no from clause" % cmd.id)
        else:
            self._set_tag(cmd.id, cmd.from_)
0.64.12 by Ian Clatworthy
lightweight tags, filter processor and param validation
534
535
    def _set_tag(self, name, from_):
        """Define a tag given a name and import 'from' reference."""
        # Tag names arrive as raw bytes from the import stream; undecodable
        # bytes are replaced rather than aborting the import.
        tag = name.decode('utf-8', 'replace')
        rev_id = self.cache_mgr.revision_ids[from_]
        self.tags[tag] = rev_id