# Copyright (C) 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

"""Import processor that supports all Bazaar repository formats."""


import time
from bzrlib import (
    bzrdir,
    delta,
    errors,
    osutils,
    progress,
    )
from bzrlib.repofmt import pack_repo
from bzrlib.trace import note, mutter
import bzrlib.util.configobj.configobj as configobj
from bzrlib.plugins.fastimport import (
    branch_updater,
    bzr_commit_handler,
    cache_manager,
    errors as plugin_errors,
    helpers,
    idmapfile,
    marks_file,
    processor,
    revision_store,
    )


# How many commits before automatically reporting progress
_DEFAULT_AUTO_PROGRESS = 1000

# How many commits before automatically checkpointing
_DEFAULT_AUTO_CHECKPOINT = 10000

# How many checkpoints before automatically packing
_DEFAULT_AUTO_PACK = 4

# How many inventories to cache
_DEFAULT_INV_CACHE_SIZE = 10
_DEFAULT_CHK_INV_CACHE_SIZE = 100


class GenericProcessor(processor.ImportProcessor):
    """An import processor that handles basic imports.

    Current features supported:

    * blobs are cached in memory
    * commits of files and symlinks are supported
    * checkpoints automatically happen at a configurable frequency
      over and above the stream requested checkpoints
    * timestamped progress reporting, both automatic and stream requested
    * some basic statistics are dumped on completion.

    At checkpoints and on completion, the commit-id -> revision-id map is
    saved to a file called 'fastimport-id-map'. If the import crashes
    or is interrupted, it can be started again and this file will be
    used to skip over already loaded revisions. The format of each line
    is "commit-id revision-id" so commit-ids cannot include spaces.

    Here are the supported parameters:

    * info - name of a hints file holding the analysis generated
      by running the fast-import-info processor in verbose mode. When
      importing large repositories, this parameter is needed so
      that the importer knows what blobs to intelligently cache.

    * trees - update the working trees before completing.
      By default, the importer updates the repository
      and branches and the user needs to run 'bzr update' for the
      branches of interest afterwards.

    * count - only import this many commits then exit. If not set
      or negative, all commits are imported.

    * checkpoint - automatically checkpoint every n commits over and
      above any checkpoints contained in the import stream.
      The default is 10000.

    * autopack - pack every n checkpoints. The default is 4.

    * inv-cache - number of inventories to cache.
      If not set, the default is 100 for CHK formats and 10 otherwise.

    * mode - import algorithm to use: default, experimental or classic.

    * import-marks - name of file to read to load mark information from

    * export-marks - name of file to write to save mark information to
    """

    known_params = [
        'info',
        'trees',
        'count',
        'checkpoint',
        'autopack',
        'inv-cache',
        'mode',
        'import-marks',
        'export-marks',
        ]

    def __init__(self, bzrdir, params=None, verbose=False,
            prune_empty_dirs=True):
        processor.ImportProcessor.__init__(self, bzrdir, params, verbose)
        self.prune_empty_dirs = prune_empty_dirs

    def pre_process(self):
        self._start_time = time.time()
        self._load_info_and_params()
        self.cache_mgr = cache_manager.CacheManager(self.info, self.verbose,
            self.inventory_cache_size)

        if self.params.get("import-marks") is not None:
            mark_info = marks_file.import_marks(self.params.get("import-marks"))
            if mark_info is not None:
                self.cache_mgr.revision_ids = mark_info[0]
            self.skip_total = False
            self.first_incremental_commit = True
        else:
            self.first_incremental_commit = False
            self.skip_total = self._init_id_map()
            if self.skip_total:
                self.note("Found %d commits already loaded - "
                    "skipping over these ...", self.skip_total)
        self._revision_count = 0

        # mapping of tag name to revision_id
        self.tags = {}

        # Create the revision store to use for committing, if any
        self.rev_store = self._revision_store_factory()

        # Disable autopacking if the repo format supports it.
        # THIS IS A HACK - there is no sanctioned way of doing this yet.
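        # Added note: _max_pack_count normally tells the pack collection how
        # many packs are acceptable for a given revision count, and autopack
        # fires when that limit is exceeded. Substituting a function that
        # always returns more packs than could possibly exist keeps the
        # threshold from ever being reached during the import (assumption
        # based on the pack collection's autopack behaviour).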
        if isinstance(self.repo, pack_repo.KnitPackRepository):
            self._original_max_pack_count = \
                self.repo._pack_collection._max_pack_count
            def _max_pack_count_for_import(total_revisions):
                return total_revisions + 1
            self.repo._pack_collection._max_pack_count = \
                _max_pack_count_for_import
        else:
            self._original_max_pack_count = None

        # Make groupcompress use the fast algorithm during importing.
        # We want to repack at the end anyhow when more information
        # is available to do a better job of saving space.
        try:
            from bzrlib import groupcompress
            groupcompress._FAST = True
        except ImportError:
            pass

        # Create a write group. This is committed at the end of the import.
        # Checkpointing closes the current one and starts a new one.
        self.repo.start_write_group()

    def _load_info_and_params(self):
        self._mode = self.params.get('mode', 'default')
        self._experimental = self._mode == 'experimental'
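        # Added note: 'experimental' only affects which RevisionStore is
        # chosen in _revision_store_factory(); 'classic' selects the
        # whole-inventory commit handler below, while any other mode uses
        # the inventory-delta handler.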

        # This is currently hard-coded but might be configurable via
        # parameters one day if that's needed
        repo_transport = self.repo.control_files._transport
        self.id_map_path = repo_transport.local_abspath("fastimport-id-map")

        # Load the info file, if any
        info_path = self.params.get('info')
        if info_path is not None:
            self.info = configobj.ConfigObj(info_path)
        else:
            self.info = None

        # Decide which CommitHandler to use
        self.supports_chk = getattr(self.repo._format, 'supports_chks', False)
        if self.supports_chk and self._mode == 'classic':
            note("Cannot use classic algorithm on CHK repositories"
                 " - using default one instead")
            self._mode = 'default'
        if self._mode == 'classic':
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryCommitHandler
        else:
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryDeltaCommitHandler

        # Decide how often to automatically report progress
        # (not a parameter yet)
        self.progress_every = _DEFAULT_AUTO_PROGRESS
        if self.verbose:
            self.progress_every = self.progress_every / 10

        # Decide how often (# of commits) to automatically checkpoint
        self.checkpoint_every = int(self.params.get('checkpoint',
            _DEFAULT_AUTO_CHECKPOINT))

        # Decide how often (# of checkpoints) to automatically pack
        self.checkpoint_count = 0
        self.autopack_every = int(self.params.get('autopack',
            _DEFAULT_AUTO_PACK))

        # Decide how big to make the inventory cache
        cache_size = int(self.params.get('inv-cache', -1))
        if cache_size == -1:
            if self.supports_chk:
                cache_size = _DEFAULT_CHK_INV_CACHE_SIZE
            else:
                cache_size = _DEFAULT_INV_CACHE_SIZE
        self.inventory_cache_size = cache_size

        # Find the maximum number of commits to import (None means all)
        # and prepare progress reporting. Just in case the info file
        # has an outdated count of commits, we store the max counts
        # at which we need to terminate separately to the total used
        # for progress tracking.
        try:
            self.max_commits = int(self.params['count'])
            if self.max_commits < 0:
                self.max_commits = None
        except KeyError:
            self.max_commits = None
        if self.info is not None:
            self.total_commits = int(self.info['Command counts']['commit'])
            if (self.max_commits is not None and
                self.total_commits > self.max_commits):
                self.total_commits = self.max_commits
        else:
            self.total_commits = self.max_commits

    def _revision_store_factory(self):
        """Make a RevisionStore based on what the repository supports."""
        new_repo_api = hasattr(self.repo, 'revisions')
        if new_repo_api:
            return revision_store.RevisionStore2(self.repo)
        elif not self._experimental:
            return revision_store.RevisionStore1(self.repo)
        else:
            def fulltext_when(count):
                total = self.total_commits
                if total is not None and count == total:
                    fulltext = True
                else:
                    # Create an inventory fulltext every 200 revisions
                    fulltext = count % 200 == 0
                if fulltext:
                    self.note("%d commits - storing inventory as full-text",
                        count)
                return fulltext

            return revision_store.ImportRevisionStore1(
                self.repo, self.inventory_cache_size,
                fulltext_when=fulltext_when)

    def _process(self, command_iter):
        # if anything goes wrong, abort the write group if any
        try:
            processor.ImportProcessor._process(self, command_iter)
        except:
            if self.repo is not None and self.repo.is_in_write_group():
                self.repo.abort_write_group()
            raise

    def post_process(self):
        # Commit the current write group and checkpoint the id map
        self.repo.commit_write_group()
        self._save_id_map()

        if self.params.get("export-marks") is not None:
            marks_file.export_marks(self.params.get("export-marks"),
                self.cache_mgr.revision_ids)

        # Update the branches
        self.note("Updating branch information ...")
        updater = branch_updater.BranchUpdater(self.repo, self.branch,
            self.cache_mgr, helpers.invert_dictset(self.cache_mgr.heads),
            self.cache_mgr.last_ref, self.tags)
        branches_updated, branches_lost = updater.update()
        self._branch_count = len(branches_updated)

        # Tell the user about branches that were not created
        if branches_lost:
            if not self.repo.is_shared():
                self.warning("Cannot import multiple branches into "
                    "a standalone branch")
            self.warning("Not creating branches for these head revisions:")
            for lost_info in branches_lost:
                head_revision = lost_info[1]
                branch_name = lost_info[0]
                self.note("\t %s = %s", head_revision, branch_name)

        # Update the working trees as requested
        self._tree_count = 0
        remind_about_update = True
        if self._branch_count == 0:
            self.note("no branches to update")
            self.note("no working trees to update")
            remind_about_update = False
        elif self.params.get('trees', False):
            trees = self._get_working_trees(branches_updated)
            if trees:
                self._update_working_trees(trees)
                remind_about_update = False
            else:
                self.warning("No working trees available to update")
        else:
            # Update just the trunk. (This is always the first branch
            # returned by the branch updater.)
            trunk_branch = branches_updated[0]
            trees = self._get_working_trees([trunk_branch])
            if trees:
                self._update_working_trees(trees)
                remind_about_update = self._branch_count > 1

        # Dump the cache stats now because we clear it before the final pack
        if self.verbose:
            self.cache_mgr.dump_stats()
        if self._original_max_pack_count:
            # We earlier disabled autopacking, creating one pack every
            # checkpoint instead. We now pack the repository to optimise
            # how data is stored.
            self.cache_mgr.clear_all()
            self._pack_repository()

        # Finish up by dumping stats & telling the user what to do next.
        self.dump_stats()
        if remind_about_update:
            # This message is explicitly not timestamped.
            note("To refresh the working tree for other branches, "
                "use 'bzr update' inside that branch.")

    def _update_working_trees(self, trees):
        if self.verbose:
            reporter = delta._ChangeReporter()
        else:
            reporter = None
        for wt in trees:
            self.note("Updating the working tree for %s ...", wt.basedir)
            wt.update(reporter)
            self._tree_count += 1

    def _pack_repository(self, final=True):
        # Before packing, free whatever memory we can and ensure
        # that groupcompress is configured to optimise disk space
        import gc
        if final:
            try:
                from bzrlib import groupcompress
            except ImportError:
                pass
            else:
                groupcompress._FAST = False
        gc.collect()
        self.note("Packing repository ...")
        self.repo.pack()

        # To be conservative, packing puts the old packs and
        # indices in obsolete_packs. We err on the side of
        # optimism and clear out that directory to save space.
        self.note("Removing obsolete packs ...")
        # TODO: Use a public API for this once one exists
        repo_transport = self.repo._pack_collection.transport
        repo_transport.clone('obsolete_packs').delete_multi(
            repo_transport.list_dir('obsolete_packs'))

        # If we're not done, free whatever memory we can
        if not final:
            gc.collect()

    def _get_working_trees(self, branches):
        """Get the working trees for branches in the repository."""
        result = []
        wt_expected = self.repo.make_working_trees()
        for br in branches:
            if br is None:
                continue
            elif br == self.branch:
                if self.working_tree:
                    result.append(self.working_tree)
            elif wt_expected:
                try:
                    result.append(br.bzrdir.open_workingtree())
                except errors.NoWorkingTree:
                    self.warning("No working tree for branch %s", br)
        return result

    def dump_stats(self):
        time_required = progress.str_tdelta(time.time() - self._start_time)
        rc = self._revision_count - self.skip_total
        bc = self._branch_count
        wtc = self._tree_count
        self.note("Imported %d %s, updating %d %s and %d %s in %s",
            rc, helpers.single_plural(rc, "revision", "revisions"),
            bc, helpers.single_plural(bc, "branch", "branches"),
            wtc, helpers.single_plural(wtc, "tree", "trees"),
            time_required)

    def _init_id_map(self):
        """Load the id-map and check it matches the repository.

        :return: the number of entries in the map
        """
        # Currently, we just check the size. In the future, we might
        # decide to be more paranoid and check that the revision-ids
        # are identical as well.
        self.cache_mgr.revision_ids, known = idmapfile.load_id_map(
            self.id_map_path)
        existing_count = len(self.repo.all_revision_ids())
        if existing_count < known:
            raise plugin_errors.BadRepositorySize(known, existing_count)
        return known

    def _save_id_map(self):
        """Save the id-map."""
        # Save the whole lot every time. If this proves a problem, we can
        # change to 'append just the new ones' at a later time.
        idmapfile.save_id_map(self.id_map_path, self.cache_mgr.revision_ids)

    def blob_handler(self, cmd):
        """Process a BlobCommand."""
        if cmd.mark is not None:
            dataref = cmd.id
        else:
            dataref = osutils.sha_strings(cmd.data)
        self.cache_mgr.store_blob(dataref, cmd.data)

    def checkpoint_handler(self, cmd):
        """Process a CheckpointCommand."""
        # Commit the current write group and start a new one
        self.repo.commit_write_group()
        self._save_id_map()
        self.checkpoint_count += 1
        if self.checkpoint_count % self.autopack_every == 0:
            self._pack_repository(final=False)
        self.repo.start_write_group()

    def commit_handler(self, cmd):
        """Process a CommitCommand."""
        if self.skip_total and self._revision_count < self.skip_total:
            self.cache_mgr.track_heads(cmd)
            # Check that we really do know about this commit-id
            if not self.cache_mgr.revision_ids.has_key(cmd.id):
                raise plugin_errors.BadRestart(cmd.id)
            # Consume the file commands and free any non-sticky blobs
            for fc in cmd.file_iter():
                pass
            self.cache_mgr._blobs = {}
            self._revision_count += 1
            return
        if self.first_incremental_commit:
            self.first_incremental_commit = None
            parents = self.cache_mgr.track_heads(cmd)

        # 'Commit' the revision and report progress
        handler = self.commit_handler_factory(cmd, self.cache_mgr,
            self.rev_store, verbose=self.verbose,
            prune_empty_dirs=self.prune_empty_dirs)
        try:
            handler.process()
        except:
            print "ABORT: exception occurred processing commit %s" % (cmd.id)
            raise
        self.cache_mgr.revision_ids[cmd.id] = handler.revision_id
        self._revision_count += 1
        self.report_progress("(%s)" % cmd.id)

        # Check if we should finish up or automatically checkpoint
        if (self.max_commits is not None and
            self._revision_count >= self.max_commits):
            self.note("Stopping after reaching requested count of commits")
            self.finished = True
        elif self._revision_count % self.checkpoint_every == 0:
            self.note("%d commits - automatic checkpoint triggered",
                self._revision_count)
            self.checkpoint_handler(None)

    def report_progress(self, details=''):
        if self._revision_count % self.progress_every == 0:
            if self.total_commits is not None:
                counts = "%d/%d" % (self._revision_count, self.total_commits)
            else:
                counts = "%d" % (self._revision_count,)
            minutes = (time.time() - self._start_time) / 60
            revisions_added = self._revision_count - self.skip_total
            rate = revisions_added * 1.0 / minutes
            if rate > 10:
                rate_str = "at %.0f/minute " % rate
            else:
                rate_str = "at %.1f/minute " % rate
            self.note("%s commits processed %s%s" % (counts, rate_str, details))

    def progress_handler(self, cmd):
        """Process a ProgressCommand."""
        # We could use a progress bar here instead
        self.note("progress %s" % (cmd.message,))

    def reset_handler(self, cmd):
        """Process a ResetCommand."""
        if cmd.ref.startswith('refs/tags/'):
            tag_name = cmd.ref[len('refs/tags/'):]
            if cmd.from_ is not None:
                self._set_tag(tag_name, cmd.from_)
            elif self.verbose:
                self.warning("ignoring reset refs/tags/%s - no from clause"
                    % tag_name)
            return

        if cmd.from_ is not None:
            self.cache_mgr.track_heads_for_ref(cmd.ref, cmd.from_)

    def tag_handler(self, cmd):
        """Process a TagCommand."""
        if cmd.from_ is not None:
            self._set_tag(cmd.id, cmd.from_)
        else:
            self.warning("ignoring tag %s - no from clause" % cmd.id)

    def _set_tag(self, name, from_):
        """Define a tag given a name and import 'from' reference."""
        bzr_tag_name = name.decode('utf-8', 'replace')
        bzr_rev_id = self.cache_mgr.revision_ids[from_]
        self.tags[bzr_tag_name] = bzr_rev_id