/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to processors/generic_processor.py

incremental packing for chk formats

@@ -178,8 +178,8 @@
             self.info = None
 
         # Decide which CommitHandler to use
-        supports_chk = getattr(self.repo._format, 'supports_chks', False)
-        if supports_chk:
+        self.supports_chk = getattr(self.repo._format, 'supports_chks', False)
+        if self.supports_chk:
             self.commit_handler_factory = \
                 bzr_commit_handler.CHKInventoryCommitHandler
         else:
@@ -199,7 +199,7 @@
         # Decide how big to make the inventory cache
         cache_size = int(self.params.get('inv-cache', -1))
         if cache_size == -1:
-            if supports_chk:
+            if self.supports_chk:
                 cache_size = _DEFAULT_CHK_INV_CACHE_SIZE
             else:
                 cache_size = _DEFAULT_INV_CACHE_SIZE
@@ -320,18 +320,19 @@
             note("To refresh the working tree for a branch, "
                 "use 'bzr update'.")
 
-    def _pack_repository(self):
+    def _pack_repository(self, final=True):
         # Before packing, free whatever memory we can and ensure
         # that groupcompress is configured to optimise disk space
         import gc
-        self.cache_mgr.clear_all()
+        if final:
+            self.cache_mgr.clear_all()
+            try:
+                from bzrlib.plugins.groupcompress import groupcompress
+            except ImportError:
+                pass
+            else:
+                groupcompress._FAST = False
         gc.collect()
-        try:
-            from bzrlib.plugins.groupcompress import groupcompress
-        except ImportError:
-            pass
-        else:
-            groupcompress._FAST = False
         self.note("Packing repository ...")
         self.repo.pack()
 
@@ -344,6 +345,10 @@
         repo_transport.clone('obsolete_packs').delete_multi(
             repo_transport.list_dir('obsolete_packs'))
 
+        # If we're not done, free whatever memory we can
+        if not final:
+            gc.collect()
+
     def _get_working_trees(self, branches):
         """Get the working trees for branches in the repository."""
         result = []
@@ -404,11 +409,13 @@
             dataref = osutils.sha_strings(cmd.data)
         self.cache_mgr.store_blob(dataref, cmd.data)
 
-    def checkpoint_handler(self, cmd):
+    def checkpoint_handler(self, cmd, pack_repo=False):
         """Process a CheckpointCommand."""
         # Commit the current write group and start a new one
         self.repo.commit_write_group()
         self._save_id_map()
+        if pack_repo:
+            self._pack_repository(final=False)
         self.repo.start_write_group()
 
     def commit_handler(self, cmd):
@@ -444,7 +451,7 @@
         elif self._revision_count % self.checkpoint_every == 0:
             self.note("%d commits - automatic checkpoint triggered",
                 self._revision_count)
-            self.checkpoint_handler(None)
+            self.checkpoint_handler(None, pack_repo=self.supports_chk)
 
     def report_progress(self, details=''):
         if self._revision_count % self.progress_every == 0:
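
For orientation, a minimal standalone sketch of the control flow this change introduces: on CHK-capable formats, each automatic checkpoint now triggers an intermediate pack (final=False) that skips cache clearing and the groupcompress tuning, while the end-of-import pack keeps the previous behaviour. The free function names and the bare repo/cache_mgr parameters below are illustrative only; the real methods live on the processor class in processors/generic_processor.py.

# Illustrative sketch only -- repo and cache_mgr stand in for the
# processor's attributes; only the control flow mirrors the diff.

def pack_repository(repo, cache_mgr, final=True):
    import gc
    if final:
        # Only the final pack clears caches and lets groupcompress
        # optimise for disk space rather than speed.
        cache_mgr.clear_all()
    gc.collect()
    repo.pack()
    if not final:
        # Intermediate packs just reclaim whatever memory they can.
        gc.collect()

def checkpoint(repo, cache_mgr, supports_chk):
    # A checkpoint commits the current write group; with this change,
    # CHK formats also pack incrementally before starting the next one.
    repo.commit_write_group()
    if supports_chk:
        pack_repository(repo, cache_mgr, final=False)
    repo.start_write_group()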