# Copyright (C) 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Import processor that supports all Bazaar repository formats."""

from __future__ import absolute_import

import time
from .... import (
    debug,
    delta,
    errors,
    osutils,
    progress,
    )
from ....repofmt.knitpack_repo import KnitPackRepository
from ....trace import (
    mutter,
    note,
    warning,
    )
import configobj
from .. import (
    branch_updater,
    cache_manager,
    helpers,
    idmapfile,
    marks_file,
    revision_store,
    )
from fastimport import (
    commands,
    errors as plugin_errors,
    processor,
    )


# How many commits before automatically reporting progress
_DEFAULT_AUTO_PROGRESS = 1000

# How many commits before automatically checkpointing
_DEFAULT_AUTO_CHECKPOINT = 10000

# How many checkpoints before automatically packing
_DEFAULT_AUTO_PACK = 4

# How many inventories to cache
_DEFAULT_INV_CACHE_SIZE = 1
_DEFAULT_CHK_INV_CACHE_SIZE = 1


class GenericProcessor(processor.ImportProcessor):
    """An import processor that handles basic imports.

    Current features supported:

    * blobs are cached in memory
    * file and symlink commits are supported
    * checkpoints automatically happen at a configurable frequency
      over and above the stream requested checkpoints
    * timestamped progress reporting, both automatic and stream requested
    * some basic statistics are dumped on completion.

    At checkpoints and on completion, the commit-id -> revision-id map is
    saved to a file called 'fastimport-id-map'. If the import crashes
    or is interrupted, it can be started again and this file will be
    used to skip over already loaded revisions. The format of each line
    is "commit-id revision-id" so commit-ids cannot include spaces.

    Here are the supported parameters:

    * info - name of a hints file holding the analysis generated
      by running the fast-import-info processor in verbose mode. When
      importing large repositories, this parameter is needed so
      that the importer knows what blobs to intelligently cache.

    * trees - update the working trees before completing.
      By default, the importer updates the repository
      and branches and the user needs to run 'bzr update' for the
      branches of interest afterwards.

    * count - only import this many commits then exit. If not set
      or negative, all commits are imported.

    * checkpoint - automatically checkpoint every n commits over and
      above any checkpoints contained in the import stream.
      The default is 10000.

    * autopack - pack every n checkpoints. The default is 4.

    * inv-cache - number of inventories to cache.
      If not set, the default is 1.

    * mode - import algorithm to use: default, experimental or classic.

    * import-marks - name of file to read to load mark information from

    * export-marks - name of file to write to save mark information to
    """

    known_params = [
        'info',
        'trees',
        'count',
        'checkpoint',
        'autopack',
        'inv-cache',
        'mode',
        'import-marks',
        'export-marks',
        ]
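
    # Rough sketch of how a front end might drive this processor. This is
    # illustrative only; the exact wiring depends on the fastimport
    # parser/front end in use:
    #
    #   params = {'checkpoint': 1000, 'trees': True}
    #   proc = GenericProcessor(bzrdir, params=params, verbose=True)
    #   proc.process(command_iter)   # commands supplied by a fastimport parser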

    def __init__(self, bzrdir, params=None, verbose=False, outf=None,
            prune_empty_dirs=True):
        processor.ImportProcessor.__init__(self, params, verbose)
        self.prune_empty_dirs = prune_empty_dirs
        self.controldir = bzrdir
        try:
            # Might be inside a branch
            (self.working_tree, self.branch) = bzrdir._get_tree_branch()
            self.repo = self.branch.repository
        except errors.NotBranchError:
            # Must be inside a repository
            self.working_tree = None
            self.branch = None
            self.repo = bzrdir.open_repository()
0.64.1
by Ian Clatworthy
1st cut: gfi parser + --info processing method |
141 |
def pre_process(self): |
|
0.64.26
by Ian Clatworthy
more progress reporting tweaks |
142 |
self._start_time = time.time() |
|
0.64.28
by Ian Clatworthy
checkpoint and count params to generic processor |
143 |
self._load_info_and_params() |
|
0.102.18
by Ian Clatworthy
Tweak some diagnostic messages |
144 |
if self.total_commits: |
145 |
self.note("Starting import of %d commits ..." % |
|
146 |
(self.total_commits,)) |
|
147 |
else: |
|
148 |
self.note("Starting import ...") |
|
|
0.78.3
by Ian Clatworthy
move GenericCacheManager into its own module |
149 |
self.cache_mgr = cache_manager.CacheManager(self.info, self.verbose, |
|
0.64.44
by Ian Clatworthy
smart caching of serialised inventories |
150 |
self.inventory_cache_size) |
|
0.129.2
by Jelmer Vernooij
Use lookup functions for committish. |
151 |
|
|
0.64.82
by Ian Clatworthy
Merge Pieter de Bie's export-fixes branch |
152 |
if self.params.get("import-marks") is not None: |
|
0.79.2
by Ian Clatworthy
extend & use marks_file API |
153 |
mark_info = marks_file.import_marks(self.params.get("import-marks")) |
154 |
if mark_info is not None: |
|
|
0.129.2
by Jelmer Vernooij
Use lookup functions for committish. |
155 |
self.cache_mgr.marks = mark_info |
|
0.68.7
by Pieter de Bie
Add importing and exporting of marks to bzr-fastimport |
156 |
self.skip_total = False |
157 |
self.first_incremental_commit = True |
|
158 |
else: |
|
159 |
self.first_incremental_commit = False |
|
160 |
self.skip_total = self._init_id_map() |
|
161 |
if self.skip_total: |
|
162 |
self.note("Found %d commits already loaded - " |
|
163 |
"skipping over these ...", self.skip_total) |
|
|
0.64.50
by Ian Clatworthy
cleanly restart after an interruption - basic mirroring |
164 |
self._revision_count = 0 |
165 |
||
166 |
# mapping of tag name to revision_id
|
|
167 |
self.tags = {} |
|
168 |
||
|
0.81.4
by Ian Clatworthy
generalise RevisionLoader to RevisionStore as a repo abstraction |
169 |
# Create the revision store to use for committing, if any
|
170 |
self.rev_store = self._revision_store_factory() |
|
|
0.64.28
by Ian Clatworthy
checkpoint and count params to generic processor |
171 |
|
|
0.64.51
by Ian Clatworthy
disable autopacking |
172 |
# Disable autopacking if the repo format supports it.
|
173 |
# THIS IS A HACK - there is no sanctioned way of doing this yet.
|
|
|
0.64.313
by Jelmer Vernooij
Support both locations for KnitPackRepository. |
174 |
if isinstance(self.repo, KnitPackRepository): |
|
0.64.51
by Ian Clatworthy
disable autopacking |
175 |
self._original_max_pack_count = \ |
176 |
self.repo._pack_collection._max_pack_count |
|
177 |
def _max_pack_count_for_import(total_revisions): |
|
178 |
return total_revisions + 1 |
|
179 |
self.repo._pack_collection._max_pack_count = \ |
|
180 |
_max_pack_count_for_import
|
|
181 |
else: |
|
182 |
self._original_max_pack_count = None |
|
|
0.64.144
by Ian Clatworthy
make groupcompress _FAST during import |
183 |
|
184 |
# Make groupcompress use the fast algorithm during importing.
|
|
185 |
# We want to repack at the end anyhow when more information
|
|
186 |
# is available to do a better job of saving space.
|
|
187 |
try: |
|
|
6628.1.2
by Jelmer Vernooij
Fix imports, move exporter.py, drop explorer metadata. |
188 |
from .... import groupcompress |
|
0.64.144
by Ian Clatworthy
make groupcompress _FAST during import |
189 |
groupcompress._FAST = True |
190 |
except ImportError: |
|
191 |
pass
|
|
192 |
||
|
0.64.28
by Ian Clatworthy
checkpoint and count params to generic processor |
193 |
# Create a write group. This is committed at the end of the import.
|
194 |
# Checkpointing closes the current one and starts a new one.
|
|
195 |
self.repo.start_write_group() |
|
196 |
||

    def _load_info_and_params(self):
        from .. import bzr_commit_handler
        self._mode = self.params.get('mode', 'default')
        self._experimental = self._mode == 'experimental'

        # This is currently hard-coded but might be configurable via
        # parameters one day if that's needed
        repo_transport = self.repo.control_files._transport
        self.id_map_path = repo_transport.local_abspath("fastimport-id-map")

        # Load the info file, if any
        info_path = self.params.get('info')
        if info_path is not None:
            self.info = configobj.ConfigObj(info_path)
        else:
            self.info = None

        # Decide which CommitHandler to use
        self.supports_chk = getattr(self.repo._format, 'supports_chks', False)
        if self.supports_chk and self._mode == 'classic':
            note("Cannot use classic algorithm on CHK repositories"
                " - using default one instead")
            self._mode = 'default'
        if self._mode == 'classic':
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryCommitHandler
        else:
            self.commit_handler_factory = \
                bzr_commit_handler.InventoryDeltaCommitHandler

        # Decide how often to automatically report progress
        # (not a parameter yet)
        self.progress_every = _DEFAULT_AUTO_PROGRESS
        if self.verbose:
            self.progress_every = self.progress_every / 10

        # Decide how often (# of commits) to automatically checkpoint
        self.checkpoint_every = int(self.params.get('checkpoint',
            _DEFAULT_AUTO_CHECKPOINT))

        # Decide how often (# of checkpoints) to automatically pack
        self.checkpoint_count = 0
        self.autopack_every = int(self.params.get('autopack',
            _DEFAULT_AUTO_PACK))

        # Decide how big to make the inventory cache
        cache_size = int(self.params.get('inv-cache', -1))
        if cache_size == -1:
            if self.supports_chk:
                cache_size = _DEFAULT_CHK_INV_CACHE_SIZE
            else:
                cache_size = _DEFAULT_INV_CACHE_SIZE
        self.inventory_cache_size = cache_size

        # Find the maximum number of commits to import (None means all)
        # and prepare progress reporting. Just in case the info file
        # has an outdated count of commits, we store the max counts
        # at which we need to terminate separately to the total used
        # for progress tracking.
        try:
            self.max_commits = int(self.params['count'])
            if self.max_commits < 0:
                self.max_commits = None
        except KeyError:
            self.max_commits = None
        if self.info is not None:
            self.total_commits = int(self.info['Command counts']['commit'])
            if (self.max_commits is not None and
                self.total_commits > self.max_commits):
                self.total_commits = self.max_commits
        else:
            self.total_commits = self.max_commits

    def _revision_store_factory(self):
        """Make a RevisionStore based on what the repository supports."""
        new_repo_api = hasattr(self.repo, 'revisions')
        if new_repo_api:
            return revision_store.RevisionStore2(self.repo)
        elif not self._experimental:
            return revision_store.RevisionStore1(self.repo)
        else:
            def fulltext_when(count):
                total = self.total_commits
                if total is not None and count == total:
                    fulltext = True
                else:
                    # Create an inventory fulltext every 200 revisions
                    fulltext = count % 200 == 0
                if fulltext:
                    self.note("%d commits - storing inventory as full-text",
                        count)
                return fulltext

            return revision_store.ImportRevisionStore1(
                self.repo, self.inventory_cache_size,
                fulltext_when=fulltext_when)
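
    # For example, with the experimental store and 1000 commits in total,
    # fulltext_when() above stores an inventory full-text at commits 200,
    # 400, 600, 800 and 1000 (the final commit always gets a full-text).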

    def process(self, command_iter):
        """Import data into Bazaar by processing a stream of commands.

        :param command_iter: an iterator providing commands
        """
        if self.working_tree is not None:
            self.working_tree.lock_write()
        elif self.branch is not None:
            self.branch.lock_write()
        elif self.repo is not None:
            self.repo.lock_write()
        try:
            super(GenericProcessor, self)._process(command_iter)
        finally:
            # If an unhandled exception occurred, abort the write group
            if self.repo is not None and self.repo.is_in_write_group():
                self.repo.abort_write_group()
            # Release the locks
            if self.working_tree is not None:
                self.working_tree.unlock()
            elif self.branch is not None:
                self.branch.unlock()
            elif self.repo is not None:
                self.repo.unlock()

    def _process(self, command_iter):
        # if anything goes wrong, abort the write group if any
        try:
            processor.ImportProcessor._process(self, command_iter)
        except:
            if self.repo is not None and self.repo.is_in_write_group():
                self.repo.abort_write_group()
            raise

    def post_process(self):
        # Commit the current write group and checkpoint the id map
        self.repo.commit_write_group()
        self._save_id_map()

        if self.params.get("export-marks") is not None:
            marks_file.export_marks(self.params.get("export-marks"),
                self.cache_mgr.marks)

        if self.cache_mgr.reftracker.last_ref is None:
            # Nothing to refresh
            return

        # Update the branches
        self.note("Updating branch information ...")
        updater = branch_updater.BranchUpdater(self.repo, self.branch,
            self.cache_mgr, helpers.invert_dictset(
                self.cache_mgr.reftracker.heads),
            self.cache_mgr.reftracker.last_ref, self.tags)
        branches_updated, branches_lost = updater.update()
        self._branch_count = len(branches_updated)

        # Tell the user about branches that were not created
        if branches_lost:
            if not self.repo.is_shared():
                self.warning("Cannot import multiple branches into "
                    "a standalone branch")
            self.warning("Not creating branches for these head revisions:")
            for lost_info in branches_lost:
                head_revision = lost_info[1]
                branch_name = lost_info[0]
                self.note("\t %s = %s", head_revision, branch_name)

        # Update the working trees as requested
        self._tree_count = 0
        remind_about_update = True
        if self._branch_count == 0:
            self.note("no branches to update")
            self.note("no working trees to update")
            remind_about_update = False
        elif self.params.get('trees', False):
            trees = self._get_working_trees(branches_updated)
            if trees:
                self._update_working_trees(trees)
                remind_about_update = False
            else:
                self.warning("No working trees available to update")
        else:
            # Update just the trunk. (This is always the first branch
            # returned by the branch updater.)
            trunk_branch = branches_updated[0]
            trees = self._get_working_trees([trunk_branch])
            if trees:
                self._update_working_trees(trees)
                remind_about_update = self._branch_count > 1

        # Dump the cache stats now because we clear it before the final pack
        if self.verbose:
            self.cache_mgr.dump_stats()
        if self._original_max_pack_count:
            # We earlier disabled autopacking, creating one pack every
            # checkpoint instead. We now pack the repository to optimise
            # how data is stored.
            self.cache_mgr.clear_all()
            self._pack_repository()

        # Finish up by dumping stats & telling the user what to do next.
        self.dump_stats()
        if remind_about_update:
            # This message is explicitly not timestamped.
            note("To refresh the working tree for other branches, "
                "use 'bzr update' inside that branch.")

    def _update_working_trees(self, trees):
        if self.verbose:
            reporter = delta._ChangeReporter()
        else:
            reporter = None
        for wt in trees:
            self.note("Updating the working tree for %s ...", wt.basedir)
            wt.update(reporter)
            self._tree_count += 1

    def _pack_repository(self, final=True):
        # Before packing, free whatever memory we can and ensure
        # that groupcompress is configured to optimise disk space
        import gc
        if final:
            try:
                from .... import groupcompress
            except ImportError:
                pass
            else:
                groupcompress._FAST = False
        gc.collect()
        self.note("Packing repository ...")
        self.repo.pack()

        # To be conservative, packing puts the old packs and
        # indices in obsolete_packs. We err on the side of
        # optimism and clear out that directory to save space.
        self.note("Removing obsolete packs ...")
        # TODO: Use a public API for this once one exists
        repo_transport = self.repo._pack_collection.transport
        repo_transport.clone('obsolete_packs').delete_multi(
            repo_transport.list_dir('obsolete_packs'))

        # If we're not done, free whatever memory we can
        if not final:
            gc.collect()

    def _get_working_trees(self, branches):
        """Get the working trees for branches in the repository."""
        result = []
        wt_expected = self.repo.make_working_trees()
        for br in branches:
            if br is None:
                continue
            elif br == self.branch:
                if self.working_tree:
                    result.append(self.working_tree)
            elif wt_expected:
                try:
                    result.append(br.controldir.open_workingtree())
                except errors.NoWorkingTree:
                    self.warning("No working tree for branch %s", br)
        return result

    def dump_stats(self):
        time_required = progress.str_tdelta(time.time() - self._start_time)
        rc = self._revision_count - self.skip_total
        bc = self._branch_count
        wtc = self._tree_count
        self.note("Imported %d %s, updating %d %s and %d %s in %s",
            rc, helpers.single_plural(rc, "revision", "revisions"),
            bc, helpers.single_plural(bc, "branch", "branches"),
            wtc, helpers.single_plural(wtc, "tree", "trees"),
            time_required)

    def _init_id_map(self):
        """Load the id-map and check it matches the repository.

        :return: the number of entries in the map
        """
        # Currently, we just check the size. In the future, we might
        # decide to be more paranoid and check that the revision-ids
        # are identical as well.
        self.cache_mgr.marks, known = idmapfile.load_id_map(
            self.id_map_path)
        existing_count = len(self.repo.all_revision_ids())
        if existing_count < known:
            raise plugin_errors.BadRepositorySize(known, existing_count)
        return known
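
    # As documented in the class docstring, the id-map file holds one
    # "commit-id revision-id" pair per line. An entry might look roughly
    # like this (illustrative values only, not taken from a real import):
    #
    #   100 jrandom@example.com-20080101120000-0123456789abcdef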

    def _save_id_map(self):
        """Save the id-map."""
        # Save the whole lot every time. If this proves a problem, we can
        # change to 'append just the new ones' at a later time.
        idmapfile.save_id_map(self.id_map_path, self.cache_mgr.marks)

    def blob_handler(self, cmd):
        """Process a BlobCommand."""
        if cmd.mark is not None:
            dataref = cmd.id
        else:
            dataref = osutils.sha_strings(cmd.data)
        self.cache_mgr.store_blob(dataref, cmd.data)

    def checkpoint_handler(self, cmd):
        """Process a CheckpointCommand."""
        # Commit the current write group and start a new one
        self.repo.commit_write_group()
        self._save_id_map()
        # track the number of automatic checkpoints done
        if cmd is None:
            self.checkpoint_count += 1
            if self.checkpoint_count % self.autopack_every == 0:
                self._pack_repository(final=False)
        self.repo.start_write_group()
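
    # With the default settings (an automatic checkpoint every 10000 commits,
    # a pack every 4 automatic checkpoints), the repository is packed roughly
    # once every 40000 imported commits.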

    def commit_handler(self, cmd):
        """Process a CommitCommand."""
        mark = cmd.id.lstrip(':')
        if self.skip_total and self._revision_count < self.skip_total:
            self.cache_mgr.reftracker.track_heads(cmd)
            # Check that we really do know about this commit-id
            if mark not in self.cache_mgr.marks:
                raise plugin_errors.BadRestart(mark)
            self.cache_mgr._blobs = {}
            self._revision_count += 1
            if cmd.ref.startswith('refs/tags/'):
                tag_name = cmd.ref[len('refs/tags/'):]
                self._set_tag(tag_name, cmd.id)
            return
        if self.first_incremental_commit:
            self.first_incremental_commit = None
            parents = self.cache_mgr.reftracker.track_heads(cmd)

        # 'Commit' the revision and report progress
        handler = self.commit_handler_factory(cmd, self.cache_mgr,
            self.rev_store, verbose=self.verbose,
            prune_empty_dirs=self.prune_empty_dirs)
        try:
            handler.process()
        except:
            print("ABORT: exception occurred processing commit %s" % cmd.id)
            raise
        self.cache_mgr.add_mark(mark, handler.revision_id)
        self._revision_count += 1
        self.report_progress("(%s)" % cmd.id.lstrip(':'))

        if cmd.ref.startswith('refs/tags/'):
            tag_name = cmd.ref[len('refs/tags/'):]
            self._set_tag(tag_name, cmd.id)

        # Check if we should finish up or automatically checkpoint
        if (self.max_commits is not None and
            self._revision_count >= self.max_commits):
            self.note("Stopping after reaching requested count of commits")
            self.finished = True
        elif self._revision_count % self.checkpoint_every == 0:
            self.note("%d commits - automatic checkpoint triggered",
                self._revision_count)
            self.checkpoint_handler(None)

    def report_progress(self, details=''):
        if self._revision_count % self.progress_every == 0:
            if self.total_commits is not None:
                counts = "%d/%d" % (self._revision_count, self.total_commits)
            else:
                counts = "%d" % (self._revision_count,)
            minutes = (time.time() - self._start_time) / 60
            revisions_added = self._revision_count - self.skip_total
            rate = revisions_added * 1.0 / minutes
            if rate > 10:
                rate_str = "at %.0f/minute " % rate
            else:
                rate_str = "at %.1f/minute " % rate
            self.note("%s commits processed %s%s" % (counts, rate_str, details))
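
    # A progress line emitted via self.note() might look roughly like the
    # following (the timestamp prefix comes from _time_of_day(), defined
    # elsewhere in this class):
    #
    #   12:05:33 3000/10000 commits processed at 450/minute (3000)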

    def progress_handler(self, cmd):
        """Process a ProgressCommand."""
        # Most progress messages embedded in streams are annoying.
        # Ignore them unless in verbose mode.
        if self.verbose:
            self.note("progress %s" % (cmd.message,))

    def reset_handler(self, cmd):
        """Process a ResetCommand."""
        if cmd.ref.startswith('refs/tags/'):
            tag_name = cmd.ref[len('refs/tags/'):]
            if cmd.from_ is not None:
                self._set_tag(tag_name, cmd.from_)
            elif self.verbose:
                self.warning("ignoring reset refs/tags/%s - no from clause"
                    % tag_name)
            return

        if cmd.from_ is not None:
            self.cache_mgr.reftracker.track_heads_for_ref(cmd.ref, cmd.from_)

    def tag_handler(self, cmd):
        """Process a TagCommand."""
        if cmd.from_ is not None:
            self._set_tag(cmd.id, cmd.from_)
        else:
            self.warning("ignoring tag %s - no from clause" % cmd.id)

    def _set_tag(self, name, from_):
        """Define a tag given a name and import 'from' reference."""
        bzr_tag_name = name.decode('utf-8', 'replace')
        bzr_rev_id = self.cache_mgr.lookup_committish(from_)
        self.tags[bzr_tag_name] = bzr_rev_id

    def feature_handler(self, cmd):
        """Process a FeatureCommand."""
        feature = cmd.feature_name
        if feature not in commands.FEATURE_NAMES:
            raise plugin_errors.UnknownFeature(feature)

    def debug(self, msg, *args):
        """Output a debug message if the appropriate -D option was given."""
        if "fast-import" in debug.debug_flags:
            msg = "%s DEBUG: %s" % (self._time_of_day(), msg)
            mutter(msg, *args)

    def note(self, msg, *args):
        """Output a note but timestamp it."""
        msg = "%s %s" % (self._time_of_day(), msg)
        note(msg, *args)

    def warning(self, msg, *args):
        """Output a warning but timestamp it."""
        msg = "%s WARNING: %s" % (self._time_of_day(), msg)
        warning(msg, *args)