                                         ' --revision or a revision_id')
         b = WorkingTree.open_containing(u'.')[0].branch

-        # TODO: jam 20060112 should cat-revision always output utf-8?
-        if revision_id is not None:
-            revision_id = osutils.safe_revision_id(revision_id, warn=False)
-            try:
-                self.outf.write(b.repository.get_revision_xml(revision_id).decode('utf-8'))
-            except errors.NoSuchRevision:
-                msg = "The repository %s contains no revision %s." % (b.repository.base,
-                    revision_id)
-                raise errors.BzrCommandError(msg)
-        elif revision is not None:
-            for rev in revision:
-                if rev is None:
-                    raise errors.BzrCommandError('You cannot specify a NULL'
-                                                 ' revision.')
-                rev_id = rev.as_revision_id(b)
-                self.outf.write(b.repository.get_revision_xml(rev_id).decode('utf-8'))
+        revisions = b.repository.revisions
+        if revisions is None:
+            raise errors.BzrCommandError('Repository %r does not support '
+                'access to raw revision texts')
+
+        b.repository.lock_read()
+        try:
+            # TODO: jam 20060112 should cat-revision always output utf-8?
+            if revision_id is not None:
+                revision_id = osutils.safe_revision_id(revision_id, warn=False)
+                try:
+                    self.print_revision(revisions, revision_id)
+                except errors.NoSuchRevision:
+                    msg = "The repository %s contains no revision %s." % (
+                        b.repository.base, revision_id)
+                    raise errors.BzrCommandError(msg)
+            elif revision is not None:
+                for rev in revision:
+                    if rev is None:
+                        raise errors.BzrCommandError(
+                            'You cannot specify a NULL revision.')
+                    rev_id = rev.as_revision_id(b)
+                    self.print_revision(revisions, rev_id)
+        finally:
+            b.repository.unlock()
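Most hunks in this patch make the same mechanical change: an explicit try/finally that releases a lock is replaced by a call to the command's add_cleanup(), which arranges for the unlock to run once run() finishes, whether it returns or raises. A minimal sketch of the two shapes, inside a hypothetical command with a hypothetical do_work() standing in for the real body:

    class cmd_example(Command):          # hypothetical command, for illustration only

        def run_before(self, tree):
            # Before: pair each lock with an explicit try/finally.
            tree.lock_read()
            try:
                return self.do_work(tree)    # do_work() is hypothetical
            finally:
                tree.unlock()

        def run_after(self, tree):
            # After: register the unlock as a cleanup; the command
            # infrastructure releases the lock when run() exits.
            tree.lock_read()
            self.add_cleanup(tree.unlock)
            return self.do_work(tree)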

 class cmd_dump_btree(Command):
     """Dump the contents of a btree index file to stdout.
     To re-create the working tree, use "bzr checkout".

     _see_also = ['checkout', 'working-trees']
-    takes_args = ['location?']
+    takes_args = ['location*']
     takes_options = [
         Option('force',
                help='Remove the working tree even if it has '
                     'uncommitted changes.'),
         ]

-    def run(self, location='.', force=False):
-        d = bzrdir.BzrDir.open(location)
-        try:
-            working = d.open_workingtree()
-        except errors.NoWorkingTree:
-            raise errors.BzrCommandError("No working tree to remove")
-        except errors.NotLocalUrl:
-            raise errors.BzrCommandError("You cannot remove the working tree"
-        if not force:
-            if (working.has_changes()):
-                raise errors.UncommittedChanges(working)
-        working_path = working.bzrdir.root_transport.base
-        branch_path = working.branch.bzrdir.root_transport.base
-        if working_path != branch_path:
-            raise errors.BzrCommandError("You cannot remove the working tree"
-                                         " from a lightweight checkout")
-        d.destroy_workingtree()
+    def run(self, location_list, force=False):
+        if not location_list:
+        for location in location_list:
+            d = bzrdir.BzrDir.open(location)
+            try:
+                working = d.open_workingtree()
+            except errors.NoWorkingTree:
+                raise errors.BzrCommandError("No working tree to remove")
+            except errors.NotLocalUrl:
+                raise errors.BzrCommandError("You cannot remove the working tree"
+            if not force:
+                if (working.has_changes()):
+                    raise errors.UncommittedChanges(working)
+            working_path = working.bzrdir.root_transport.base
+            branch_path = working.branch.bzrdir.root_transport.base
+            if working_path != branch_path:
+                raise errors.BzrCommandError("You cannot remove the working tree"
+                                             " from a lightweight checkout")
+            d.destroy_workingtree()
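The body of the `if not location_list:` branch is elided from this hunk; presumably it restores the old single-location default. A hypothetical sketch of that fallback (the ['.'] value is an assumption, not taken from the diff):

    def run(self, location_list, force=False):
        if not location_list:
            # Assumed default: behave like the old run(self, location='.').
            location_list = ['.']
        for location in location_list:
            # open each bzrdir and destroy its working tree, as in the hunk above
            pass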
484
512
class cmd_revno(Command):
545
569
wt = WorkingTree.open_containing(directory)[0]
572
self.add_cleanup(wt.unlock)
548
573
except (errors.NoWorkingTree, errors.NotLocalUrl):
550
575
b = Branch.open_containing(directory)[0]
554
if revision is not None:
555
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
556
if revision_info_list is not None:
557
for rev_str in revision_info_list:
558
rev_spec = RevisionSpec.from_string(rev_str)
559
revision_ids.append(rev_spec.as_revision_id(b))
560
# No arguments supplied, default to the last revision
561
if len(revision_ids) == 0:
564
raise errors.NoWorkingTree(directory)
565
revision_ids.append(wt.last_revision())
567
revision_ids.append(b.last_revision())
571
for revision_id in revision_ids:
573
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
574
revno = '.'.join(str(i) for i in dotted_revno)
575
except errors.NoSuchRevision:
577
maxlen = max(maxlen, len(revno))
578
revinfos.append([revno, revision_id])
577
self.add_cleanup(b.unlock)
579
if revision is not None:
580
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
581
if revision_info_list is not None:
582
for rev_str in revision_info_list:
583
rev_spec = RevisionSpec.from_string(rev_str)
584
revision_ids.append(rev_spec.as_revision_id(b))
585
# No arguments supplied, default to the last revision
586
if len(revision_ids) == 0:
589
raise errors.NoWorkingTree(directory)
590
revision_ids.append(wt.last_revision())
592
revision_ids.append(b.last_revision())
596
for revision_id in revision_ids:
598
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
599
revno = '.'.join(str(i) for i in dotted_revno)
600
except errors.NoSuchRevision:
602
maxlen = max(maxlen, len(revno))
603
revinfos.append([revno, revision_id])
585
606
for ri in revinfos:
586
607
self.outf.write('%*s %s\n' % (maxlen, ri[0], ri[1]))
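The loop above right-aligns each revision number in a column sized to the longest revno before printing the revision id. A tiny standalone illustration of that '%*s %s' formatting, using made-up values:

    revinfos = [['2', 'rev-id-two'], ['1.1.1', 'rev-id-merged']]
    maxlen = max(len(ri[0]) for ri in revinfos)
    for ri in revinfos:
        print('%*s %s' % (maxlen, ri[0], ri[1]))
    # prints:
    #     2 rev-id-two
    # 1.1.1 rev-id-merged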
735
759
revision = _get_one_revision('inventory', revision)
736
760
work_tree, file_list = tree_files(file_list)
737
761
work_tree.lock_read()
739
if revision is not None:
740
tree = revision.as_tree(work_tree.branch)
742
extra_trees = [work_tree]
748
if file_list is not None:
749
file_ids = tree.paths2ids(file_list, trees=extra_trees,
750
require_versioned=True)
751
# find_ids_across_trees may include some paths that don't
753
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
754
for file_id in file_ids if file_id in tree)
756
entries = tree.inventory.entries()
759
if tree is not work_tree:
762
self.add_cleanup(work_tree.unlock)
763
if revision is not None:
764
tree = revision.as_tree(work_tree.branch)
766
extra_trees = [work_tree]
768
self.add_cleanup(tree.unlock)
773
if file_list is not None:
774
file_ids = tree.paths2ids(file_list, trees=extra_trees,
775
require_versioned=True)
776
# find_ids_across_trees may include some paths that don't
778
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
779
for file_id in file_ids if file_id in tree)
781
entries = tree.inventory.entries()
762
784
for path, entry in entries:
763
785
if kind and kind != entry.kind:
1012
1030
if branch_from is not branch_to:
1013
1031
branch_from.lock_read()
1015
if revision is not None:
1016
revision_id = revision.as_revision_id(branch_from)
1018
branch_to.lock_write()
1020
if tree_to is not None:
1021
view_info = _get_view_info_for_change_reporter(tree_to)
1022
change_reporter = delta._ChangeReporter(
1023
unversioned_filter=tree_to.is_ignored,
1024
view_info=view_info)
1025
result = tree_to.pull(
1026
branch_from, overwrite, revision_id, change_reporter,
1027
possible_transports=possible_transports, local=local)
1029
result = branch_to.pull(
1030
branch_from, overwrite, revision_id, local=local)
1032
result.report(self.outf)
1033
if verbose and result.old_revid != result.new_revid:
1034
log.show_branch_change(
1035
branch_to, self.outf, result.old_revno,
1040
if branch_from is not branch_to:
1041
branch_from.unlock()
1032
self.add_cleanup(branch_from.unlock)
1033
if revision is not None:
1034
revision_id = revision.as_revision_id(branch_from)
1036
branch_to.lock_write()
1037
self.add_cleanup(branch_to.unlock)
1038
if tree_to is not None:
1039
view_info = _get_view_info_for_change_reporter(tree_to)
1040
change_reporter = delta._ChangeReporter(
1041
unversioned_filter=tree_to.is_ignored,
1042
view_info=view_info)
1043
result = tree_to.pull(
1044
branch_from, overwrite, revision_id, change_reporter,
1045
possible_transports=possible_transports, local=local)
1047
result = branch_to.pull(
1048
branch_from, overwrite, revision_id, local=local)
1050
result.report(self.outf)
1051
if verbose and result.old_revid != result.new_revid:
1052
log.show_branch_change(
1053
branch_to, self.outf, result.old_revno,
1044
1057
class cmd_push(Command):
1199
1212
' directory exists, but does not already'
1200
1213
' have a control directory. This flag will'
1201
1214
' allow branch to proceed.'),
1216
help="Bind new branch to from location."),
1203
1218
aliases = ['get', 'clone']
1205
1220
def run(self, from_location, to_location=None, revision=None,
1206
1221
hardlink=False, stacked=False, standalone=False, no_tree=False,
1207
use_existing_dir=False, switch=False):
1222
use_existing_dir=False, switch=False, bind=False):
1208
1223
from bzrlib import switch as _mod_switch
1209
1224
from bzrlib.tag import _merge_tags_if_possible
1210
1225
accelerator_tree, br_from = bzrdir.BzrDir.open_tree_or_branch(
1212
1227
revision = _get_one_revision('branch', revision)
1213
1228
br_from.lock_read()
1229
self.add_cleanup(br_from.unlock)
1230
if revision is not None:
1231
revision_id = revision.as_revision_id(br_from)
1233
# FIXME - wt.last_revision, fallback to branch, fall back to
1234
# None or perhaps NULL_REVISION to mean copy nothing
1236
revision_id = br_from.last_revision()
1237
if to_location is None:
1238
to_location = urlutils.derive_to_location(from_location)
1239
to_transport = transport.get_transport(to_location)
1215
if revision is not None:
1216
revision_id = revision.as_revision_id(br_from)
1241
to_transport.mkdir('.')
1242
except errors.FileExists:
1243
if not use_existing_dir:
1244
raise errors.BzrCommandError('Target directory "%s" '
1245
'already exists.' % to_location)
1218
# FIXME - wt.last_revision, fallback to branch, fall back to
1219
# None or perhaps NULL_REVISION to mean copy nothing
1221
revision_id = br_from.last_revision()
1222
if to_location is None:
1223
to_location = urlutils.derive_to_location(from_location)
1224
to_transport = transport.get_transport(to_location)
1226
to_transport.mkdir('.')
1227
except errors.FileExists:
1228
if not use_existing_dir:
1229
raise errors.BzrCommandError('Target directory "%s" '
1230
'already exists.' % to_location)
1248
bzrdir.BzrDir.open_from_transport(to_transport)
1249
except errors.NotBranchError:
1233
bzrdir.BzrDir.open_from_transport(to_transport)
1234
except errors.NotBranchError:
1237
raise errors.AlreadyBranchError(to_location)
1238
except errors.NoSuchFile:
1239
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1242
# preserve whatever source format we have.
1243
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1244
possible_transports=[to_transport],
1245
accelerator_tree=accelerator_tree,
1246
hardlink=hardlink, stacked=stacked,
1247
force_new_repo=standalone,
1248
create_tree_if_local=not no_tree,
1249
source_branch=br_from)
1250
branch = dir.open_branch()
1251
except errors.NoSuchRevision:
1252
to_transport.delete_tree('.')
1253
msg = "The branch %s has no revision %s." % (from_location,
1255
raise errors.BzrCommandError(msg)
1256
_merge_tags_if_possible(br_from, branch)
1257
# If the source branch is stacked, the new branch may
1258
# be stacked whether we asked for that explicitly or not.
1259
# We therefore need a try/except here and not just 'if stacked:'
1261
note('Created new stacked branch referring to %s.' %
1262
branch.get_stacked_on_url())
1263
except (errors.NotStacked, errors.UnstackableBranchFormat,
1264
errors.UnstackableRepositoryFormat), e:
1265
note('Branched %d revision(s).' % branch.revno())
1267
# Switch to the new branch
1268
wt, _ = WorkingTree.open_containing('.')
1269
_mod_switch.switch(wt.bzrdir, branch)
1270
note('Switched to branch: %s',
1271
urlutils.unescape_for_display(branch.base, 'utf-8'))
1252
raise errors.AlreadyBranchError(to_location)
1253
except errors.NoSuchFile:
1254
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1257
# preserve whatever source format we have.
1258
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1259
possible_transports=[to_transport],
1260
accelerator_tree=accelerator_tree,
1261
hardlink=hardlink, stacked=stacked,
1262
force_new_repo=standalone,
1263
create_tree_if_local=not no_tree,
1264
source_branch=br_from)
1265
branch = dir.open_branch()
1266
except errors.NoSuchRevision:
1267
to_transport.delete_tree('.')
1268
msg = "The branch %s has no revision %s." % (from_location,
1270
raise errors.BzrCommandError(msg)
1271
_merge_tags_if_possible(br_from, branch)
1272
# If the source branch is stacked, the new branch may
1273
# be stacked whether we asked for that explicitly or not.
1274
# We therefore need a try/except here and not just 'if stacked:'
1276
note('Created new stacked branch referring to %s.' %
1277
branch.get_stacked_on_url())
1278
except (errors.NotStacked, errors.UnstackableBranchFormat,
1279
errors.UnstackableRepositoryFormat), e:
1280
note('Branched %d revision(s).' % branch.revno())
1282
# Bind to the parent
1283
parent_branch = Branch.open(from_location)
1284
branch.bind(parent_branch)
1285
note('New branch bound to %s' % from_location)
1287
# Switch to the new branch
1288
wt, _ = WorkingTree.open_containing('.')
1289
_mod_switch.switch(wt.bzrdir, branch)
1290
note('Switched to branch: %s',
1291
urlutils.unescape_for_display(branch.base, 'utf-8'))
1276
1294
class cmd_checkout(Command):
1355
1373
def run(self, dir=u'.'):
1356
1374
tree = WorkingTree.open_containing(dir)[0]
1357
1375
tree.lock_read()
1359
new_inv = tree.inventory
1360
old_tree = tree.basis_tree()
1361
old_tree.lock_read()
1363
old_inv = old_tree.inventory
1365
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1366
for f, paths, c, v, p, n, k, e in iterator:
1367
if paths[0] == paths[1]:
1371
renames.append(paths)
1373
for old_name, new_name in renames:
1374
self.outf.write("%s => %s\n" % (old_name, new_name))
1376
self.add_cleanup(tree.unlock)
1377
new_inv = tree.inventory
1378
old_tree = tree.basis_tree()
1379
old_tree.lock_read()
1380
self.add_cleanup(old_tree.unlock)
1381
old_inv = old_tree.inventory
1383
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1384
for f, paths, c, v, p, n, k, e in iterator:
1385
if paths[0] == paths[1]:
1389
renames.append(paths)
1391
for old_name, new_name in renames:
1392
self.outf.write("%s => %s\n" % (old_name, new_name))
1381
1395
class cmd_update(Command):
1405
1427
tree.lock_tree_write()
1406
1428
branch_location = tree.branch.base
1429
self.add_cleanup(tree.unlock)
1407
1430
# get rid of the final '/' and be ready for display
1408
1431
branch_location = urlutils.unescape_for_display(branch_location[:-1],
1409
1432
self.outf.encoding)
1433
existing_pending_merges = tree.get_parent_ids()[1:]
1437
# may need to fetch data into a heavyweight checkout
1438
# XXX: this may take some time, maybe we should display a
1440
old_tip = branch.update(possible_transports)
1441
if revision is not None:
1442
revision_id = revision[0].as_revision_id(branch)
1444
revision_id = branch.last_revision()
1445
if revision_id == _mod_revision.ensure_null(tree.last_revision()):
1446
revno = branch.revision_id_to_revno(revision_id)
1447
note("Tree is up to date at revision %d of branch %s" %
1448
(revno, branch_location))
1450
view_info = _get_view_info_for_change_reporter(tree)
1451
change_reporter = delta._ChangeReporter(
1452
unversioned_filter=tree.is_ignored,
1453
view_info=view_info)
1411
existing_pending_merges = tree.get_parent_ids()[1:]
1412
last_rev = _mod_revision.ensure_null(tree.last_revision())
1413
if last_rev == _mod_revision.ensure_null(
1414
tree.branch.last_revision()):
1415
# may be up to date, check master too.
1416
if master is None or last_rev == _mod_revision.ensure_null(
1417
master.last_revision()):
1418
revno = tree.branch.revision_id_to_revno(last_rev)
1419
note('Tree is up to date at revision %d of branch %s'
1420
% (revno, branch_location))
1422
view_info = _get_view_info_for_change_reporter(tree)
1423
1455
conflicts = tree.update(
1424
delta._ChangeReporter(unversioned_filter=tree.is_ignored,
1425
view_info=view_info), possible_transports=possible_transports)
1426
revno = tree.branch.revision_id_to_revno(
1427
_mod_revision.ensure_null(tree.last_revision()))
1428
note('Updated to revision %d of branch %s' %
1429
(revno, branch_location))
1430
if tree.get_parent_ids()[1:] != existing_pending_merges:
1431
note('Your local commits will now show as pending merges with '
1432
"'bzr status', and can be committed with 'bzr commit'.")
1457
possible_transports=possible_transports,
1458
revision=revision_id,
1460
except errors.NoSuchRevision, e:
1461
raise errors.BzrCommandError(
1462
"branch has no revision %s\n"
1463
"bzr update --revision only works"
1464
" for a revision in the branch history"
1466
revno = tree.branch.revision_id_to_revno(
1467
_mod_revision.ensure_null(tree.last_revision()))
1468
note('Updated to revision %d of branch %s' %
1469
(revno, branch_location))
1470
if tree.get_parent_ids()[1:] != existing_pending_merges:
1471
note('Your local commits will now show as pending merges with '
1472
"'bzr status', and can be committed with 'bzr commit'.")
1441
1479
class cmd_info(Command):
1512
1550
file_list = [f for f in file_list]
1514
1552
tree.lock_write()
1516
# Heuristics should probably all move into tree.remove_smart or
1519
added = tree.changes_from(tree.basis_tree(),
1520
specific_files=file_list).added
1521
file_list = sorted([f[0] for f in added], reverse=True)
1522
if len(file_list) == 0:
1523
raise errors.BzrCommandError('No matching files.')
1524
elif file_list is None:
1525
# missing files show up in iter_changes(basis) as
1526
# versioned-with-no-kind.
1528
for change in tree.iter_changes(tree.basis_tree()):
1529
# Find paths in the working tree that have no kind:
1530
if change[1][1] is not None and change[6][1] is None:
1531
missing.append(change[1][1])
1532
file_list = sorted(missing, reverse=True)
1533
file_deletion_strategy = 'keep'
1534
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1535
keep_files=file_deletion_strategy=='keep',
1536
force=file_deletion_strategy=='force')
1553
self.add_cleanup(tree.unlock)
1554
# Heuristics should probably all move into tree.remove_smart or
1557
added = tree.changes_from(tree.basis_tree(),
1558
specific_files=file_list).added
1559
file_list = sorted([f[0] for f in added], reverse=True)
1560
if len(file_list) == 0:
1561
raise errors.BzrCommandError('No matching files.')
1562
elif file_list is None:
1563
# missing files show up in iter_changes(basis) as
1564
# versioned-with-no-kind.
1566
for change in tree.iter_changes(tree.basis_tree()):
1567
# Find paths in the working tree that have no kind:
1568
if change[1][1] is not None and change[6][1] is None:
1569
missing.append(change[1][1])
1570
file_list = sorted(missing, reverse=True)
1571
file_deletion_strategy = 'keep'
1572
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1573
keep_files=file_deletion_strategy=='keep',
1574
force=file_deletion_strategy=='force')
1541
1577
class cmd_file_id(Command):
2310
2335
filter_by_dir = False
2314
# find the file ids to log and check for directory filtering
2315
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2316
revision, file_list)
2317
for relpath, file_id, kind in file_info_list:
2319
raise errors.BzrCommandError(
2320
"Path unknown at end or start of revision range: %s" %
2322
# If the relpath is the top of the tree, we log everything
2327
file_ids.append(file_id)
2328
filter_by_dir = filter_by_dir or (
2329
kind in ['directory', 'tree-reference'])
2332
# FIXME ? log the current subdir only RBC 20060203
2333
if revision is not None \
2334
and len(revision) > 0 and revision[0].get_branch():
2335
location = revision[0].get_branch()
2337
# find the file ids to log and check for directory filtering
2338
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2339
revision, file_list)
2340
self.add_cleanup(b.unlock)
2341
for relpath, file_id, kind in file_info_list:
2343
raise errors.BzrCommandError(
2344
"Path unknown at end or start of revision range: %s" %
2346
# If the relpath is the top of the tree, we log everything
2338
dir, relpath = bzrdir.BzrDir.open_containing(location)
2339
b = dir.open_branch()
2341
rev1, rev2 = _get_revision_range(revision, b, self.name())
2343
# Decide on the type of delta & diff filtering to use
2344
# TODO: add an --all-files option to make this configurable & consistent
2352
diff_type = 'partial'
2356
# Build the log formatter
2357
if log_format is None:
2358
log_format = log.log_formatter_registry.get_default(b)
2359
# Make a non-encoding output to include the diffs - bug 328007
2360
unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
2361
lf = log_format(show_ids=show_ids, to_file=self.outf,
2362
to_exact_file=unencoded_output,
2363
show_timezone=timezone,
2364
delta_format=get_verbosity_level(),
2366
show_advice=levels is None)
2368
# Choose the algorithm for doing the logging. It's annoying
2369
# having multiple code paths like this but necessary until
2370
# the underlying repository format is faster at generating
2371
# deltas or can provide everything we need from the indices.
2372
# The default algorithm - match-using-deltas - works for
2373
# multiple files and directories and is faster for small
2374
# amounts of history (200 revisions say). However, it's too
2375
# slow for logging a single file in a repository with deep
2376
# history, i.e. > 10K revisions. In the spirit of "do no
2377
# evil when adding features", we continue to use the
2378
# original algorithm - per-file-graph - for the "single
2379
# file that isn't a directory without showing a delta" case.
2380
partial_history = revision and b.repository._format.supports_chks
2381
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2382
or delta_type or partial_history)
2384
# Build the LogRequest and execute it
2385
if len(file_ids) == 0:
2387
rqst = make_log_request_dict(
2388
direction=direction, specific_fileids=file_ids,
2389
start_revision=rev1, end_revision=rev2, limit=limit,
2390
message_search=message, delta_type=delta_type,
2391
diff_type=diff_type, _match_using_deltas=match_using_deltas)
2392
Logger(b, rqst).show(lf)
2351
file_ids.append(file_id)
2352
filter_by_dir = filter_by_dir or (
2353
kind in ['directory', 'tree-reference'])
2356
# FIXME ? log the current subdir only RBC 20060203
2357
if revision is not None \
2358
and len(revision) > 0 and revision[0].get_branch():
2359
location = revision[0].get_branch()
2362
dir, relpath = bzrdir.BzrDir.open_containing(location)
2363
b = dir.open_branch()
2365
self.add_cleanup(b.unlock)
2366
rev1, rev2 = _get_revision_range(revision, b, self.name())
2368
# Decide on the type of delta & diff filtering to use
2369
# TODO: add an --all-files option to make this configurable & consistent
2377
diff_type = 'partial'
2381
# Build the log formatter
2382
if log_format is None:
2383
log_format = log.log_formatter_registry.get_default(b)
2384
# Make a non-encoding output to include the diffs - bug 328007
2385
unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
2386
lf = log_format(show_ids=show_ids, to_file=self.outf,
2387
to_exact_file=unencoded_output,
2388
show_timezone=timezone,
2389
delta_format=get_verbosity_level(),
2391
show_advice=levels is None)
2393
# Choose the algorithm for doing the logging. It's annoying
2394
# having multiple code paths like this but necessary until
2395
# the underlying repository format is faster at generating
2396
# deltas or can provide everything we need from the indices.
2397
# The default algorithm - match-using-deltas - works for
2398
# multiple files and directories and is faster for small
2399
# amounts of history (200 revisions say). However, it's too
2400
# slow for logging a single file in a repository with deep
2401
# history, i.e. > 10K revisions. In the spirit of "do no
2402
# evil when adding features", we continue to use the
2403
# original algorithm - per-file-graph - for the "single
2404
# file that isn't a directory without showing a delta" case.
2405
partial_history = revision and b.repository._format.supports_chks
2406
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2407
or delta_type or partial_history)
2409
# Build the LogRequest and execute it
2410
if len(file_ids) == 0:
2412
rqst = make_log_request_dict(
2413
direction=direction, specific_fileids=file_ids,
2414
start_revision=rev1, end_revision=rev2, limit=limit,
2415
message_search=message, delta_type=delta_type,
2416
diff_type=diff_type, _match_using_deltas=match_using_deltas)
2417
Logger(b, rqst).show(lf)
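The long comment above chooses between two logging strategies, but the decision itself is the single boolean at the end of that block. A standalone illustration with hypothetical inputs (one ordinary file, no directory filter, no delta, non-chk repository), which selects the per-file-graph path:

    file_ids = ['file-id-1']      # hypothetical sample values
    filter_by_dir = False
    delta_type = None
    partial_history = False
    match_using_deltas = (len(file_ids) != 1 or filter_by_dir
        or delta_type or partial_history)
    print(bool(match_using_deltas))   # False -> use the per-file-graph algorithm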
2398
2420
def _get_revision_range(revisionspec_list, branch, command_name):
2543
2567
note("Ignoring files outside view. View is %s" % view_str)
2545
2569
tree.lock_read()
2547
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2548
from_dir=relpath, recursive=recursive):
2549
# Apply additional masking
2550
if not all and not selection[fc]:
2552
if kind is not None and fkind != kind:
2557
fullpath = osutils.pathjoin(relpath, fp)
2560
views.check_path_in_view(tree, fullpath)
2561
except errors.FileOutsideView:
2570
self.add_cleanup(tree.unlock)
2571
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2572
from_dir=relpath, recursive=recursive):
2573
# Apply additional masking
2574
if not all and not selection[fc]:
2576
if kind is not None and fkind != kind:
2581
fullpath = osutils.pathjoin(relpath, fp)
2584
views.check_path_in_view(tree, fullpath)
2585
except errors.FileOutsideView:
2566
fp = osutils.pathjoin(prefix, fp)
2567
kindch = entry.kind_character()
2568
outstring = fp + kindch
2569
ui.ui_factory.clear_term()
2571
outstring = '%-8s %s' % (fc, outstring)
2572
if show_ids and fid is not None:
2573
outstring = "%-50s %s" % (outstring, fid)
2590
fp = osutils.pathjoin(prefix, fp)
2591
kindch = entry.kind_character()
2592
outstring = fp + kindch
2593
ui.ui_factory.clear_term()
2595
outstring = '%-8s %s' % (fc, outstring)
2596
if show_ids and fid is not None:
2597
outstring = "%-50s %s" % (outstring, fid)
2598
self.outf.write(outstring + '\n')
2600
self.outf.write(fp + '\0')
2603
self.outf.write(fid)
2604
self.outf.write('\0')
2612
self.outf.write('%-50s %s\n' % (outstring, my_id))
2574
2614
self.outf.write(outstring + '\n')
2576
self.outf.write(fp + '\0')
2579
self.outf.write(fid)
2580
self.outf.write('\0')
2588
self.outf.write('%-50s %s\n' % (outstring, my_id))
2590
self.outf.write(outstring + '\n')
2595
2617
class cmd_unknowns(Command):
3543
3589
verbose = not is_quiet()
3544
3590
# TODO: should possibly lock the history file...
3545
3591
benchfile = open(".perf_history", "at", buffering=1)
3592
self.add_cleanup(benchfile.close)
3547
3594
test_suite_factory = None
3548
3595
benchfile = None
3550
selftest_kwargs = {"verbose": verbose,
3552
"stop_on_failure": one,
3553
"transport": transport,
3554
"test_suite_factory": test_suite_factory,
3555
"lsprof_timed": lsprof_timed,
3556
"lsprof_tests": lsprof_tests,
3557
"bench_history": benchfile,
3558
"matching_tests_first": first,
3559
"list_only": list_only,
3560
"random_seed": randomize,
3561
"exclude_pattern": exclude,
3563
"load_list": load_list,
3564
"debug_flags": debugflag,
3565
"starting_with": starting_with
3567
selftest_kwargs.update(self.additional_selftest_args)
3568
result = selftest(**selftest_kwargs)
3570
if benchfile is not None:
3596
selftest_kwargs = {"verbose": verbose,
3598
"stop_on_failure": one,
3599
"transport": transport,
3600
"test_suite_factory": test_suite_factory,
3601
"lsprof_timed": lsprof_timed,
3602
"lsprof_tests": lsprof_tests,
3603
"bench_history": benchfile,
3604
"matching_tests_first": first,
3605
"list_only": list_only,
3606
"random_seed": randomize,
3607
"exclude_pattern": exclude,
3609
"load_list": load_list,
3610
"debug_flags": debugflag,
3611
"starting_with": starting_with
3613
selftest_kwargs.update(self.additional_selftest_args)
3614
result = selftest(**selftest_kwargs)
3572
3615
return int(not result)
3749
3800
view_info = _get_view_info_for_change_reporter(tree)
3750
3801
change_reporter = delta._ChangeReporter(
3751
3802
unversioned_filter=tree.is_ignored, view_info=view_info)
3754
pb = ui.ui_factory.nested_progress_bar()
3755
cleanups.append(pb.finished)
3757
cleanups.append(tree.unlock)
3758
if location is not None:
3760
mergeable = bundle.read_mergeable_from_url(location,
3761
possible_transports=possible_transports)
3762
except errors.NotABundle:
3766
raise errors.BzrCommandError('Cannot use --uncommitted'
3767
' with bundles or merge directives.')
3769
if revision is not None:
3770
raise errors.BzrCommandError(
3771
'Cannot use -r with merge directives or bundles')
3772
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3775
if merger is None and uncommitted:
3776
if revision is not None and len(revision) > 0:
3777
raise errors.BzrCommandError('Cannot use --uncommitted and'
3778
' --revision at the same time.')
3779
merger = self.get_merger_from_uncommitted(tree, location, pb,
3781
allow_pending = False
3784
merger, allow_pending = self._get_merger_from_branch(tree,
3785
location, revision, remember, possible_transports, pb)
3787
merger.merge_type = merge_type
3788
merger.reprocess = reprocess
3789
merger.show_base = show_base
3790
self.sanity_check_merger(merger)
3791
if (merger.base_rev_id == merger.other_rev_id and
3792
merger.other_rev_id is not None):
3793
note('Nothing to do.')
3803
pb = ui.ui_factory.nested_progress_bar()
3804
self.add_cleanup(pb.finished)
3806
self.add_cleanup(tree.unlock)
3807
if location is not None:
3809
mergeable = bundle.read_mergeable_from_url(location,
3810
possible_transports=possible_transports)
3811
except errors.NotABundle:
3815
raise errors.BzrCommandError('Cannot use --uncommitted'
3816
' with bundles or merge directives.')
3818
if revision is not None:
3819
raise errors.BzrCommandError(
3820
'Cannot use -r with merge directives or bundles')
3821
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3824
if merger is None and uncommitted:
3825
if revision is not None and len(revision) > 0:
3826
raise errors.BzrCommandError('Cannot use --uncommitted and'
3827
' --revision at the same time.')
3828
merger = self.get_merger_from_uncommitted(tree, location, None)
3829
allow_pending = False
3832
merger, allow_pending = self._get_merger_from_branch(tree,
3833
location, revision, remember, possible_transports, None)
3835
merger.merge_type = merge_type
3836
merger.reprocess = reprocess
3837
merger.show_base = show_base
3838
self.sanity_check_merger(merger)
3839
if (merger.base_rev_id == merger.other_rev_id and
3840
merger.other_rev_id is not None):
3841
note('Nothing to do.')
3844
if merger.interesting_files is not None:
3845
raise errors.BzrCommandError('Cannot pull individual files')
3846
if (merger.base_rev_id == tree.last_revision()):
3847
result = tree.pull(merger.other_branch, False,
3848
merger.other_rev_id)
3849
result.report(self.outf)
3796
if merger.interesting_files is not None:
3797
raise errors.BzrCommandError('Cannot pull individual files')
3798
if (merger.base_rev_id == tree.last_revision()):
3799
result = tree.pull(merger.other_branch, False,
3800
merger.other_rev_id)
3801
result.report(self.outf)
3803
if merger.this_basis is None:
3804
raise errors.BzrCommandError(
3805
"This branch has no commits."
3806
" (perhaps you would prefer 'bzr pull')")
3808
return self._do_preview(merger, cleanups)
3810
return self._do_interactive(merger, cleanups)
3812
return self._do_merge(merger, change_reporter, allow_pending,
3815
for cleanup in reversed(cleanups):
3851
if merger.this_basis is None:
3852
raise errors.BzrCommandError(
3853
"This branch has no commits."
3854
" (perhaps you would prefer 'bzr pull')")
3856
return self._do_preview(merger)
3858
return self._do_interactive(merger)
3860
return self._do_merge(merger, change_reporter, allow_pending,
3818
def _get_preview(self, merger, cleanups):
3863
def _get_preview(self, merger):
3819
3864
tree_merger = merger.make_merger()
3820
3865
tt = tree_merger.make_preview_transform()
3821
cleanups.append(tt.finalize)
3866
self.add_cleanup(tt.finalize)
3822
3867
result_tree = tt.get_preview_tree()
3823
3868
return result_tree
3825
def _do_preview(self, merger, cleanups):
3870
def _do_preview(self, merger):
3826
3871
from bzrlib.diff import show_diff_trees
3827
result_tree = self._get_preview(merger, cleanups)
3872
result_tree = self._get_preview(merger)
3828
3873
show_diff_trees(merger.this_tree, result_tree, self.outf,
3829
3874
old_label='', new_label='')
4022
4065
merge_type = _mod_merge.Merge3Merger
4023
4066
tree, file_list = tree_files(file_list)
4024
4067
tree.lock_write()
4026
parents = tree.get_parent_ids()
4027
if len(parents) != 2:
4028
raise errors.BzrCommandError("Sorry, remerge only works after normal"
4029
" merges. Not cherrypicking or"
4031
repository = tree.branch.repository
4032
interesting_ids = None
4034
conflicts = tree.conflicts()
4035
if file_list is not None:
4036
interesting_ids = set()
4037
for filename in file_list:
4038
file_id = tree.path2id(filename)
4040
raise errors.NotVersionedError(filename)
4041
interesting_ids.add(file_id)
4042
if tree.kind(file_id) != "directory":
4068
self.add_cleanup(tree.unlock)
4069
parents = tree.get_parent_ids()
4070
if len(parents) != 2:
4071
raise errors.BzrCommandError("Sorry, remerge only works after normal"
4072
" merges. Not cherrypicking or"
4074
repository = tree.branch.repository
4075
interesting_ids = None
4077
conflicts = tree.conflicts()
4078
if file_list is not None:
4079
interesting_ids = set()
4080
for filename in file_list:
4081
file_id = tree.path2id(filename)
4083
raise errors.NotVersionedError(filename)
4084
interesting_ids.add(file_id)
4085
if tree.kind(file_id) != "directory":
4045
for name, ie in tree.inventory.iter_entries(file_id):
4046
interesting_ids.add(ie.file_id)
4047
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
4049
# Remerge only supports resolving contents conflicts
4050
allowed_conflicts = ('text conflict', 'contents conflict')
4051
restore_files = [c.path for c in conflicts
4052
if c.typestring in allowed_conflicts]
4053
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
4054
tree.set_conflicts(ConflictList(new_conflicts))
4055
if file_list is not None:
4056
restore_files = file_list
4057
for filename in restore_files:
4059
restore(tree.abspath(filename))
4060
except errors.NotConflicted:
4062
# Disable pending merges, because the file texts we are remerging
4063
# have not had those merges performed. If we use the wrong parents
4064
# list, we imply that the working tree text has seen and rejected
4065
# all the changes from the other tree, when in fact those changes
4066
# have not yet been seen.
4067
pb = ui.ui_factory.nested_progress_bar()
4068
tree.set_parent_ids(parents[:1])
4088
for name, ie in tree.inventory.iter_entries(file_id):
4089
interesting_ids.add(ie.file_id)
4090
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
4092
# Remerge only supports resolving contents conflicts
4093
allowed_conflicts = ('text conflict', 'contents conflict')
4094
restore_files = [c.path for c in conflicts
4095
if c.typestring in allowed_conflicts]
4096
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
4097
tree.set_conflicts(ConflictList(new_conflicts))
4098
if file_list is not None:
4099
restore_files = file_list
4100
for filename in restore_files:
4070
merger = _mod_merge.Merger.from_revision_ids(pb,
4072
merger.interesting_ids = interesting_ids
4073
merger.merge_type = merge_type
4074
merger.show_base = show_base
4075
merger.reprocess = reprocess
4076
conflicts = merger.do_merge()
4078
tree.set_parent_ids(parents)
4102
restore(tree.abspath(filename))
4103
except errors.NotConflicted:
4105
# Disable pending merges, because the file texts we are remerging
4106
# have not had those merges performed. If we use the wrong parents
4107
# list, we imply that the working tree text has seen and rejected
4108
# all the changes from the other tree, when in fact those changes
4109
# have not yet been seen.
4110
tree.set_parent_ids(parents[:1])
4112
merger = _mod_merge.Merger.from_revision_ids(None, tree, parents[1])
4113
merger.interesting_ids = interesting_ids
4114
merger.merge_type = merge_type
4115
merger.show_base = show_base
4116
merger.reprocess = reprocess
4117
conflicts = merger.do_merge()
4119
tree.set_parent_ids(parents)
4082
4120
if conflicts > 0:
     created as above. Directories containing unknown files will not be

-    The working tree contains a list of pending merged revisions, which will
-    be included as parents in the next commit. Normally, revert clears that
-    list as well as reverting the files. If any files are specified, revert
-    leaves the pending merge list alone and reverts only the files. Use "bzr
-    revert ." in the tree root to revert all files but keep the merge record,
-    and "bzr revert --forget-merges" to clear the pending merge list without
+    The working tree contains a list of revisions that have been merged but
+    not yet committed. These revisions will be included as additional parents
+    of the next commit. Normally, using revert clears that list as well as
+    reverting the files. If any files are specified, revert leaves the list
+    of uncommitted merges alone and reverts only the files. Use ``bzr revert
+    .`` in the tree root to revert all files but keep the recorded merges,
+    and ``bzr revert --forget-merges`` to clear the pending merge list without
     reverting any files.

-    Using "bzr revert --forget-merges", it is possible to apply the changes
-    from an arbitrary merge as a single revision. To do this, perform the
-    merge as desired. Then doing revert with the "--forget-merges" option will
-    keep the content of the tree as it was, but it will clear the list of
-    pending merges. The next commit will then contain all of the changes that
-    would have been in the merge, but without any mention of the other parent
-    revisions. Because this technique forgets where these changes originated,
-    it may cause additional conflicts on later merges involving the source and
+    Using "bzr revert --forget-merges", it is possible to apply all of the
+    changes from a branch in a single revision. To do this, perform the merge
+    as desired. Then doing revert with the "--forget-merges" option will keep
+    the content of the tree as it was, but it will clear the list of pending
+    merges. The next commit will then contain all of the changes that are
+    present in the other branch, but without any other parent revisions.
+    Because this technique forgets where these changes originated, it may
+    cause additional conflicts on later merges involving the same source and
     target branches.
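For readers of the API rather than the command line, a minimal sketch of what --forget-merges amounts to, assuming a bzrlib working tree; the same set_parent_ids(parents[:1]) idiom appears in cmd_remerge earlier in this diff:

    from bzrlib import workingtree

    wt = workingtree.WorkingTree.open_containing('.')[0]
    wt.lock_tree_write()
    try:
        parents = wt.get_parent_ids()
        # Keep only the first parent: the pending-merge list is forgotten
        # while the file content of the tree is left untouched.
        wt.set_parent_ids(parents[:1])
    finally:
        wt.unlock()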
4306
4339
if remote_branch.base == local_branch.base:
4307
4340
remote_branch = local_branch
4342
local_branch.lock_read()
4343
self.add_cleanup(local_branch.unlock)
4309
4344
local_revid_range = _revision_range_to_revid_range(
4310
4345
_get_revision_range(my_revision, local_branch,
4348
remote_branch.lock_read()
4349
self.add_cleanup(remote_branch.unlock)
4313
4350
remote_revid_range = _revision_range_to_revid_range(
4314
4351
_get_revision_range(revision,
4315
4352
remote_branch, self.name()))
4317
local_branch.lock_read()
4319
remote_branch.lock_read()
4321
local_extra, remote_extra = find_unmerged(
4322
local_branch, remote_branch, restrict,
4323
backward=not reverse,
4324
include_merges=include_merges,
4325
local_revid_range=local_revid_range,
4326
remote_revid_range=remote_revid_range)
4328
if log_format is None:
4329
registry = log.log_formatter_registry
4330
log_format = registry.get_default(local_branch)
4331
lf = log_format(to_file=self.outf,
4333
show_timezone='original')
4336
if local_extra and not theirs_only:
4337
message("You have %d extra revision(s):\n" %
4339
for revision in iter_log_revisions(local_extra,
4340
local_branch.repository,
4342
lf.log_revision(revision)
4343
printed_local = True
4346
printed_local = False
4348
if remote_extra and not mine_only:
4349
if printed_local is True:
4351
message("You are missing %d revision(s):\n" %
4353
for revision in iter_log_revisions(remote_extra,
4354
remote_branch.repository,
4356
lf.log_revision(revision)
4359
if mine_only and not local_extra:
4360
# We checked local, and found nothing extra
4361
message('This branch is up to date.\n')
4362
elif theirs_only and not remote_extra:
4363
# We checked remote, and found nothing extra
4364
message('Other branch is up to date.\n')
4365
elif not (mine_only or theirs_only or local_extra or
4367
# We checked both branches, and neither one had extra
4369
message("Branches are up to date.\n")
4371
remote_branch.unlock()
4373
local_branch.unlock()
4354
local_extra, remote_extra = find_unmerged(
4355
local_branch, remote_branch, restrict,
4356
backward=not reverse,
4357
include_merges=include_merges,
4358
local_revid_range=local_revid_range,
4359
remote_revid_range=remote_revid_range)
4361
if log_format is None:
4362
registry = log.log_formatter_registry
4363
log_format = registry.get_default(local_branch)
4364
lf = log_format(to_file=self.outf,
4366
show_timezone='original')
4369
if local_extra and not theirs_only:
4370
message("You have %d extra revision(s):\n" %
4372
for revision in iter_log_revisions(local_extra,
4373
local_branch.repository,
4375
lf.log_revision(revision)
4376
printed_local = True
4379
printed_local = False
4381
if remote_extra and not mine_only:
4382
if printed_local is True:
4384
message("You are missing %d revision(s):\n" %
4386
for revision in iter_log_revisions(remote_extra,
4387
remote_branch.repository,
4389
lf.log_revision(revision)
4392
if mine_only and not local_extra:
4393
# We checked local, and found nothing extra
4394
message('This branch is up to date.\n')
4395
elif theirs_only and not remote_extra:
4396
# We checked remote, and found nothing extra
4397
message('Other branch is up to date.\n')
4398
elif not (mine_only or theirs_only or local_extra or
4400
# We checked both branches, and neither one had extra
4402
message("Branches are up to date.\n")
4374
4404
if not status_code and parent is None and other_branch is not None:
4375
4405
local_branch.lock_write()
4377
# handle race conditions - a parent might be set while we run.
4378
if local_branch.get_parent() is None:
4379
local_branch.set_parent(remote_branch.base)
4381
local_branch.unlock()
4406
self.add_cleanup(local_branch.unlock)
4407
# handle race conditions - a parent might be set while we run.
4408
if local_branch.get_parent() is None:
4409
local_branch.set_parent(remote_branch.base)
4382
4410
return status_code
4506
4532
bzrdir.BzrDir.open_containing_tree_or_branch(filename)
4507
4533
if wt is not None:
4535
self.add_cleanup(wt.unlock)
4510
4537
branch.lock_read()
4512
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4514
file_id = wt.path2id(relpath)
4516
file_id = tree.path2id(relpath)
4518
raise errors.NotVersionedError(filename)
4519
file_version = tree.inventory[file_id].revision
4520
if wt is not None and revision is None:
4521
# If there is a tree and we're not annotating historical
4522
# versions, annotate the working tree's content.
4523
annotate_file_tree(wt, file_id, self.outf, long, all,
4526
annotate_file(branch, file_version, file_id, long, all, self.outf,
4538
self.add_cleanup(branch.unlock)
4539
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4541
self.add_cleanup(tree.unlock)
4543
file_id = wt.path2id(relpath)
4545
file_id = tree.path2id(relpath)
4547
raise errors.NotVersionedError(filename)
4548
file_version = tree.inventory[file_id].revision
4549
if wt is not None and revision is None:
4550
# If there is a tree and we're not annotating historical
4551
# versions, annotate the working tree's content.
4552
annotate_file_tree(wt, file_id, self.outf, long, all,
4555
annotate_file(branch, file_version, file_id, long, all, self.outf,
4535
4559
class cmd_re_sign(Command):
                                  end_revision=last_revno)

-        print 'Dry-run, pretending to remove the above revisions.'
-        val = raw_input('Press <enter> to continue')
+        self.outf.write('Dry-run, pretending to remove'
+                        ' the above revisions.\n')
-        print 'The above revision(s) will be removed.'
-        val = raw_input('Are you sure [y/N]? ')
-        if val.lower() not in ('y', 'yes'):
+        self.outf.write('The above revision(s) will be removed.\n')
+        if not ui.ui_factory.get_boolean('Are you sure'):
+            self.outf.write('Canceled')

         mutter('Uncommitting from {%s} to {%s}',
                last_rev_id, rev_id)
         uncommit(b, tree=tree, dry_run=dry_run, verbose=verbose,
                  revno=revno, local=local)
-        note('You can restore the old tip by running:\n'
-             ' bzr pull . -r revid:%s', last_rev_id)
+        self.outf.write('You can restore the old tip by running:\n'
+                        ' bzr pull . -r revid:%s\n' % last_rev_id)

 class cmd_break_lock(Command):
     directly from the merge directive, without retrieving data from a

-    If --no-bundle is specified, then public_branch is needed (and must be
-    up-to-date), so that the receiver can perform the merge using the
-    public_branch. The public_branch is always included if known, so that
-    people can check it later.
-
-    The submit branch defaults to the parent, but can be overridden. Both
-    submit branch and public branch will be remembered if supplied.
-
-    If a public_branch is known for the submit_branch, that public submit
-    branch is used in the merge instructions. This means that a local mirror
-    can be used as your actual submit branch, once you have set public_branch
+    `bzr send` creates a compact data set that, when applied using bzr
+    merge, has the same effect as merging from the source branch.
+
+    By default the merge directive is self-contained and can be applied to any
+    branch containing submit_branch in its ancestry without needing access to
+
+    If --no-bundle is specified, then Bazaar doesn't send the contents of the
+    revisions, but only a structured request to merge from the
+    public_location. In that case the public_branch is needed and it must be
+    up-to-date and accessible to the recipient. The public_branch is always
+    included if known, so that people can check it later.
+
+    The submit branch defaults to the parent of the source branch, but can be
+    overridden. Both submit branch and public branch will be remembered in
+    branch.conf the first time they are used for a particular branch. The
+    source branch defaults to that containing the working directory, but can
+    be changed using --from.
+
+    In order to calculate those changes, bzr must analyse the submit branch.
+    Therefore it is most efficient for the submit branch to be a local mirror.
+    If a public location is known for the submit_branch, that location is used
+    in the merge directive.
+
+    The default behaviour is to send the merge directive by mail, unless -o is
+    given, in which case it is sent to a file.

     Mail is sent using your preferred mail program. This should be transparent
     on Windows (it uses MAPI). On Linux, it requires the xdg-email utility.
5257
5296
branch, relpath = Branch.open_containing(directory)
5258
5297
branch.lock_write()
5261
branch.tags.delete_tag(tag_name)
5262
self.outf.write('Deleted tag %s.\n' % tag_name)
5298
self.add_cleanup(branch.unlock)
5300
branch.tags.delete_tag(tag_name)
5301
self.outf.write('Deleted tag %s.\n' % tag_name)
5304
if len(revision) != 1:
5305
raise errors.BzrCommandError(
5306
"Tags can only be placed on a single revision, "
5308
revision_id = revision[0].as_revision_id(branch)
5265
if len(revision) != 1:
5266
raise errors.BzrCommandError(
5267
"Tags can only be placed on a single revision, "
5269
revision_id = revision[0].as_revision_id(branch)
5271
revision_id = branch.last_revision()
5272
if (not force) and branch.tags.has_tag(tag_name):
5273
raise errors.TagAlreadyExists(tag_name)
5274
branch.tags.set_tag(tag_name, revision_id)
5275
self.outf.write('Created tag %s.\n' % tag_name)
5310
revision_id = branch.last_revision()
5311
if (not force) and branch.tags.has_tag(tag_name):
5312
raise errors.TagAlreadyExists(tag_name)
5313
branch.tags.set_tag(tag_name, revision_id)
5314
self.outf.write('Created tag %s.\n' % tag_name)
5280
5317
class cmd_tags(Command):
5315
5352
branch.lock_read()
5318
graph = branch.repository.get_graph()
5319
rev1, rev2 = _get_revision_range(revision, branch, self.name())
5320
revid1, revid2 = rev1.rev_id, rev2.rev_id
5321
# only show revisions between revid1 and revid2 (inclusive)
5322
tags = [(tag, revid) for tag, revid in tags if
5323
graph.is_between(revid, revid1, revid2)]
5326
elif sort == 'time':
5328
for tag, revid in tags:
5330
revobj = branch.repository.get_revision(revid)
5331
except errors.NoSuchRevision:
5332
timestamp = sys.maxint # place them at the end
5334
timestamp = revobj.timestamp
5335
timestamps[revid] = timestamp
5336
tags.sort(key=lambda x: timestamps[x[1]])
5338
# [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
5339
for index, (tag, revid) in enumerate(tags):
5341
revno = branch.revision_id_to_dotted_revno(revid)
5342
if isinstance(revno, tuple):
5343
revno = '.'.join(map(str, revno))
5344
except errors.NoSuchRevision:
5345
# Bad tag data/merges can lead to tagged revisions
5346
# which are not in this branch. Fail gracefully ...
5348
tags[index] = (tag, revno)
5353
self.add_cleanup(branch.unlock)
5355
graph = branch.repository.get_graph()
5356
rev1, rev2 = _get_revision_range(revision, branch, self.name())
5357
revid1, revid2 = rev1.rev_id, rev2.rev_id
5358
# only show revisions between revid1 and revid2 (inclusive)
5359
tags = [(tag, revid) for tag, revid in tags if
5360
graph.is_between(revid, revid1, revid2)]
5363
elif sort == 'time':
5365
for tag, revid in tags:
5367
revobj = branch.repository.get_revision(revid)
5368
except errors.NoSuchRevision:
5369
timestamp = sys.maxint # place them at the end
5371
timestamp = revobj.timestamp
5372
timestamps[revid] = timestamp
5373
tags.sort(key=lambda x: timestamps[x[1]])
5375
# [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
5376
for index, (tag, revid) in enumerate(tags):
5378
revno = branch.revision_id_to_dotted_revno(revid)
5379
if isinstance(revno, tuple):
5380
revno = '.'.join(map(str, revno))
5381
except errors.NoSuchRevision:
5382
# Bad tag data/merges can lead to tagged revisions
5383
# which are not in this branch. Fail gracefully ...
5385
tags[index] = (tag, revno)
5351
5387
for tag, revspec in tags:
5352
5388
self.outf.write('%-20s %s\n' % (tag, revspec))